 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
 * Mobile Opera11 は Audio をサポートするがイベントが取れない
 * iframe 内で生成して、Audio Sprite の preset で再生できないか?
// --- AudioSprite feature-detection flags and module-level state ---------------
// NOTE(review): this chunk is a corrupted extraction — the leading integer on
// each line and the interleaved "\r" lines are artifacts of the dump, and the
// members of X_Audio_Sprite_TEMP at original lines 24-26/28-34 are missing
// from view. Code is left byte-identical; only comments are added.
7 var X_Audio_Sprite_shouldUse = window.HTMLAudioElement && ( X_UA[ 'iOS' ] || X_UA[ 'AndroidBrowser' ] || X_UA[ 'OperaMobile' ] || X_UA[ 'OperaTablet' ] ), // Flash がない
\r
8 X_Audio_Sprite_useVideoForMulti = //( X_UA[ 'AndroidBrowser3' ] && 3.1 <= X_UA[ 'AndroidBrowser' ] ) ||
\r
9 //( ( 4.2 <= X_UA[ 'AndroidBrowser' ] || ( 4.1 <= X_UA[ 'AndroidBrowser' ] && 2 <= X_UA[ 'AndroidPatch' ] ) ) && X_UA[ 'AndroidWebkit' ] <= 534.3 ),
\r
10 // ドスパラパッドはビデオのインライン再生が不可, 534.30 で Webkit系は終了, 次は 537.36 で Chrome系
\r
// NOTE(review): the video-for-multitrack path is currently hard-disabled
// (constant false); the commented expressions above are the old UA gates.
11 false, //X_UA[ 'AndroidChromeBrowser' ],
\r
12 X_Audio_Sprite_needTouchAndroid = X_UA[ 'AndroidChromeBrowser' ] && !X_Audio_WebAudioWrapper,
\r
13 X_Audio_Sprite_needTouchFirst = X_UA[ 'iOS' ] || X_Audio_Sprite_needTouchAndroid || ( X_UA[ 'WinPhone' ] && X_UA[ 'IE9' ] ),
\r
14 X_Audio_Sprite_disableMultiTrack = ( X_UA[ 'iOS' ] && !X_Audio_WebAudio_context ) || ( !X_UA[ 'AndroidChromeBrowser' ] && X_UA[ 'AndroidBrowser4' ] ) || ( X_UA[ 'WinPhone' ] && X_UA[ 'IE' ] < 12 ),
\r
15 X_Audio_Sprite_enableVolume = window.HTMLAudioElement && ( !X_UA[ 'iOS' ] && !X_UA[ 'AndroidBrowser' ] && !X_UA[ 'OperaMobile' ] && !X_UA[ 'OperaTablet' ] ), // TODO fennec は 25以上
\r
16 // http://tukumemo.com/html5-audio-sp/
\r
17 // iOS6、Android4.1から同時再生が可能になりました。
\r
// Max simultaneous tracks: 2 when multiplexing through <video>, 1 when
// multi-track is disabled, otherwise 9.
18 X_Audio_Sprite_maxTracks = X_Audio_Sprite_useVideoForMulti ? 2 : X_Audio_Sprite_disableMultiTrack ? 1 : 9,
\r
// Silent-region geometry of the sprite: leading silence and the gap between
// sounds. Presumably milliseconds, matching the preset times — TODO confirm.
19 X_Audio_Sprite_lengthSilence = 10000, // 一番最初の無音部分の長さ
\r
20 X_Audio_Sprite_lengthDistance = 5000, // 音間の無音の長さ
\r
21 X_Audio_Sprite_uid = 0,
\r
22 X_Audio_Sprite_members = {},
\r
// Shared mutable state bag; most members (tracks, presets, BGMs, bgm* fields)
// are on the elided lines — only pauseTracks is visible here.
23 X_Audio_Sprite_TEMP = {
\r
27 pauseTracks : [], // X_EVENT_DEACTIVATE によって pause した再生中のトラックたち。
\r
35 X_Audio_Sprite_instance,
\r
36 X_Audio_Sprite_numTracks,
\r
37 X_Audio_Sprite_useVideo;
\r
// X.AudioSprite( setting ): factory for the singleton audio-sprite instance.
// Normalizes the preset time-string arrays in `setting`, caps the track count,
// and starts audio-backend detection. Returns the singleton.
// NOTE(review): several lines are elided in this extraction (the `option`
// object literal around original lines 46-54, loop interiors at 69/73/75,
// closing braces); the visible statements cannot run as-is.
39 X[ 'AudioSprite' ] = function( setting ){
\r
40 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
41 bgms = X_Audio_Sprite_TEMP.BGMs,
\r
42 presets = X_Audio_Sprite_TEMP.presets,
\r
43 urls = setting[ 'urls' ],
\r
44 video = setting[ 'useVideo' ],
\r
45 n = video ? 1 : setting[ 'numTracks' ] || 1,
\r
47 volume : setting[ 'volume' ] || 0.5,
\r
50 endTime : X_Audio_Sprite_lengthSilence,
\r
// Singleton: build the dispatcher-backed instance once and subscribe it to
// viewport (de)activation so playback pauses while the page is hidden.
55 if( !X_Audio_Sprite_instance ){
\r
56 X_Audio_Sprite_instance = X_Class_override( X_EventDispatcher(), X_Audio_Sprite_members );
\r
57 X_ViewPort[ 'listen' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], X_Audio_Sprite_instance, X_Audio_Sprite_handleEvent );
\r
60 n = n <= X_Audio_Sprite_maxTracks ? n : X_Audio_Sprite_maxTracks;
\r
63 // Android4.x標準ブラウザ(Chrome系)でブラウザが隠れた場合に音が鳴り続ける問題、ビデオで解決できる?
\r
64 //if( X_Audio_Sprite_needTouchAndroid && n === 1 ){
\r
// Normalize each preset array: copy it, convert time strings to numbers
// (index 2 is skipped — it is the BGM boolean flag), and register BGM presets.
68 for( k in setting ){
\r
70 if( X_Type_isArray( v ) && v !== urls ){
\r
71 v = X_Array_copy( v );
\r
72 for( i = v.length; i; ){
\r
74 if( i !== 2 ) v[ i ] = X_AudioWrapper_timeStringToNumber( v[ i ] );
\r
76 if( v[ 2 ] ) bgms[ k ] = v;
\r
81 X_Audio_startDetectionBackend( X_Audio_BACKENDS[ 0 ], X_Audio_Sprite_instance, X_Array_copy( urls ), option );
\r
83 X_Audio_Sprite_instance[ 'listenOnce' ]( [ X_EVENT_BACKEND_READY, X_EVENT_BACKEND_NONE ], X_AudioSprite_backendHandler );
\r
84 X_Audio_Sprite_instance[ 'listenOnce' ]( X_EVENT_KILL_INSTANCE, X_Audio_Sprite_handleEvent );
\r
86 X_Audio_Sprite_useVideo = video;
\r
87 X_Audio_Sprite_numTracks = X_Audio_Sprite_instance[ 'numTracks' ] = n;
\r
89 return X_Audio_Sprite_instance;
\r
// Public capability flags, published on the X.AudioSprite factory so callers
// can feature-detect before constructing an instance:
//  - shouldUse:        HTMLAudio exists and the UA is one of the mobile
//                      targets (iOS / Android stock / Opera Mobile|Tablet).
//  - needTouchFirst:   a user gesture is required before first playback
//                      (iOS, Android Chrome without Web Audio, WinPhone IE9).
//  - enableMultiTrack: simultaneous track playback is supported.
// NOTE(review): the original extraction had line-number tokens ("92 ", "93 ",
// "94 ") fused onto these statements plus stray CR lines, making them invalid
// JavaScript; the corruption is stripped here, the statements are unchanged.
X[ 'AudioSprite' ][ 'shouldUse' ]        = X_Audio_Sprite_shouldUse;
X[ 'AudioSprite' ][ 'needTouchFirst' ]   = X_Audio_Sprite_needTouchFirst;
X[ 'AudioSprite' ][ 'enableMultiTrack' ] = !X_Audio_Sprite_disableMultiTrack;
// 再生が終わっているもの、終わりかけのものを探す
// TODO 終わりかけのもの、と一番古いもの、どちらを再利用するか? これ以上に細かい実装を望む場合は X.Audio.Sprite は使わず自力で実装
// Find a reusable track: return the first track that is not playing, else a
// playing non-BGM track still inside the leading-silence window (i.e. only
// "parked"), else fall through to the track nearest its end (per the TODO
// above). The BGM track is never recycled.
// NOTE(review): the declaration of `l`, the `last`/`index` bookkeeping inside
// the final `if`, and the closing braces are elided in this extraction.
98 function X_Audio_Sprite_getTrackEnded(){
\r
99 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
101 i = 0, track, state, last = 1 / 0, _last, index;
\r
103 for( ; i < l; ++i ){
\r
104 track = tracks[ i ];
\r
105 state = track.getState();
\r
106 if( !state.playing ) return track;
\r
107 if( track === X_Audio_Sprite_TEMP.bgmTrack ) continue;
\r
108 if( state.currentTime <= X_Audio_Sprite_lengthSilence + X_Audio_Sprite_lengthDistance ) return track;
\r
// Track remaining-time minimum: _last is time left until this track's end.
109 _last = state.endTime - state.currentTime;
\r
110 if( _last < last ){
\r
115 return tracks[ index ];
\r
 * urls : [ 'xx.ogg', 'xx.mp3' ],
 * useVideo : false,
 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
 * BGM_02 : [ '56.00', '1:15.230', true ]
 * X_EVENT_BACKEND_READY
 * X_EVENT_BACKEND_NONE
 * X_EVENT_MEDIA_LOOPED
 * X_EVENT_MEDIA_ENDED
// X_Audio_Sprite_members: the method mixin merged onto the X_EventDispatcher
// singleton via X_Class_override in the X.AudioSprite factory.
// NOTE(review): this extraction elides many interior lines (else-branches,
// object-literal scaffolding, closing braces); comments below describe only
// what the visible lines show.
137 X_Audio_Sprite_members = {
\r
// 'load': trigger loading on every track. Android Chrome without Web Audio
// first play()s the raw element to work around a duration bug; WinPhone
// needs a touch-driven play() then an immediate pause.
141 'load' : function(){
\r
142 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
143 i = 0, l = tracks.length;
\r
145 for( ; i < l; ++i ){
\r
146 if( X_Audio_Sprite_needTouchAndroid ){
\r
147 console.log( '[duration fix]開始 - ' + tracks[ i ][ '_rawObject' ].duration );
\r
148 tracks[ i ]._playForDuration = 1;
\r
149 tracks[ i ][ '_rawObject' ].play();
\r
151 if( X_UA[ 'WinPhone' ] ){
\r
152 console.log( 'WinPhone : touch -> play()' );
\r
153 //tracks[ i ].play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence ).seek( 0 );
\r
154 this[ 'pause' ]( i );
\r
156 tracks[ i ][ '_rawObject' ].load();
\r
162 * @return {number} uid
\r
// 'play'( name ): start the named preset. BGM presets (flag at index 2) keep
// position/loop bookkeeping in X_Audio_Sprite_TEMP and claim a dedicated
// track; one-shot presets grab a reusable track and loop into the leading
// silent region when done. Returns the index of the track used.
164 'play' : function( name ){
\r
165 var bgm = X_Audio_Sprite_TEMP.bgmTrack,
\r
166 tracks = X_Audio_Sprite_TEMP.tracks,
\r
167 bgms = X_Audio_Sprite_TEMP.BGMs,
\r
168 presets = X_Audio_Sprite_TEMP.presets,
\r
169 preset = presets[ name ],
\r
173 if( bgms[ name ] ){
\r
174 if( name !== X_Audio_Sprite_TEMP.bgmName ){
\r
176 X_Audio_Sprite_TEMP.bgmName = name;
\r
177 X_Audio_Sprite_TEMP.bgmPosition = preset[ 0 ];
\r
178 X_Audio_Sprite_TEMP.bgmLooped = false;
\r
181 X_Audio_Sprite_TEMP.bgmPlaying = true;
\r
186 if( 1 < tracks.length ){
\r
187 track = X_Audio_Sprite_TEMP.bgmTrack = X_Audio_Sprite_getTrackEnded();
\r
189 track = X_Audio_Sprite_TEMP.bgmTrack = tracks[ 0 ];
\r
192 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).playing ){
\r
// BGM state pushed to an already-playing track: preset[0]/[1] are start/end,
// preset[3]/[4] are the loop-in/loop-out points.
195 'looped' : X_Audio_Sprite_TEMP.bgmLooped,
\r
196 'currentTime' : X_Audio_Sprite_TEMP.bgmPosition,
\r
197 'startTime' : preset[ 0 ],
\r
198 'endTime' : preset[ 1 ],
\r
199 'loopStartTime' : preset[ 3 ],
\r
200 'loopEndTime' : preset[ 4 ]
\r
203 track.setState( { 'looped' : X_Audio_Sprite_TEMP.bgmLooped } );
\r
204 track.play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] );
\r
205 track.seek( X_Audio_Sprite_TEMP.bgmPosition );
\r
209 if( 1 < tracks.length ){
\r
210 track = X_Audio_Sprite_getTrackEnded( X_Audio_Sprite_TEMP.bgmPlaying );
\r
212 [ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent )
\r
213 .setState( { 'looped' : false } );
\r
// One-shot: play the sound, then loop into the leading silence [0, silence).
214 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );
\r
216 // single track, iOS
\r
// Single-track fallback: remember where the BGM was, release the track for
// the one-shot, and rely on the MEDIA_BEFORE_LOOP handler to resume the BGM.
218 X_Audio_Sprite_TEMP.bgmPosition = bgm.currentTime();
\r
219 //console.log( 'bgm position : ' + X_Audio_Sprite_TEMP.bgmPosition + ' isPlay:' + bgm.playing );
\r
220 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
222 track = tracks[ 0 ];
\r
224 if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).playing ){
\r
228 //'currentTime' : preset[ 0 ],
\r
229 'startTime' : preset[ 0 ],
\r
230 'endTime' : preset[ 1 ],
\r
231 'loopStartTime' : 0,
\r
232 'loopEndTime' : X_Audio_Sprite_lengthSilence
\r
236 track.play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );
\r
240 return tracks.indexOf( track );
\r
// 'pause'( uid ): "pause" by parking the track in the leading silent region
// (play [0, silence) then seek 0); clears BGM bookkeeping when uid is the
// BGM track. Dispatches X_EVENT_MEDIA_PAUSED asynchronously.
245 'pause' : function( uid ){
\r
246 var track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
247 if( X_Audio_Sprite_TEMP.bgmTrack === track ){
\r
248 X_Audio_Sprite_TEMP.bgmPosition = track.currentTime();
\r
249 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
250 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
252 track && track.play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence );
\r
253 track && track.seek( 0 );
\r
254 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PAUSED );
\r
// 'seek'( uid, position ): seek within the current sound; `position` is
// relative to the sound's start and is bounds-checked against its length.
258 'seek' : function( uid, position ){
\r
259 var track = X_Audio_Sprite_TEMP.tracks[ uid ],
\r
262 delete track.seekTime;
\r
263 end = X_AudioWrapper_getEndTime( track );
\r
264 start = X_AudioWrapper_getStartTime( track, end );
\r
265 0 <= position && position <= ( end - start ) && track.seek( start + position );
\r
// 'volume'( uid, opt_volume ): getter/setter. The visible lines show a
// broadcast path (all tracks) and a per-uid path; the branch condition
// selecting between them is elided — presumably uid === undefined. TODO confirm.
270 'volume' : function( uid, opt_volume ){
\r
274 if( opt_volume === undefined ){
\r
275 return X_Audio_Sprite_TEMP.volume;
\r
277 for( i = X_Audio_Sprite_TEMP.tracks.length; i; ){
\r
278 X_Audio_Sprite_TEMP.tracks[ --i ].volume( opt_volume );
\r
282 track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
283 if( opt_volume === undefined ){
\r
284 return track ? track.gain : -1;
\r
286 track && track.volume( opt_volume );
\r
// 'state'( uid, opt_obj ): getter returns { currentTime, playing, duration,
// volume } with times made relative to the sound's startTime (falls back to
// { volume, playing:false } on the elided branch); setter forwards opt_obj
// to track.setState.
290 'state' : function( uid, opt_obj ){
\r
291 var track = X_Audio_Sprite_TEMP.tracks[ uid ],
\r
294 if( opt_obj === undefined ){
\r
297 state = track.getState();
\r
298 start = state.startTime;
\r
300 'currentTime' : state.currentTime - start,
\r
301 'playing' : start <= state.currentTime && state.currentTime <= state.endTime,
\r
302 'duration' : state.endTime - start,
\r
303 'volume' : X_Audio_Sprite_TEMP.volume
\r
306 return { 'volume' : X_Audio_Sprite_TEMP.volume, 'playing' : false };
\r
308 track && track.setState( opt_obj );
\r
// Backend-detection handler. On BACKEND_READY it instantiates one wrapper per
// track (optionally video-backed), re-dispatches a READY-style event with
// needTouchForPlay/needTouchForLoad flags, and waits for the last track's
// X_EVENT_READY. On BACKEND_NONE it forwards the failure. On X_EVENT_READY it
// applies the touch/duration workarounds and announces readiness.
// NOTE(review): the enclosing switch statement and several closing braces are
// elided in this extraction.
313 function X_AudioSprite_backendHandler( e ){
\r
314 var i, backend, option, src, name, last, _e;
\r
317 case X_EVENT_BACKEND_READY :
\r
319 backend = X_Audio_BACKENDS[ e[ 'backendID' ] ];
\r
320 option = e[ 'option' ];
\r
322 this[ 'unlisten' ]( X_EVENT_BACKEND_NONE, X_AudioSprite_backendHandler );
\r
323 this[ 'source' ] = src = e[ 'source' ];
\r
324 this[ 'backendName' ] = name = backend.backendName;
\r
326 //console.log( i + ' / ' + X_Audio_Sprite_numTracks );
\r
328 for( i = 0; i < X_Audio_Sprite_numTracks; ++i ){
\r
329 if( X_Audio_Sprite_useVideo || ( i === 1 && X_Audio_Sprite_useVideoForMulti ) ){
\r
330 option[ 'useVideo' ] = true;
\r
331 console.log( 'use video' );
\r
333 // Audiobackend の owner として null を渡すとAudioBackend 自身へ dispatch する
\r
334 X_Audio_Sprite_TEMP.tracks.push( last = backend.klass( null, e[ 'source' ], option ) );
\r
337 //console.dir( backend );
\r
338 //console.dir( last );
\r
342 'type' : X_EVENT_BACKEND_READY,
\r
344 'backendName' : name
\r
// Web Audio backend needs a touch before *play*; other backends need the
// touch before *load*.
347 if( X_Audio_Sprite_needTouchFirst ){
\r
348 if( name === 'Web Audio' ){
\r
349 _e[ 'needTouchForPlay' ] = true;
\r
351 _e[ 'needTouchForLoad' ] = true;
\r
354 this[ 'asyncDispatch' ]( _e );
\r
356 console.log( 'AudioSprite - X_EVENT_BACKEND_READY' );
\r
358 last[ 'listenOnce' ]( X_EVENT_READY, this, X_AudioSprite_backendHandler );
\r
360 // READY, needTouchForPlay, needTouchForLoad
\r
361 if( X_HTMLAudio_durationFix && !X_Audio_Sprite_needTouchFirst ){
\r
362 for( i = 0; i < X_Audio_Sprite_TEMP.tracks.length; ++i ){
\r
363 this[ 'pause' ]( i );
\r
367 return X_CALLBACK_STOP_NOW;
\r
369 case X_EVENT_BACKEND_NONE :
\r
370 this[ 'unlisten' ]( X_EVENT_BACKEND_READY, this, X_AudioSprite_backendHandler )
\r
371 [ 'asyncDispatch' ]( X_EVENT_BACKEND_NONE );
\r
372 return X_CALLBACK_STOP_NOW;
\r
374 case X_EVENT_READY :
\r
375 if( X_Audio_Sprite_needTouchAndroid ){
\r
376 for( i = 0; i < X_Audio_Sprite_TEMP.tracks.length; ++i ){
\r
377 this[ 'pause' ]( i );
\r
// Defer the public READY until the first track actually reports PLAYING.
379 e.target[ 'listenOnce' ]( X_EVENT_MEDIA_PLAYING, this, this[ 'asyncDispatch' ], [ X_EVENT_READY ] );
\r
383 console.log( 'X.AudioSprite - Ready!' );
\r
384 this[ 'asyncDispatch' ]( X_EVENT_READY );
\r
// Shared event handler for track media events, viewport (de)activation and
// instance teardown.
// NOTE(review): the enclosing switch scaffolding and the end of this function
// (beyond original line 458) are not visible in this extraction.
390 function X_Audio_Sprite_handleEvent( e ){
\r
391 var i, tracks, track, _e, k;
\r
394 case X_EVENT_MEDIA_PLAYING :
\r
395 ( e.target === X_Audio_Sprite_TEMP.bgmTrack || !e.target.looped ) && this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PLAYING );
\r
// BEFORE_LOOP: for the BGM track record that it has looped; for one-shots
// the loop into the silent region means the sound effectively ENDED.
398 case X_EVENT_MEDIA_BEFORE_LOOP :
\r
399 if( e.target === X_Audio_Sprite_TEMP.bgmTrack ){
\r
400 X_Audio_Sprite_TEMP.bgmLooped = true;
\r
401 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
403 if( e.target.looped ){
\r
404 //this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid
\r
406 this[ 'asyncDispatch' ]( X_EVENT_MEDIA_ENDED ); // TODO uid
\r
409 //console.log( '[AudioSprite] bgmPlaying:' + X_Audio_Sprite_TEMP.bgmPlaying + ' ' + !X_Audio_Sprite_TEMP.bgmTrack );
\r
411 // single track | iOS
\r
// Single-track mode: if a BGM should be playing but lost its track to a
// one-shot, reclaim this track and restart the BGM.
412 if( X_Audio_Sprite_TEMP.bgmPlaying && !X_Audio_Sprite_TEMP.bgmTrack ){
\r
413 X_Audio_Sprite_TEMP.bgmTrack = e.target;
\r
414 this.play( X_Audio_Sprite_TEMP.bgmName );
\r
415 return X_CALLBACK_PREVENT_DEFAULT;
\r
420 // TODO Android Firefox で アクティブ検出できない!
\r
// Viewport became active again: resume everything parked in pauseTracks.
421 case X_EVENT_VIEW_ACTIVATE :
\r
422 console.log( '■ アクティブ' );
\r
423 // track.play(); or iOS need touch??
\r
424 tracks = X_Audio_Sprite_TEMP.pauseTracks;
\r
425 while( tracks.length ) tracks.pop().actualPlay();
\r
// Viewport hidden: remember each playing track in pauseTracks, then pause it
// (Array#push returns the new length, which is truthy, so pause() runs).
428 case X_EVENT_VIEW_DEACTIVATE :
\r
429 console.log( '■ デアクティブ' );
\r
431 tracks = X_Audio_Sprite_TEMP.tracks;
\r
434 track = tracks[ --i ];
\r
435 track.playing && X_Audio_Sprite_TEMP.pauseTracks.push( track ) && track.pause();
\r
// Teardown: kill every track, empty the BGM/preset tables, reset all BGM
// bookkeeping, and unhook the viewport listeners.
439 case X_EVENT_KILL_INSTANCE :
\r
441 while( X_Audio_Sprite_TEMP.tracks.length ){
\r
442 X_Audio_Sprite_TEMP.tracks.pop()[ 'kill' ]();
\r
445 for( k in X_Audio_Sprite_TEMP.bgms ){
\r
446 delete X_Audio_Sprite_TEMP.bgms[ k ];
\r
448 for( k in X_Audio_Sprite_TEMP.presets ){
\r
449 delete X_Audio_Sprite_TEMP.presets[ k ];
\r
452 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
453 X_Audio_Sprite_TEMP.bgmPosition = 0;
\r
454 X_Audio_Sprite_TEMP.bgmName = '';
\r
455 X_Audio_Sprite_TEMP.bgmLooped = false;
\r
456 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
458 X_ViewPort[ 'unlisten' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], this, X_Audio_Sprite_handleEvent );
\r