3 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
\r
4 * Mobile Opera11 は Audio をサポートするがイベントが取れない
\r
5 * iframe 内で生成して、Audio Sprite の preset で再生できないか?
\r
// Module-level feature flags and shared state for the Audio Sprite backend.
// An "audio sprite" packs many sounds into one media file and plays time
// slices of it — used where mobile browsers limit simultaneous Audio elements.
7 var X_Audio_Sprite_shouldUse = window.HTMLAudioElement && ( X_UA.iOS || X_UA.AndroidBrowser || X_UA.OperaMobile || X_UA.OperaTablet ),

// true only on iOS — playback must first be triggered from a user touch.
8 X_Audio_Sprite_needTouchFirst = !!X_UA.iOS,

// set while a touch handler is running; NOTE(review): writer not visible in this chunk.
9 X_Audio_Sprite_inTouchAction = false,

// iOS < 6 cannot drive more than one track at a time.
10 X_Audio_Sprite_enableMultiTrack = !( X_UA.iOS < 6 ),

// Volume control is only enabled off the listed mobile browsers.
11 X_Audio_Sprite_enableVolume = window.HTMLAudioElement && ( !X_UA.iOS && !X_UA.AndroidBrowser && !X_UA.OperaMobile && !X_UA.OperaTablet ),

// Android 4+ gets a second track backed by a <video> element (see setup()).
12 X_Audio_Sprite_useVideoForMulti = 4 <= X_UA.AndroidBrowser,

// Per-platform track budget: 1 on old iOS, 2 with the video trick, else 9.
13 X_Audio_Sprite_maxTracks = X_UA.iOS < 6 ? 1 : X_Audio_Sprite_useVideoForMulti ? 2 : 9,

14 X_Audio_Sprite_lengthSilence = 10000, // length of the silent section at the very start of the sprite

15 X_Audio_Sprite_lengthDistance = 5000, // length of the silent gap between sounds

16 X_Audio_Sprite_uid = 0,

// Method bag merged onto the singleton instance (populated further below).
17 X_Audio_Sprite_members = {},

// Scratch state: tracks, BGM bookkeeping, presets (initializer partly outside this view).
18 X_Audio_Sprite_TEMP = {

29 X_Audio_Sprite_instance;
\r
// Public capability flags re-exported on the API object.
33 shouldUse : X_Audio_Sprite_shouldUse,

35 needTouchFirst : X_Audio_Sprite_needTouchFirst,

37 enableMultiTrack : X_Audio_Sprite_enableMultiTrack,

// Factory for the (singleton) Audio Sprite instance.
// Closes any existing instance, then builds an EventDispatcher extended
// with X_Audio_Sprite_members and runs setup( setting ).
39 create : function( setting ){

41 if( X_Audio_Sprite_instance ){

42 X_Audio_Sprite_instance.close();

// Only one sprite instance exists at a time; create() replaces it.
44 X_Audio_Sprite_instance = X_Class_override( new X.EventDispatcher(), X_Audio_Sprite_members );

46 X_Audio_Sprite_instance.setup( setting );

47 return X_Audio_Sprite_instance;
\r
52 // Find a track that has finished playing, or failing that, one that is about to finish.
53 function X_Audio_Sprite_getTrackEnded(){

54 var tracks = X_Audio_Sprite_TEMP.tracks,

// `last` starts at +Infinity so the first measured remaining time always wins.
56 i = 0, track, state, last = 1 / 0, _last, index;

58 for( ; i < l; ++i ){

59 track = tracks[ i ];

60 state = track.state();

// A non-playing track is free — return it immediately.
61 if( !state.playing ) return track;

// Never steal the track currently reserved for BGM.
62 if( track === X_Audio_Sprite_TEMP.bgmTrack ) continue;

// Still inside the leading-silence region: effectively idle, reuse it.
63 if( state.currentTime <= X_Audio_Sprite_lengthSilence + X_Audio_Sprite_lengthDistance ) return track;

// Remaining playback time; the minimum is tracked (in lines not visible here).
64 _last = state.endTime - state.currentTime;

// Fall back to the track that will finish soonest.
70 return tracks[ index ];
\r
75 * urls : [ 'xx.ogg', 'xx.mp3' ],
\r
79 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
\r
80 * BGM_02 : [ '56.00', '1:15.230', true ]
\r
84 X_Audio_Sprite_members = {

// Build the track pool and parse the preset time tables from `setting`.
// setting: { urls : [...], numTracks, useVideo, volume, <presetName> : [ start, end, isBGM?, loopStart?, loopEnd? ], ... }
86 setup : function( setting ){

88 var tracks = X_Audio_Sprite_TEMP.tracks,

89 bgms = X_Audio_Sprite_TEMP.BGMs,

90 presets = X_Audio_Sprite_TEMP.presets,

91 urls = setting[ 'urls' ],

92 n = setting[ 'numTracks' ] || 1,

93 video = setting[ 'useVideo' ],

// Fields below belong to the per-track `option` object (its opening line is not visible here).
95 volume : setting[ 'volume' ] || 0.5,

// Every track initially "plays" the leading silence region.
98 endTime : X_Audio_Sprite_lengthSilence,

// Clamp the requested track count to the platform budget.
103 n = n <= X_Audio_Sprite_maxTracks ? n : X_Audio_Sprite_maxTracks;

// Android 4+ needs a <video>-backed second track for multi-track playback.
105 video = video || ( 1 < n && X_Audio_Sprite_useVideoForMulti );

// Treat every array-valued entry of `setting` (other than urls) as a preset.
107 for( k in setting ){

109 if( X.Type.isArray( v ) && v !== urls){

110 v = X.Object.cloneArray( v );

111 for( i = v.length; i; ){

// Slot 2 is the isBGM boolean flag; every other slot is a time string ("m:ss.mmm") converted to a number.
113 if( i !== 2 ) v[ i ] = X_AudioWrapper_timeStringToNumber( v[ i ] );

// Presets flagged as BGM are also indexed in the BGM table.
115 if( v[ 2 ] ) bgms[ k ] = v;

// Create the track pool.
120 for( i = 0; i < n; ++i ){

121 if( i === 1 && X_Audio_Sprite_useVideoForMulti ){

122 // TODO use <Video>

123 tracks.push( X.Audio.create( urls, option ) );

125 tracks.push( X.Audio.create( urls, option ) );

// Only the last track reports backend discovery for the whole pool.
129 tracks[ n - 1 ].listenOnce( [ 'backendfound', 'nobackend' ], this, X_Audio_Sprite_handleEvent );

131 X_Audio_Sprite_instance.numTracks = n;
\r
// Tear down: kill every track, clear the preset tables, reset BGM state.
134 close : function(){

135 var tracks = X_Audio_Sprite_TEMP.tracks,

136 bgms = X_Audio_Sprite_TEMP.BGMs,

137 presets = X_Audio_Sprite_TEMP.presets,

// kill() each track; pop() also empties the shared tracks array in place.
140 while( tracks.length ){

141 tracks.pop().kill();

147 for( k in presets ){

148 delete presets[ k ];

// Reset BGM bookkeeping to its initial values.
151 X_Audio_Sprite_TEMP.bgmTrack = null;

152 X_Audio_Sprite_TEMP.bgmPosition = 0;

153 X_Audio_Sprite_TEMP.bgmName = '';

154 X_Audio_Sprite_TEMP.bgmLooped = false;

155 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
// NOTE(review): the enclosing method header is not visible in this chunk —
// presumably the touch-gesture unlock for needTouchFirst platforms; confirm.
159 var tracks = X_Audio_Sprite_TEMP.tracks,

160 i = 0, l = tracks.length;

161 for( ; i < l; ++i ){

// Call the raw media element's load() on every track.
162 X_AudioProxy_getAudioWrapper( tracks[ i ] )._rawObject.load();
\r
167 * @return uid Number
// Play the named preset. BGM presets get a dedicated looping track (with
// resume support); one-shot presets grab a free track and play their slice
// once, then park in the leading silence. Returns the track index as uid.
169 play : function( name ){

170 var bgm = X_Audio_Sprite_TEMP.bgmTrack,

171 tracks = X_Audio_Sprite_TEMP.tracks,

172 bgms = X_Audio_Sprite_TEMP.BGMs,

173 presets = X_Audio_Sprite_TEMP.presets,

174 preset = presets[ name ],

// --- BGM branch ---
178 if( bgms[ name ] ){

// Switching to a different BGM: restart bookkeeping from the preset start.
179 if( name !== X_Audio_Sprite_TEMP.bgmName ){

181 X_Audio_Sprite_TEMP.bgmName = name;

182 X_Audio_Sprite_TEMP.bgmPosition = preset[ 0 ];

183 X_Audio_Sprite_TEMP.bgmPlaying = true;

184 X_Audio_Sprite_TEMP.bgmLooped = false;

// Multi-track: reserve a finished track for BGM; single-track: use track 0.
189 if( 1 < tracks.length ){

190 track = X_Audio_Sprite_TEMP.bgmTrack = X_Audio_Sprite_getTrackEnded();

192 track = X_Audio_Sprite_TEMP.bgmTrack = tracks[ 0 ];

// preset layout: [ start, end, isBGM, loopStart, loopEnd ] — resume from bgmPosition.
195 .state( { looped : X_Audio_Sprite_TEMP.bgmLooped } )

196 .play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] )

197 .seek( X_Audio_Sprite_TEMP.bgmPosition )

198 .listen( 'looped', this, X_Audio_Sprite_handleEvent );

// --- one-shot branch ---
200 if( 1 < tracks.length ){

// NOTE(review): the visible X_Audio_Sprite_getTrackEnded() takes no
// parameter, but an argument is passed here — confirm against the full file.
201 track = X_Audio_Sprite_getTrackEnded( X_Audio_Sprite_TEMP.bgmPlaying );

// After the one-shot slice ends, the track loops in the leading silence (0..lengthSilence).
203 .listen( 'looped', this, X_Audio_Sprite_handleEvent )

204 .state( { looped : false } )

205 .play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );

207 // single track, iOS

// Remember where the BGM was, so the 'looped' handler can resume it later.
209 X_Audio_Sprite_TEMP.bgmPosition = bgm.currentTime();

210 X_Audio_Sprite_TEMP.bgmTrack = null;

212 track = tracks[ 0 ];

214 .listen( 'looped', this, X_Audio_Sprite_handleEvent )

215 .state( { looped : false } )

216 .play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );

// uid is simply the track's index in the pool.
// NOTE(review): Array.prototype.indexOf is ES5 — verify the legacy targets
// of this module (old Opera/Android) have it or a polyfill.
219 return tracks.indexOf( track );
\r
224 pause : function( uid ){
\r
225 var track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
226 if( X_Audio_Sprite_TEMP.bgmTrack === track ){
\r
227 X_Audio_Sprite_TEMP.bgmPosition = track.currentTime();
\r
228 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
229 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
231 console.log( 'pause' );
\r
232 track && track.play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence ).seek( 0 );
\r
236 seek : function( uid, position ){
\r
237 var track = X_Audio_Sprite_TEMP.tracks[ uid ],
\r
240 delete track.seekTime;
\r
241 end = X_AudioWrapper_getEndTime( track );
\r
242 position <= end && X_AudioWrapper_getStartTime( track, end ) <= position && track.seek( postion );
\r
// Get/set volume. One branch (guard not visible here) handles the global
// volume across all tracks; the other targets the single track `uid`.
// As a getter it returns the stored volume, or -1 for a missing track.
247 volume : function( uid, opt_volume ){

// Global branch: no new volume supplied -> return the stored master volume.
251 if( opt_volume === undefined ){

252 return X_Audio_Sprite_TEMP.volume;

// Global setter: apply opt_volume to every track (reverse iteration).
254 for( i = X_Audio_Sprite_TEMP.tracks.length; i; ){

255 X_Audio_Sprite_TEMP.tracks[ --i ].volume( opt_volume );

// Per-track branch.
259 track = X_Audio_Sprite_TEMP.tracks[ uid ];

260 if( opt_volume === undefined ){

// -1 signals "no such track" to the caller.
261 return track ? track.volume() : -1;

263 track && track.volume( opt_volume );
\r
// Get/set the state object of track `uid`. As a getter, a missing track
// yields a minimal state carrying only the master volume.
267 state : function( uid, opt_obj ){

268 var track = X_Audio_Sprite_TEMP.tracks[ uid ];

270 if( opt_obj === undefined ){

271 return track ? track.state() : { volume : X_Audio_Sprite_TEMP.volume };

// Setter: silently ignored when the track does not exist.
273 track && track.state( opt_obj );
\r
// Central event handler for track/backend events. Dispatched through a
// switch (its header, presumably on e.type, is not visible in this chunk).
278 function X_Audio_Sprite_handleEvent( e ){

// A playback backend was chosen for the pool's reporting track.
280 case 'backendfound' :

281 this.asyncDispatch( e );

282 e.target.unlisten( 'nobackend', this, X_Audio_Sprite_handleEvent );

// Pick the readiness event to wait for, per backend.
284 if( e.backendName === 'HTML Audio' ){

285 e.target.listen( [ X_Audio_HTMLAudio_playTrigger, 'loadeddata' ], this, X_Audio_Sprite_handleEvent );

287 e.target.listen( 'canplaythrough', this, X_Audio_Sprite_handleEvent );

// No backend available: forward the event and stop listening.
292 this.asyncDispatch( e );

293 e.target.unlisten( 'backendfound', this, X_Audio_Sprite_handleEvent );

// Any readiness event means the sprite can start playing.
296 case 'canplaythrough' :

297 case X_Audio_HTMLAudio_playTrigger :

298 case 'loadeddata' :

299 this.asyncDispatch( 'audioSpriteCanPlay' );

// 'looped' (case label not visible): mark the BGM as having looped once.
303 if( e.target === X_Audio_Sprite_TEMP.bgmTrack ){

304 X_Audio_Sprite_TEMP.bgmLooped = true;

306 // single track | iOS

// Single-track mode: a one-shot finished — reclaim the track for the
// pending BGM and resume it.
307 if( X_Audio_Sprite_TEMP.bgmPlaying && !X_Audio_Sprite_TEMP.bgmTrack ){

308 X_Audio_Sprite_TEMP.bgmTrack = e.target;

309 this.play( X_Audio_Sprite_TEMP.bgmName );

314 case X.Event.KILL_INSTANCE :
\r