3 * http://uupaa.hatenablog.com/entry/2011/12/12/213233
\r
4 * Mobile Opera11 は Audio をサポートするがイベントが取れない
\r
5 * iframe 内で生成して、Audio Sprite の preset で再生できないか?
\r
// NOTE(review): garbled extract — leading integers are embedded original line
// numbers and several interior lines are missing; code kept byte-identical.
// Module-level capability flags and shared state for the Audio Sprite backend.
// Sprite playback is used only on mobile browsers exposing HTMLAudioElement.
7 var X_Audio_Sprite_shouldUse = window.HTMLAudioElement && ( X_UA.iOS || X_UA.AndroidBrowser || X_UA.OperaMobile || X_UA.OperaTablet ),
\r
// iOS refuses to start audio without a prior user touch.
8 X_Audio_Sprite_needTouchFirst = !!X_UA.iOS,
\r
9 X_Audio_Sprite_inTouchAction = false,
\r
// iOS can drive only one track at a time.
10 X_Audio_Sprite_enableMultiTrack = !X_UA.iOS,
\r
// Volume control works only on the non-mobile HTMLAudioElement targets.
11 X_Audio_Sprite_enableVolume = window.HTMLAudioElement && ( !X_UA.iOS && !X_UA.AndroidBrowser && !X_UA.OperaMobile && !X_UA.OperaTablet ),
\r
// Android 4+ uses a <video> element for the second simultaneous track.
12 X_Audio_Sprite_useVideoForMulti = 4 <= X_UA.AndroidBrowser,
\r
// iOS < 6 : 1 track; video-for-multi : 2 tracks; otherwise up to 9.
13 X_Audio_Sprite_maxTracks = X_UA.iOS < 6 ? 1 : X_Audio_Sprite_useVideoForMulti ? 2 : 9,
\r
// Units presumably ms — TODO confirm against X_AudioWrapper_timeStringToNumber.
14 X_Audio_Sprite_lengthSilence = 10000, // length of the silent section at the very beginning
\r
15 X_Audio_Sprite_lengthDistance = 5000, // length of the silence between sounds
\r
16 X_Audio_Sprite_uid = 0,
\r
17 X_Audio_Sprite_members = {},
\r
// Shared scratch state (tracks, BGM bookkeeping, presets); its interior
// (original lines 19-28) is missing from this extract.
18 X_Audio_Sprite_TEMP = {
\r
29 X_Audio_Sprite_instance;
\r
// Public facade fragment (the enclosing object literal's header is outside
// this extract): exposes the capability flags plus a singleton factory.
33 shouldUse : X_Audio_Sprite_shouldUse,
\r
35 needTouchFirst : X_Audio_Sprite_needTouchFirst,
\r
37 enableMultiTrack : X_Audio_Sprite_enableMultiTrack,
\r
// Create (or recreate) the singleton sprite instance. Any existing instance
// is closed first, then a fresh EventDispatcher is extended with the member
// functions and initialised from `setting`.
39 create : function( setting ){
\r
41 if( X_Audio_Sprite_instance ){
\r
42 X_Audio_Sprite_instance.close();
\r
44 X_Audio_Sprite_instance = X_Class_override( new X.EventDispatcher(), X_Audio_Sprite_members );
\r
46 X_Audio_Sprite_instance.setup( setting );
\r
47 return X_Audio_Sprite_instance;
\r
52 // Find a track that has finished playing, or one that is about to finish.
\r
53 function X_Audio_Sprite_getTrackEnded(){
\r
54 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
// The loop bound `l` (original 55) and the `index` bookkeeping (original
// 65-69) are missing from this extract; `last`/`_last` presumably track the
// smallest remaining play time — TODO confirm against the full file.
56 i = 0, track, state, last = 1 / 0, _last, index;
\r
58 for( ; i < l; ++i ){
\r
59 track = tracks[ i ];
\r
60 state = track.state();
\r
// Not playing at all -> immediately reusable.
61 if( !state.playing ) return track;
\r
// Never steal the BGM track.
62 if( track === X_Audio_Sprite_TEMP.bgmTrack ) continue;
\r
// Still inside the leading silence + inter-sound gap -> effectively idle.
63 if( state.currentTime <= X_Audio_Sprite_lengthSilence + X_Audio_Sprite_lengthDistance ) return track;
\r
// Remaining play time of this track.
64 _last = state.endTime - state.currentTime;
\r
70 return tracks[ index ];
\r
// Usage-example fragment from the original doc block (surrounding lines missing):
75 * urls : [ 'xx.ogg', 'xx.mp3' ],
\r
79 * BGM_01 : [ '15.00', '45.500', true, '17.666', '50.999' ],
\r
80 * BGM_02 : [ '56.00', '1:15.230', true ]
\r
// Instance members mixed into the EventDispatcher by create().
84 X_Audio_Sprite_members = {
\r
// Parse the setting object, register BGM/preset entries and create tracks.
86 setup : function( setting ){
\r
88 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
89 bgms = X_Audio_Sprite_TEMP.BGMs,
\r
90 presets = X_Audio_Sprite_TEMP.presets,
\r
91 urls = setting[ 'urls' ],
\r
92 n = setting[ 'numTracks' ] || 1,
\r
93 video = setting[ 'useVideo' ],
\r
// Part of an option object literal whose opening line (original 94) is
// missing. NOTE(review): `|| 0.5` turns an explicit volume of 0 into 0.5 —
// confirm that is intended.
95 volume : setting[ 'volume' ] || 0.5,
\r
98 endTime : X_Audio_Sprite_lengthSilence,
\r
// Clamp the requested track count to the platform maximum.
103 n = n <= X_Audio_Sprite_maxTracks ? n : X_Audio_Sprite_maxTracks;
\r
105 video = video || ( 1 < n && X_Audio_Sprite_useVideoForMulti );
\r
// Every array-valued setting other than `urls` is a sprite entry, presumably
// [ start, end, isBGM?, loopStart?, loopEnd? ] with times as strings.
107 for( k in setting ){
\r
109 if( X.Type.isArray( v ) && v !== urls){
\r
110 v = X.Object.cloneArray( v );
\r
// Convert every element except index 2 (the BGM flag) to a numeric time.
111 for( i = v.length; i; ){
\r
113 if( i !== 2 ) v[ i ] = X_AudioWrapper_timeStringToNumber( v[ i ] );
\r
115 if( v[ 2 ] ) bgms[ k ] = v;
\r
// Create the track pool.
120 for( i = 0; i < n; ++i ){
\r
121 if( i === 1 && X_Audio_Sprite_useVideoForMulti ){
\r
124 tracks.push( X.Audio.create( urls, option ) );
\r
// Wait for the last track to report a usable (or missing) backend.
128 tracks[ n - 1 ].listenOnce( [ 'backendfound', 'nobackend' ], this, X_Audio_Sprite_handleEvent );
\r
// Tear down the singleton: kill every track, clear presets and BGM state.
131 close : function(){
\r
132 var tracks = X_Audio_Sprite_TEMP.tracks,
\r
133 bgms = X_Audio_Sprite_TEMP.BGMs,
\r
134 presets = X_Audio_Sprite_TEMP.presets,
\r
137 while( tracks.length ){
\r
138 tracks.pop().kill();
\r
144 for( k in presets ){
\r
145 delete presets[ k ];
\r
// Reset all BGM bookkeeping to its initial state.
148 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
149 X_Audio_Sprite_TEMP.bgmPosition = 0;
\r
150 X_Audio_Sprite_TEMP.bgmName = '';
\r
151 X_Audio_Sprite_TEMP.bgmLooped = false;
\r
152 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
// Fragment of another member whose header (original ~154-155) is missing —
// presumably the touch-unlock handler: calling load() inside a user touch
// event unlocks audio playback on iOS. TODO confirm against the full file.
156 var wrapper = X_AudioProxy_getAudioWrapper( X_Audio_Sprite_TEMP.tracks[ 0 ] );
\r
157 //X_Audio_Sprite_inTouchAction = true;
\r
158 wrapper._rawObject.load();
\r
159 //X_Audio_Sprite_inTouchAction = false;
\r
163 * @return uid Number
\r
// Play the named sprite entry. BGM entries get a reserved track (multi-track)
// or share track 0 (single-track iOS); sound effects grab an idle track.
165 play : function( name ){
\r
166 var bgm = X_Audio_Sprite_TEMP.bgmTrack,
\r
167 tracks = X_Audio_Sprite_TEMP.tracks,
\r
168 bgms = X_Audio_Sprite_TEMP.BGMs,
\r
169 presets = X_Audio_Sprite_TEMP.presets,
\r
170 preset = presets[ name ],
\r
// --- BGM branch ---
174 if( bgms[ name ] ){
\r
// Switching to a different BGM: reset bookkeeping to its start position.
175 if( name !== X_Audio_Sprite_TEMP.bgmName ){
\r
177 X_Audio_Sprite_TEMP.bgmName = name;
\r
178 X_Audio_Sprite_TEMP.bgmPosition = preset[ 0 ];
\r
179 X_Audio_Sprite_TEMP.bgmPlaying = true;
\r
180 X_Audio_Sprite_TEMP.bgmLooped = false;
\r
// Multi-track: reserve an idle track for the BGM; single-track: track 0.
185 if( 1 < tracks.length ){
\r
186 track = X_Audio_Sprite_TEMP.bgmTrack = X_Audio_Sprite_getTrackEnded();
\r
188 track = X_Audio_Sprite_TEMP.bgmTrack = tracks[ 0 ];
\r
// play( start, end, autoplay?, loopStart?, loopEnd? ), then resume position.
191 .state( { looped : X_Audio_Sprite_TEMP.bgmLooped } )
\r
192 .play( preset[ 0 ], preset[ 1 ], true, preset[ 3 ], preset[ 4 ] )
\r
193 .seek( X_Audio_Sprite_TEMP.bgmPosition )
\r
194 .listen( 'looped', this, X_Audio_Sprite_handleEvent );
\r
// --- sound-effect branch, multi-track ---
196 if( 1 < tracks.length ){
\r
197 track = X_Audio_Sprite_getTrackEnded( X_Audio_Sprite_TEMP.bgmPlaying );
\r
199 .listen( 'looped', this, X_Audio_Sprite_handleEvent )
\r
200 .state( { looped : false } )
\r
201 .play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );
\r
// --- sound-effect branch, single track (iOS): remember the BGM position so
// handleEvent can restart it, then borrow track 0 for the effect ---
203 // single track, iOS
\r
205 X_Audio_Sprite_TEMP.bgmPosition = bgm.currentTime();
\r
206 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
208 track = tracks[ 0 ];
\r
210 .listen( 'looped', this, X_Audio_Sprite_handleEvent )
\r
211 .state( { looped : false } )
\r
212 .play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );
\r
// The returned uid is the track's index in the pool.
215 return tracks.indexOf( track );
\r
220 pause : function( uid ){
\r
221 var track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
222 if( X_Audio_Sprite_TEMP.bgmTrack === track ){
\r
223 X_Audio_Sprite_TEMP.bgmPosition = track.currentTime();
\r
224 X_Audio_Sprite_TEMP.bgmPlaying = false;
\r
225 X_Audio_Sprite_TEMP.bgmTrack = null;
\r
227 console.log( 'pause' );
\r
228 track && track.play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence ).seek( 0 );
\r
232 seek : function( uid, position ){
\r
233 var track = X_Audio_Sprite_TEMP.tracks[ uid ],
\r
236 delete track.seekTime;
\r
237 end = X_AudioWrapper_getEndTime( track );
\r
238 position <= end && X_AudioWrapper_getStartTime( track, end ) <= position && track.seek( postion );
\r
// Get or set volume. Fragmented: the branch header distinguishing the
// "all tracks" path from the "single track" path (original ~244-246) is
// missing from this extract.
243 volume : function( uid, opt_volume ){
\r
// Getter (global path): return the cached master volume.
247 if( opt_volume === undefined ){
\r
248 return X_Audio_Sprite_TEMP.volume;
\r
// Setter (global path): push the new volume to every track.
250 for( i = X_Audio_Sprite_TEMP.tracks.length; i; ){
\r
251 X_Audio_Sprite_TEMP.tracks[ --i ].volume( opt_volume );
\r
// Per-track path: -1 signals "no such track" to the caller.
255 track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
256 if( opt_volume === undefined ){
\r
257 return track ? track.volume() : -1;
\r
259 track && track.volume( opt_volume );
\r
// Get or set the playback state of the track identified by `uid`.
263 state : function( uid, opt_obj ){
\r
264 var track = X_Audio_Sprite_TEMP.tracks[ uid ];
\r
// Getter: fall back to a minimal state object when the track is missing.
266 if( opt_obj === undefined ){
\r
267 return track ? track.state() : { volume : X_Audio_Sprite_TEMP.volume };
\r
269 track && track.state( opt_obj );
\r
// Central relay for track events. This is a switch body from which several
// case labels and break statements are missing in this extract.
274 function X_Audio_Sprite_handleEvent( e ){
\r
276 case 'backendfound' :
\r
277 this.asyncDispatch( e );
\r
278 e.target.unlisten( 'nobackend', this, X_Audio_Sprite_handleEvent );
\r
// Web Audio decodes asynchronously — also wait for canplaythrough.
279 if( e.backendName === 'Web Audio' ){
\r
280 e.target.listen( 'canplaythrough', this, X_Audio_Sprite_handleEvent );
\r
// Presumably the 'nobackend' case (its label, original ~284, is missing).
285 this.asyncDispatch( e );
\r
286 e.target.unlisten( 'backendfound', this, X_Audio_Sprite_handleEvent );
\r
289 case 'canplaythrough' :
\r
290 this.asyncDispatch( e );
\r
// Presumably the 'looped' case: record that the BGM has wrapped once.
294 if( e.target === X_Audio_Sprite_TEMP.bgmTrack ){
\r
295 X_Audio_Sprite_TEMP.bgmLooped = true;
\r
297 // single track | iOS
\r
// After an effect finishes on the shared track, restart the pending BGM.
298 if( X_Audio_Sprite_TEMP.bgmPlaying && !X_Audio_Sprite_TEMP.bgmTrack ){
\r
299 X_Audio_Sprite_TEMP.bgmTrack = e.target;
\r
300 this.play( X_Audio_Sprite_TEMP.bgmName );
\r
305 case X.Event.KILL_INSTANCE :
\r