presets : {},\r
BGMs : {},\r
tracks : [],\r
- pauseTracks : [], // X_Event.DEACTIVATE によって pause した再生中のトラックたち。\r
+	pauseTracks : [], // Tracks that were playing and have been paused by X_EVENT_DEACTIVATE.
volume : 1,\r
bgmTrack : null,\r
bgmPosition : 0,\r
},\r
X_Audio_Sprite_instance;\r
\r
-X.Audio.Sprite = {\r
+X[ 'Audio' ][ 'Sprite' ] = {\r
\r
- shouldUse : X_Audio_Sprite_shouldUse,\r
+ 'shouldUse' : X_Audio_Sprite_shouldUse,\r
\r
- needTouchFirst : X_Audio_Sprite_needTouchFirst,\r
+ 'needTouchFirst' : X_Audio_Sprite_needTouchFirst,\r
\r
- enableMultiTrack : X_Audio_Sprite_enableMultiTrack,\r
+ 'enableMultiTrack' : X_Audio_Sprite_enableMultiTrack,\r
\r
- create : function( setting ){\r
+ 'create' : function( setting ){\r
// close()\r
if( X_Audio_Sprite_instance ){\r
X_Audio_Sprite_instance.close();\r
} else {\r
- X_Audio_Sprite_instance = X_Class_override( new X.EventDispatcher(), X_Audio_Sprite_members );\r
- X_ViewPort.listen( [ X_Event.VIEW_ACTIVATE, X_Event.VIEW_DEACTIVATE ], X_Audio_Sprite_instance, X_Audio_Sprite_handleEvent );\r
+ X_Audio_Sprite_instance = X_Class_override( X_EventDispatcher(), X_Audio_Sprite_members );\r
+ X_ViewPort[ 'listen' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], X_Audio_Sprite_instance, X_Audio_Sprite_handleEvent );\r
};\r
X_Audio_Sprite_instance.setup( setting );\r
return X_Audio_Sprite_instance;\r
* BGM_02 : [ '56.00', '1:15.230', true ]\r
* }\r
* \r
- * X_Event.BACKEND_READY\r
- * X_Event.BACKEND_NONE\r
+ * X_EVENT_BACKEND_READY\r
+ * X_EVENT_BACKEND_NONE\r
* \r
- * X_Event.READY\r
- * X_Event.MEDIA_LOOPED\r
- * X_Event.MEDIA_ENDED\r
+ * X_EVENT_READY\r
+ * X_EVENT_MEDIA_LOOPED\r
+ * X_EVENT_MEDIA_ENDED\r
* \r
*/\r
\r
for( k in setting ){\r
v = setting[ k ];\r
if( X_Type_isArray( v ) && v !== urls ){\r
- v = X.Object.cloneArray( v );\r
+ v = X_Object_cloneArray( v );\r
for( i = v.length; i; ){\r
--i;\r
if( i !== 2 ) v[ i ] = X_AudioWrapper_timeStringToNumber( v[ i ] );\r
if( video || ( i === 1 && X_Audio_Sprite_useVideoForMulti ) ){\r
option[ 'useVideo' ] = true;\r
};\r
- tracks.push( X.Audio( urls, X.Object.clone( option ) ) );\r
+ tracks.push( X.Audio( urls, X_Object_clone( option ) ) );\r
};\r
\r
- tracks[ n - 1 ].listenOnce( [ X_Event.BACKEND_READY, X_Event.BACKEND_NONE ], this, X_Audio_Sprite_handleEvent );\r
+ tracks[ n - 1 ][ 'listenOnce' ]( [ X_EVENT_BACKEND_READY, X_EVENT_BACKEND_NONE ], this, X_Audio_Sprite_handleEvent );\r
\r
X_Audio_Sprite_instance.numTracks = n;\r
},\r
k;\r
\r
while( tracks.length ){\r
- tracks.pop().kill();\r
+ tracks.pop()[ 'kill' ]();\r
};\r
\r
for( k in bgms ){\r
//tracks[ i ].play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence ).seek( 0 );\r
this.pause( i );\r
} else {\r
- X_Audio_getAudioWrapper( tracks[ i ] )._rawObject.load();\r
+ X_Audio_getAudioWrapper( tracks[ i ] )[ '_rawObject' ].load();\r
};\r
};\r
},\r
track = X_Audio_Sprite_TEMP.bgmTrack = tracks[ 0 ];\r
};\r
\r
- if( track.listen( [ X_Event.MEDIA_PLAYING, X_Event.MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).isPlaying() ){\r
+ if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).isPlaying() ){\r
track\r
.state( {\r
loop : true,\r
if( 1 < tracks.length ){\r
track = X_Audio_Sprite_getTrackEnded( X_Audio_Sprite_TEMP.bgmPlaying );\r
track\r
- .listen( [ X_Event.MEDIA_PLAYING, X_Event.MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent )\r
+ [ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent )\r
.state( { looped : false } )\r
.play( preset[ 0 ], preset[ 1 ], true, 0, X_Audio_Sprite_lengthSilence );\r
} else {\r
};\r
track = tracks[ 0 ];\r
\r
- if( track.listen( [ X_Event.MEDIA_PLAYING, X_Event.MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).isPlaying() ){\r
+ if( track[ 'listen' ]( [ X_EVENT_MEDIA_PLAYING, X_EVENT_MEDIA_BEFORE_LOOP ], this, X_Audio_Sprite_handleEvent ).isPlaying() ){\r
track\r
.state( {\r
loop : true,\r
X_Audio_Sprite_TEMP.bgmTrack = null;\r
};\r
track && track.play( 0, X_Audio_Sprite_lengthSilence, true, 0, X_Audio_Sprite_lengthSilence ).seek( 0 );\r
- this.asyncDispatch( X_Event.MEDIA_PAUSED );\r
+ this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PAUSED );\r
return this;\r
},\r
\r
var i, tracks, track, _e;\r
\r
switch( e.type ){\r
- case X_Event.BACKEND_READY :\r
+ case X_EVENT_BACKEND_READY :\r
_e = {\r
- type : X_Event.BACKEND_READY,\r
- source : e.source,\r
- backendName : e.backendName\r
+ 'type' : X_EVENT_BACKEND_READY,\r
+ 'source' : e[ 'source' ],\r
+ 'backendName' : e[ 'backendName' ]\r
};\r
\r
if( X_Audio_Sprite_needTouchFirst ){\r
_e[ 'needTouchForLoad' ] = true;\r
};\r
};\r
- this.asyncDispatch( _e );\r
+ this[ 'asyncDispatch' ]( _e );\r
\r
e.target\r
- .unlisten( X_Event.BACKEND_NONE, this, X_Audio_Sprite_handleEvent )\r
- .listenOnce( X_Event.READY, this, X_Audio_Sprite_handleEvent );\r
+ [ 'unlisten' ]( X_EVENT_BACKEND_NONE, this, X_Audio_Sprite_handleEvent )\r
+ [ 'listenOnce' ]( X_EVENT_READY, this, X_Audio_Sprite_handleEvent );\r
\r
// READY, needTouchForPlay, needTouchForLoad\r
if( X_Audio_HTMLAudioWrapper_durationFix ){\r
};\r
break;\r
\r
- case X_Event.BACKEND_NONE :\r
- this.asyncDispatch( X_Event.BACKEND_NONE );\r
- e.target.unlisten( X_Event.BACKEND_READY, this, X_Audio_Sprite_handleEvent );\r
+ case X_EVENT_BACKEND_NONE :\r
+ this[ 'asyncDispatch' ]( X_EVENT_BACKEND_NONE );\r
+ e.target[ 'unlisten' ]( X_EVENT_BACKEND_READY, this, X_Audio_Sprite_handleEvent );\r
break;\r
\r
- case X_Event.READY :\r
+ case X_EVENT_READY :\r
console.log( 'X.AudioSprite - Ready!' );\r
if( X_Audio_Sprite_needTouchAndroid ){\r
for( i = 0; i < X_Audio_Sprite_TEMP.tracks.length; ++i ){\r
X_Audio_Sprite_instance.pause( i );\r
};\r
- e.target.listenOnce( X_Event.MEDIA_PLAYING, this, this.asyncDispatch, [ X_Event.READY ] ); // Android 標準ブラウザ\r
+					e.target[ 'listenOnce' ]( X_EVENT_MEDIA_PLAYING, this, this.asyncDispatch, [ X_EVENT_READY ] ); // Android stock browser
return;\r
};\r
- this.asyncDispatch( X_Event.READY );\r
+ this[ 'asyncDispatch' ]( X_EVENT_READY );\r
break;\r
\r
- case X_Event.MEDIA_PLAYING :\r
- ( e.target === X_Audio_Sprite_TEMP.bgmTrack || !e.target.state().looped ) && this.asyncDispatch( X_Event.MEDIA_PLAYING );\r
+ case X_EVENT_MEDIA_PLAYING :\r
+ ( e.target === X_Audio_Sprite_TEMP.bgmTrack || !e.target.state().looped ) && this[ 'asyncDispatch' ]( X_EVENT_MEDIA_PLAYING );\r
break;\r
\r
- case X_Event.MEDIA_BEFORE_LOOP :\r
+ case X_EVENT_MEDIA_BEFORE_LOOP :\r
if( e.target === X_Audio_Sprite_TEMP.bgmTrack ){\r
X_Audio_Sprite_TEMP.bgmLooped = true;\r
- this.asyncDispatch( X_Event.MEDIA_LOOPED ); // TODO uid\r
+ this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid\r
} else {\r
if( e.target.state().looped ){\r
- //this.asyncDispatch( X_Event.MEDIA_LOOPED ); // TODO uid\r
+ //this[ 'asyncDispatch' ]( X_EVENT_MEDIA_LOOPED ); // TODO uid\r
} else {\r
- this.asyncDispatch( X_Event.MEDIA_ENDED ); // TODO uid\r
+ this[ 'asyncDispatch' ]( X_EVENT_MEDIA_ENDED ); // TODO uid\r
};\r
\r
// single track | iOS\r
};\r
break;\r
\r
- case X_Event.VIEW_ACTIVATE :\r
+ case X_EVENT_VIEW_ACTIVATE :\r
console.log( '■ アクティブ' );\r
// track.play(); or iOS need touch??\r
tracks = X_Audio_Sprite_TEMP.pauseTracks;\r
while( tracks.length ) tracks.pop().play();\r
break;\r
\r
- case X_Event.VIEW_DEACTIVATE :\r
+ case X_EVENT_VIEW_DEACTIVATE :\r
console.log( '■ デアクティブ' );\r
// track.pause();\r
tracks = X_Audio_Sprite_TEMP.tracks;\r
};\r
break;\r
\r
- case X_Event.KILL_INSTANCE :\r
- X_ViewPort.unlisten( [ X_Event.VIEW_ACTIVATE, X_Event.VIEW_DEACTIVATE ], this, X_Audio_Sprite_handleEvent );\r
+ case X_EVENT_KILL_INSTANCE :\r
+ X_ViewPort[ 'unlisten' ]( [ X_EVENT_VIEW_ACTIVATE, X_EVENT_VIEW_DEACTIVATE ], this, X_Audio_Sprite_handleEvent );\r
this.close();\r
break;\r
};\r