diff --git a/alienblaster/project/jni/application/src/main.cpp b/alienblaster/project/jni/application/src/main.cpp index 347188e9c..c0ed125d8 100644 --- a/alienblaster/project/jni/application/src/main.cpp +++ b/alienblaster/project/jni/application/src/main.cpp @@ -20,6 +20,7 @@ #include "game.h" #include "SDL.h" #include +#include using namespace std; diff --git a/alienblaster/project/jni/application/src/mixer.cpp b/alienblaster/project/jni/application/src/mixer.cpp index 8f205dcdd..283c9db8a 100644 --- a/alienblaster/project/jni/application/src/mixer.cpp +++ b/alienblaster/project/jni/application/src/mixer.cpp @@ -35,6 +35,9 @@ Mixer & Mixer::mixer() } Mixer::Mixer() { + enabled = false; + return; + if (SDL_InitSubSystem(SDL_INIT_AUDIO) < 0) { printf("Couldn't initialize SDL audio subsystem: %s\n", SDL_GetError()); exit(1); diff --git a/alienblaster/project/jni/application/src/soundDB.cpp b/alienblaster/project/jni/application/src/soundDB.cpp index a6970f5da..7d1f95b1f 100644 --- a/alienblaster/project/jni/application/src/soundDB.cpp +++ b/alienblaster/project/jni/application/src/soundDB.cpp @@ -41,18 +41,20 @@ Mix_Chunk *SoundDB::loadWav( string fn ) { if ( searchResult ) { return searchResult; } + __android_log_print(ANDROID_LOG_INFO, "Alien Blaster", (string( "Loading sound " ) + fn).c_str() ); + string fn1 = fn; // Check if file exist - FILE * inputFile = fopen( fn.c_str(), "rb"); + FILE * inputFile = fopen( fn1.c_str(), "rb"); if (!inputFile) { - if( fn.size() > 4 && fn.find(".wav") != string::npos ) { - fn = fn.substr( 0, fn.size() - 4 ) + ".ogg"; - inputFile = fopen( fn.c_str(), "rb"); + if( fn1.size() > 4 && fn1.find(".wav") != string::npos ) { + fn1 = fn1.substr( 0, fn1.size() - 4 ) + ".ogg"; + inputFile = fopen( fn1.c_str(), "rb"); } if (!inputFile) { - cout << "ERROR: file " << fn << " does not exist!" << endl; + cout << "ERROR: file " << fn1 << " does not exist!" 
<< endl; #ifdef ANDROID - __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load sound " ) + fn).c_str() ); + __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load sound " ) + fn1).c_str() ); #endif exit(1); } @@ -60,11 +62,11 @@ Mix_Chunk *SoundDB::loadWav( string fn ) { fclose(inputFile); // TODO: error-handling - Mix_Chunk *newSound = Mix_LoadWAV( fn.c_str() ); + Mix_Chunk *newSound = Mix_LoadWAV( fn1.c_str() ); if( !newSound ) { - cout << "ERROR: file " << fn << " cannot be loaded!" << endl; + cout << "ERROR: file " << fn1 << " cannot be loaded!" << endl; #ifdef ANDROID - __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load sound " ) + fn).c_str() ); + __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load sound " ) + fn1).c_str() ); #endif exit(1); } diff --git a/alienblaster/project/jni/application/src/surfaceDB.cpp b/alienblaster/project/jni/application/src/surfaceDB.cpp index f11c242b5..8f8650a48 100644 --- a/alienblaster/project/jni/application/src/surfaceDB.cpp +++ b/alienblaster/project/jni/application/src/surfaceDB.cpp @@ -51,32 +51,34 @@ SDL_Surface *SurfaceDB::loadSurface( string fn, bool alpha ) { if ( searchResult ) { return searchResult; } + __android_log_print(ANDROID_LOG_INFO, "Alien Blaster", (string( "Loading image " ) + fn).c_str() ); + string fn1 = fn; bool isPNG = false; // Check if file exist - FILE * inputFile = fopen( fn.c_str(), "rb"); + FILE * inputFile = fopen( fn1.c_str(), "rb"); if (!inputFile) { - if( fn.size() > 4 && fn.find(".bmp") != string::npos ) { + if( fn1.size() > 4 && fn1.find(".bmp") != string::npos ) { isPNG = true; - fn = fn.substr( 0, fn.size() - 4 ) + ".png"; - inputFile = fopen( fn.c_str(), "rb"); + fn1 = fn1.substr( 0, fn1.size() - 4 ) + ".png"; + inputFile = fopen( fn1.c_str(), "rb"); } if (!inputFile) { - cout << "ERROR: file " << fn << " does not exist!" << endl; + cout << "ERROR: file " << fn1 << " does not exist!" 
<< endl; #ifdef ANDROID - __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load image " ) + fn).c_str() ); + __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load image " ) + fn1).c_str() ); #endif exit(1); } } fclose(inputFile); - SDL_Surface *newSurface = isPNG ? IMG_Load( fn.c_str() ) : SDL_LoadBMP( fn.c_str() ); + SDL_Surface *newSurface = isPNG ? IMG_Load( fn1.c_str() ) : SDL_LoadBMP( fn1.c_str() ); if( newSurface == NULL ) { - cout << "ERROR: Cannot load image " << fn << endl; + cout << "ERROR: Cannot load image " << fn1 << endl; #ifdef ANDROID - __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load image " ) + fn).c_str() ); + __android_log_print(ANDROID_LOG_ERROR, "Alien Blaster", (string( "Cannot load image " ) + fn1).c_str() ); #endif exit(1); } diff --git a/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.c b/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.c index b21233f24..c303403e7 100644 --- a/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.c +++ b/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.c @@ -77,52 +77,33 @@ AudioBootStrap ANDROIDAUD_bootstrap = { }; -static SDL_mutex * audioMutex = NULL; -static SDL_cond * audioCond = NULL; -static SDL_cond * audioCond2 = NULL; static unsigned char * audioBuffer = NULL; static size_t audioBufferSize = 0; -static SDL_AudioSpec *audioFormat = NULL; -static int audioInitialized = 0; -static int audioPlayed = 0; + +// Extremely wicked JNI environment to call Java functions from C code static jbyteArray audioBufferJNI = NULL; static JNIEnv * jniEnv = NULL; +static jclass JavaAudioThreadClass = NULL; +static jobject JavaAudioThread = NULL; +static jmethodID JavaFillBuffer = NULL; +static jmethodID JavaInitAudio = NULL; +static jmethodID JavaDeinitAudio = NULL; + static Uint8 *ANDROIDAUD_GetAudioBuf(_THIS) { - return(this->hidden->mixbuf); + return(audioBuffer); } static void 
ANDROIDAUD_CloseAudio(_THIS)
 {
-	SDL_mutex * audioMutex1;
-
-	if( audioMutex != NULL )
-	{
-		audioMutex1 = audioMutex;
-		SDL_mutexP(audioMutex1);
-		audioInitialized = 0;
-		SDL_CondSignal(audioCond);
-		SDL_CondSignal(audioCond2);
-		audioMutex = NULL;
-		SDL_DestroyCond(audioCond);
-		SDL_DestroyCond(audioCond2);
-		audioCond = NULL;
-		audioCond2 = NULL;
-		audioFormat = NULL;
-		// TODO: this crashes JNI, so we're just memleaking it
-		/*
-		(*jniEnv)->ReleaseByteArrayElements(jniEnv, audioBufferJNI, (jbyte *)audioBuffer, 0);
-		(*jniEnv)->DeleteGlobalRef(jniEnv, audioBufferJNI);
-		*/
-		jniEnv = NULL;
-		audioBufferJNI = NULL;
-		audioBuffer = NULL;
-		audioBufferSize = 0;
-		SDL_mutexV(audioMutex1);
-		SDL_DestroyMutex(audioMutex1);
-
-	}
+	(*jniEnv)->DeleteGlobalRef(jniEnv, audioBufferJNI); /* FIXME(review): elements pinned by GetByteArrayElements are never released here */
+	audioBufferJNI = NULL;
+	audioBuffer = NULL;
+	audioBufferSize = 0;
+
+	(*jniEnv)->CallIntMethod( jniEnv, JavaAudioThread, JavaDeinitAudio );
+
 	if ( this->hidden != NULL ) {
 		SDL_free(this->hidden);
 		this->hidden = NULL;
@@ -131,6 +112,15 @@ static void ANDROIDAUD_CloseAudio(_THIS)
 static int ANDROIDAUD_OpenAudio(_THIS, const char *devname, int iscapture)
 {
+	SDL_AudioSpec *audioFormat = &this->spec;
+	jintArray initArray = NULL;
+	int initData[4] = { 0, 0, 0, 0 }; // { rate, channels, encoding, bufsize };
+	jobject bufferObj = NULL; // CallObjectMethod returns a jobject, not a jobject*
+	jboolean isCopy = JNI_TRUE;
+	/* NOTE: no locals named audioBuffer/audioBufferSize here - they would shadow */
+	/* the file-scope buffers that ANDROIDAUD_GetAudioBuf() hands back to SDL */
+	int bytesPerSample;
+
 	this->hidden = (struct SDL_PrivateAudioData *)
 		SDL_malloc((sizeof *this->hidden));
 	if ( this->hidden == NULL ) {
 		SDL_OutOfMemory();
@@ -143,39 +133,39 @@ static int ANDROIDAUD_OpenAudio(_THIS, const char *devname, int iscapture)
 		__android_log_print(ANDROID_LOG_ERROR, "libSDL", "Application requested unsupported audio format - only S8 and S16 are supported");
 		return (-1); // TODO: enable format conversion?
Don't know how to do that in SDL } + - if( audioMutex == NULL ) + initData[0] = audioFormat->freq; + initData[1] = audioFormat->channels; + bytesPerSample = (audioFormat->format & 0xFF) / 8; + initData[2] = ( bytesPerSample == 2 ) ? 1 : 0; + audioFormat->format = ( bytesPerSample == 2 ) ? AUDIO_S16 : AUDIO_S8; + initData[3] = audioFormat->size; + initArray = (*jniEnv)->NewIntArray(jniEnv, 4); + (*jniEnv)->SetIntArrayRegion(jniEnv, initArray, 0, 4, (jint *)initData); + + bufferObj = (*jniEnv)->CallObjectMethod( jniEnv, JavaAudioThread, JavaInitAudio, initArray ); + + if( ! bufferObj ) { - audioInitialized = 0; - audioFormat = &this->spec; - audioMutex = SDL_CreateMutex(); - audioCond = SDL_CreateCond(); - audioCond2 = SDL_CreateCond(); - audioPlayed == 0; + __android_log_print(ANDROID_LOG_INFO, "libSDL", "ANDROIDAUD_OpenAudio(): failed to get audio buffer from JNI"); + ANDROIDAUD_CloseAudio(this); + return(-1); } - SDL_mutexP(audioMutex); - - while( !audioInitialized ) - { - if( SDL_CondWaitTimeout( audioCond, audioMutex, 5000 ) != 0 ) - { - __android_log_print(ANDROID_LOG_INFO, "libSDL", "ANDROIDAUD_OpenAudio() failed! 
timeout when waiting callback");
-			SDL_mutexV(audioMutex);
-			ANDROIDAUD_CloseAudio(this);
-			return(-1);
-		}
-	}
+	audioBufferJNI = (jbyteArray)(*jniEnv)->NewGlobalRef(jniEnv, bufferObj);
+	audioBufferSize = (*jniEnv)->GetArrayLength(jniEnv, audioBufferJNI);
+	audioBuffer = (unsigned char *) (*jniEnv)->GetByteArrayElements(jniEnv, audioBufferJNI, &isCopy);
+	if( isCopy == JNI_TRUE )
+		__android_log_print(ANDROID_LOG_ERROR, "libSDL", "ANDROIDAUD_OpenAudio(): JNI returns a copy of byte array - no audio will be played");
+
+	bytesPerSample = (audioFormat->format & 0xFF) / 8;
+	audioFormat->samples = audioBufferSize / bytesPerSample / audioFormat->channels;
+	audioFormat->size = audioBufferSize;
+	SDL_memset(audioBuffer, audioFormat->silence, audioFormat->size);
 	SDL_CalculateAudioSpec(&this->spec);
-	this->hidden->mixbuf = audioBuffer;
-	this->hidden->mixlen = audioBufferSize;
-
-	audioFormat = NULL;
-
-	SDL_mutexV(audioMutex);
-
 	return(1);
 }
@@ -187,19 +177,16 @@ static void ANDROIDAUD_WaitAudio(_THIS)
 static void ANDROIDAUD_PlayAudio(_THIS)
 {
-	SDL_mutexP(audioMutex);
+	(*jniEnv)->ReleaseByteArrayElements(jniEnv, audioBufferJNI, (jbyte *)audioBuffer, 0);
+	audioBuffer = NULL;
-	//audioBuffer = this->hidden->mixbuf;
-	//audioBufferSize = this->hidden->mixlen;
+	(*jniEnv)->CallIntMethod( jniEnv, JavaAudioThread, JavaFillBuffer ); /* fillBuffer() pushes the mixed data to AudioTrack; calling deinitAudio here killed playback */
-	audioPlayed = 1;
-
-	SDL_CondSignal(audioCond2);
-	SDL_CondWaitTimeout( audioCond, audioMutex, 1000 );
+	jboolean isCopy = JNI_TRUE;
+	audioBuffer = (unsigned char *) (*jniEnv)->GetByteArrayElements(jniEnv, audioBufferJNI, &isCopy);
+	if( isCopy == JNI_TRUE )
+		__android_log_print(ANDROID_LOG_INFO, "libSDL", "ANDROIDAUD_PlayAudio() JNI returns a copy of byte array - that's slow");
-	this->hidden->mixbuf = audioBuffer;
-
-	SDL_mutexV(audioMutex);
 }
 
 #ifndef SDL_JAVA_PACKAGE_PATH
JAVA_EXPORT_NAME1(name,SDL_JAVA_PACKAGE_PATH)
 
-extern jintArray JAVA_EXPORT_NAME(AudioThread_nativeAudioInit) (JNIEnv * env, jobject jobj)
+extern int JAVA_EXPORT_NAME(AudioThread_nativeAudioInitJavaCallbacks) (JNIEnv * env, jobject thiz)
 {
-	jintArray ret = NULL;
-	int initData[4] = { 0, 0, 0, 0 }; // { rate, channels, encoding, bufsize };
+	__android_log_print(ANDROID_LOG_INFO, "libSDL", "nativeAudioInitJavaCallbacks(): enter");
+	jniEnv = env;
+	JavaAudioThread = (*jniEnv)->NewGlobalRef(jniEnv, thiz); /* a bare "thiz" is a local ref that dies when this native call returns */
-	if( audioMutex == NULL )
-		return ret;
+	JavaAudioThreadClass = (*jniEnv)->GetObjectClass(jniEnv, thiz);
+	JavaFillBuffer = (*jniEnv)->GetMethodID(jniEnv, JavaAudioThreadClass, "fillBuffer", "()I");
+	JavaInitAudio = (*jniEnv)->GetMethodID(jniEnv, JavaAudioThreadClass, "initAudio", "([I)[B");
+	JavaDeinitAudio = (*jniEnv)->GetMethodID(jniEnv, JavaAudioThreadClass, "deinitAudio", "()I");
+	if( ! JavaFillBuffer )
+		__android_log_print(ANDROID_LOG_ERROR, "libSDL", "nativeAudioInitJavaCallbacks(): JavaFillBuffer is NULL");
+	if( ! JavaInitAudio )
+		__android_log_print(ANDROID_LOG_ERROR, "libSDL", "nativeAudioInitJavaCallbacks(): JavaInitAudio is NULL");
+	if( ! JavaDeinitAudio )
+		__android_log_print(ANDROID_LOG_ERROR, "libSDL", "nativeAudioInitJavaCallbacks(): JavaDeinitAudio is NULL");
-	SDL_mutexP(audioMutex);
-	if( audioInitialized == 0 )
-	{
-		initData[0] = audioFormat->freq;
-		initData[1] = audioFormat->channels;
-		int bytesPerSample = (audioFormat->format & 0xFF) / 8;
-		initData[2] = ( bytesPerSample == 2 ) ? 1 : 0;
-		audioFormat->format = ( bytesPerSample == 2 ) ? 
AUDIO_S16 : AUDIO_S8; - initData[3] = audioFormat->size; - ret=(*env)->NewIntArray(env, 4); - (*env)->SetIntArrayRegion(env, ret, 0, 4, (jint *)initData); - } - - SDL_mutexV(audioMutex); - - return (ret); -}; - -extern jint JAVA_EXPORT_NAME(AudioThread_nativeAudioInit2) (JNIEnv * env, jobject jobj, jbyteArray buf) -{ - if( audioMutex == NULL ) - return 0; - - SDL_mutexP(audioMutex); - - if( audioInitialized == 0 ) - { - /* Allocate mixing buffer */ - audioBufferJNI = (jbyteArray*)(*env)->NewGlobalRef(env, buf); - audioBufferSize = (*env)->GetArrayLength(env, audioBufferJNI); - jboolean isCopy = JNI_TRUE; - audioBuffer = (unsigned char *) (*env)->GetByteArrayElements(env, audioBufferJNI, &isCopy); - if( isCopy == JNI_TRUE ) - __android_log_print(ANDROID_LOG_ERROR, "libSDL", "AudioThread_nativeAudioInit2() JNI returns a copy of byte array - no audio will be played"); - - jniEnv = env; - - int bytesPerSample = (audioFormat->format & 0xFF) / 8; - audioFormat->samples = audioBufferSize / bytesPerSample / audioFormat->channels; - audioFormat->size = audioBufferSize; - SDL_memset(audioBuffer, audioFormat->silence, audioFormat->size); - char t[512]; - //sprintf(t, "AudioThread_nativeAudioInit2() got byte array from JNI: size %i samples %i direct memory %i", audioBufferSize, audioFormat->samples, (isCopy == JNI_FALSE) ); - - /* - audioBuffer = (Uint8 *) SDL_AllocAudioMem(audioBufferSize); - if ( audioBuffer == NULL ) { - SDL_mutexV(audioMutex); - return NULL; - } - - ret = (*env)->NewDirectByteBuffer(env, audioBuffer, audioBufferSize); - */ - - audioInitialized = 1; - SDL_CondSignal(audioCond); - } - - SDL_mutexV(audioMutex); - - return 0; } - -extern jint JAVA_EXPORT_NAME(AudioThread_nativeAudioBufferLock) ( JNIEnv * env, jobject jobj ) -{ - int ret = 0; - - if( audioMutex == NULL ) - return(-1); - - SDL_mutexP(audioMutex); - - if( !audioInitialized ) - { - SDL_mutexV(audioMutex); - SDL_CondSignal(audioCond); - return (-1); - } - - if( audioPlayed == 0 ) - 
SDL_CondWaitTimeout(audioCond2, audioMutex, 1000); - - if( audioBuffer == NULL ) // Should not happen - ret = 0; - else - { - (*jniEnv)->ReleaseByteArrayElements(jniEnv, audioBufferJNI, (jbyte *)audioBuffer, 0); - audioBuffer == NULL; - ret = audioBufferSize; - } - - return ret; -}; - -extern jint JAVA_EXPORT_NAME(AudioThread_nativeAudioBufferUnlock) ( JNIEnv * env, jobject jobj ) -{ - if( audioMutex == NULL ) - return(-1); - - jboolean isCopy = JNI_TRUE; - audioBuffer = (unsigned char *) (*env)->GetByteArrayElements(env, audioBufferJNI, &isCopy); - if( isCopy == JNI_TRUE ) - __android_log_print(ANDROID_LOG_INFO, "libSDL", "AudioThread_nativeAudioBufferUnlock() JNI returns a copy of byte array - that's slow"); - - audioPlayed = 0; - - SDL_mutexV(audioMutex); - - SDL_CondSignal(audioCond); - - return 0; -} - diff --git a/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.h b/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.h index 53c32e67a..ae1e745d3 100644 --- a/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.h +++ b/alienblaster/project/jni/sdl/src/audio/android/SDL_androidaudio.h @@ -27,9 +27,6 @@ #include "../SDL_sysaudio.h" struct SDL_PrivateAudioData { - /* The file descriptor for the audio device */ - Uint8 *mixbuf; - Uint32 mixlen; }; #endif /* _SDL_androidaudio_h */ diff --git a/alienblaster/project/jni/sdl/src/video/android/SDL_androidvideo.c b/alienblaster/project/jni/sdl/src/video/android/SDL_androidvideo.c index 67fb38305..33f260ceb 100644 --- a/alienblaster/project/jni/sdl/src/video/android/SDL_androidvideo.c +++ b/alienblaster/project/jni/sdl/src/video/android/SDL_androidvideo.c @@ -238,7 +238,6 @@ JAVA_EXPORT_NAME(DemoRenderer_nativeDone) ( JNIEnv* env, jobject thiz ) void JAVA_EXPORT_NAME(DemoRenderer_nativeInitJavaCallbacks) ( JNIEnv* env, jobject thiz ) { - char classPath[1024]; JavaEnv = env; JavaRenderer = thiz; @@ -255,293 +254,3 @@ int CallJavaSwapBuffers() } - - - -/* Stuff from SDL 1.2 */ - -/* - 
- -// Pointer to in-memory video surface -static int memX = 0; -static int memY = 0; -// In-memory surfaces -static void * memBuffer1 = NULL; -static void * memBuffer2 = NULL; -static void * memBuffer = NULL; -static int sdl_opengl = 0; -// Some wicked GLES stuff -static GLuint texture = 0; - - -SDL_Surface *ANDROID_SetVideoMode(_THIS, SDL_Surface *current, - int width, int height, int bpp, Uint32 flags) -{ - __android_log_print(ANDROID_LOG_INFO, "libSDL", "SDL_SetVideoMode(): application requested mode %dx%d", width, height); - - if ( memBuffer1 ) - SDL_free( memBuffer1 ); - if ( memBuffer2 ) - SDL_free( memBuffer2 ); - - memBuffer = memBuffer1 = memBuffer2 = NULL; - - sdl_opengl = (flags & SDL_OPENGL) ? 1 : 0; - - memX = width; - memY = height; - - if( ! sdl_opengl ) - { - memBuffer1 = SDL_malloc(memX * memY * (bpp / 8)); - if ( ! memBuffer1 ) { - __android_log_print(ANDROID_LOG_INFO, "libSDL", "Couldn't allocate buffer for requested mode"); - SDL_SetError("Couldn't allocate buffer for requested mode"); - return(NULL); - } - SDL_memset(memBuffer1, 0, memX * memY * (bpp / 8)); - - if( flags & SDL_DOUBLEBUF ) - { - memBuffer2 = SDL_malloc(memX * memY * (bpp / 8)); - if ( ! memBuffer2 ) { - __android_log_print(ANDROID_LOG_INFO, "libSDL", "Couldn't allocate buffer for requested mode"); - SDL_SetError("Couldn't allocate buffer for requested mode"); - return(NULL); - } - SDL_memset(memBuffer2, 0, memX * memY * (bpp / 8)); - } - memBuffer = memBuffer1; - } - - if ( ! 
SDL_ReallocFormat(current, bpp, 0, 0, 0, 0) ) { - if(memBuffer) - SDL_free(memBuffer); - memBuffer = NULL; - __android_log_print(ANDROID_LOG_INFO, "libSDL", "Couldn't allocate new pixel format for requested mode"); - SDL_SetError("Couldn't allocate new pixel format for requested mode"); - return(NULL); - } - - current->flags = (flags & SDL_FULLSCREEN) | (flags & SDL_DOUBLEBUF) | (flags & SDL_OPENGL); - current->w = width; - current->h = height; - current->pitch = memX * (bpp / 8); - current->pixels = memBuffer; - - SdlGlRenderInit(); - - return(current); -} - -SDL_Rect **ANDROID_ListModes(_THIS, SDL_PixelFormat *format, Uint32 flags) -{ - if(format->BitsPerPixel != 16) - return NULL; - return SDL_modelist; -} - - -static int ANDROID_FlipHWSurface(_THIS, SDL_Surface *surface) -{ - if( ! sdl_opengl ) - { - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, memX, memY, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, memBuffer); - if( sWindowHeight < memY || sWindowWidth < memX ) - glDrawTexiOES(0, 0, 1, sWindowWidth, sWindowHeight); // Larger than screen - shrink to fit - else - glDrawTexiOES(0, sWindowHeight-memY, 1, memX, memY); // Smaller than screen - do not scale, it's faster that way - - if( surface->flags & SDL_DOUBLEBUF ) - { - if( memBuffer == memBuffer1 ) - memBuffer = memBuffer2; - else - memBuffer = memBuffer1; - surface->pixels = memBuffer; - } - } - - CallJavaSwapBuffers(); - - processAndroidTrackballKeyDelays( -1, 0 ); - - SDL_Delay(10); - - return(0); -}; - - -int ANDROID_SetColors(_THIS, int firstcolor, int ncolors, SDL_Color *colors) -{ - return(1); -} - -// TODO: use OpenGL textures here -static int ANDROID_AllocHWSurface(_THIS, SDL_Surface *surface) -{ - return(-1); -} -static void ANDROID_FreeHWSurface(_THIS, SDL_Surface *surface) -{ - return; -} - -static int ANDROID_LockHWSurface(_THIS, SDL_Surface *surface) -{ - return(0); -} - -static void ANDROID_UnlockHWSurface(_THIS, SDL_Surface *surface) -{ - return; -} - -static void ANDROID_UpdateRects(_THIS, int numrects, 
SDL_Rect *rects) -{ - ANDROID_FlipHWSurface(this, SDL_VideoSurface); -} - -void SdlGlRenderInit() -{ - // Set up an array of values to use as the sprite vertices. - static GLfloat vertices[] = - { - 0, 0, - 1, 0, - 0, 1, - 1, 1, - }; - - // Set up an array of values for the texture coordinates. - static GLfloat texcoords[] = - { - 0, 0, - 1, 0, - 0, 1, - 1, 1, - }; - - static GLint texcoordsCrop[] = - { - 0, 0, 0, 0, - }; - - static float clearColor = 0.0f; - static int clearColorDir = 1; - int textX, textY; - void * memBufferTemp; - - if( !sdl_opengl && memBuffer ) - { - // Texture sizes should be 2^n - textX = memX; - textY = memY; - - if( textX <= 256 ) - textX = 256; - else if( textX <= 512 ) - textX = 512; - else - textX = 1024; - - if( textY <= 256 ) - textY = 256; - else if( textY <= 512 ) - textY = 512; - else - textY = 1024; - - glViewport(0, 0, textX, textY); - - glClearColor(0,0,0,0); - // Set projection - glMatrixMode( GL_PROJECTION ); - glLoadIdentity(); - #if defined(GL_VERSION_ES_CM_1_0) - #define glOrtho glOrthof - #endif - glOrtho( 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f ); - - // Now Initialize modelview matrix - glMatrixMode( GL_MODELVIEW ); - glLoadIdentity(); - - glDisable(GL_DEPTH_TEST); - glDisable(GL_CULL_FACE); - glDisable(GL_DITHER); - glDisable(GL_MULTISAMPLE); - - glEnable(GL_TEXTURE_2D); - - glGenTextures(1, &texture); - - glBindTexture(GL_TEXTURE_2D, texture); - - glPixelStorei(GL_UNPACK_ALIGNMENT, 1); - - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - - glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST); - - glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); - - void * textBuffer = SDL_malloc( textX*textY*2 ); - SDL_memset( textBuffer, 0, textX*textY*2 ); - - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, textX, textY, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, 
textBuffer); - - glColor4f(1.0f, 1.0f, 1.0f, 1.0f); - - glEnableClientState(GL_VERTEX_ARRAY); - glEnableClientState(GL_TEXTURE_COORD_ARRAY); - - glVertexPointer(2, GL_FLOAT, 0, vertices); - glTexCoordPointer(2, GL_FLOAT, 0, texcoords); - - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); - - texcoordsCrop[0] = 0; - texcoordsCrop[1] = memY; - texcoordsCrop[2] = memX; - texcoordsCrop[3] = -memY; - - glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, texcoordsCrop); - - glFinish(); - - SDL_free( textBuffer ); - } -} - -// Stubs to get rid of crashing in OpenGL mode -// The implementation dependent data for the window manager cursor -struct WMcursor { - int unused ; -}; - -void ANDROID_FreeWMCursor(_THIS, WMcursor *cursor) { - SDL_free (cursor); - return; -} -WMcursor * ANDROID_CreateWMCursor(_THIS, Uint8 *data, Uint8 *mask, int w, int h, int hot_x, int hot_y) { - WMcursor * cursor; - cursor = (WMcursor *) SDL_malloc (sizeof (WMcursor)) ; - if (cursor == NULL) { - SDL_OutOfMemory () ; - return NULL ; - } - return cursor; -} -int ANDROID_ShowWMCursor(_THIS, WMcursor *cursor) { - return 1; -} -void ANDROID_WarpWMCursor(_THIS, Uint16 x, Uint16 y) { } -void ANDROID_MoveWMCursor(_THIS, int x, int y) { } - -static void ANDROID_UpdateRects(_THIS, int numrects, SDL_Rect *rects); - -*/ diff --git a/alienblaster/project/src/Audio.java b/alienblaster/project/src/Audio.java index a17ac2503..397842867 100644 --- a/alienblaster/project/src/Audio.java +++ b/alienblaster/project/src/Audio.java @@ -16,9 +16,7 @@ import java.io.*; import java.nio.ByteBuffer; -// TODO: make audio single-threaded, the same way as video - -class AudioThread extends Thread { +class AudioThread { private Activity mParent; private AudioTrack mAudio; @@ -30,26 +28,19 @@ class AudioThread extends Thread { mParent = parent; mAudio = null; mAudioBuffer = null; - this.setPriority(Thread.MAX_PRIORITY); - this.start(); + nativeAudioInitJavaCallbacks(); } - @Override - public void run() + public int fillBuffer() + { + 
mAudio.write( mAudioBuffer, 0, mAudioBuffer.length ); + return 1; + } + + public byte[] initAudio(int[] initParams) { - while( !isInterrupted() ) - { if( mAudio == null ) { - int[] initParams = nativeAudioInit(); - if( initParams == null ) - { - try { - sleep(200); - } catch( java.lang.InterruptedException e ) { }; - } - else - { int rate = initParams[0]; int channels = initParams[1]; channels = ( channels == 1 ) ? AudioFormat.CHANNEL_CONFIGURATION_MONO : @@ -61,7 +52,6 @@ class AudioThread extends Thread { if( initParams[3] > bufSize ) bufSize = initParams[3]; mAudioBuffer = new byte[bufSize]; - nativeAudioInit2(mAudioBuffer); mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, rate, channels, @@ -69,29 +59,22 @@ class AudioThread extends Thread { bufSize, AudioTrack.MODE_STREAM ); mAudio.play(); - } } - else - { - int len = nativeAudioBufferLock(); - if( len > 0 ) - mAudio.write( mAudioBuffer, 0, len ); - if( len < 0 ) - break; - nativeAudioBufferUnlock(); - } - } + return mAudioBuffer; + } + + public int deinitAudio() + { if( mAudio != null ) { mAudio.stop(); mAudio.release(); mAudio = null; } + mAudioBuffer = null; + return 1; } - private native int[] nativeAudioInit(); - private native int nativeAudioInit2(byte[] buf); - private native int nativeAudioBufferLock(); - private native int nativeAudioBufferUnlock(); + private native int nativeAudioInitJavaCallbacks(); } diff --git a/alienblaster/project/src/MainActivity.java b/alienblaster/project/src/MainActivity.java index 9edd95c69..8f5654e91 100644 --- a/alienblaster/project/src/MainActivity.java +++ b/alienblaster/project/src/MainActivity.java @@ -67,13 +67,7 @@ public class MainActivity extends Activity { { if( wakeLock != null ) wakeLock.release(); - if( mAudioThread != null ) - { - mAudioThread.interrupt(); - try { - mAudioThread.join(); - } catch( java.lang.InterruptedException e ) { }; - } + if( mGLView != null ) mGLView.exitApp(); super.onStop();