From 3e0a57b5f8b09c7f55064b4caff473b26e721b7a Mon Sep 17 00:00:00 2001 From: diaoshu Date: Sat, 31 Jan 2015 16:57:47 +0800 Subject: [PATCH 1/3] add AudioStreamer.podspec file to support CocoaPods; change the project to use ARC --- AudioStreamer.podspec | 15 + Classes/AudioStreamer.m | 2011 ---------------- Classes/{ => AudioStreamer}/AudioStreamer.h | 4 +- Classes/AudioStreamer/AudioStreamer.m | 2012 +++++++++++++++++ Classes/iPhoneStreamingPlayerAppDelegate.m | 7 - Classes/iPhoneStreamingPlayerViewController.m | 12 +- .../project.pbxproj | 22 +- main.m | 10 +- 8 files changed, 2054 insertions(+), 2039 deletions(-) create mode 100644 AudioStreamer.podspec delete mode 100644 Classes/AudioStreamer.m rename Classes/{ => AudioStreamer}/AudioStreamer.h (98%) create mode 100644 Classes/AudioStreamer/AudioStreamer.m diff --git a/AudioStreamer.podspec b/AudioStreamer.podspec new file mode 100644 index 0000000..f311bce --- /dev/null +++ b/AudioStreamer.podspec @@ -0,0 +1,15 @@ +Pod::Spec.new do |s| +s.name = 'AudioStreamer' +s.version = '0.1' +s.license = 'MIT' +s.summary = 'A streaming audio player class (AudioStreamer) for Mac OS X and iOS' +s.homepage = 'http://cocoawithlove.com' +s.author = { 'dejohn' => 'dongjia_9251@126.com' } +s.source = { :git => 'https://github.com/openboy2012/AudioStreamer.git', :tag => '0.1' } +s.ios.deployment_target = '5.1' +s.osx.deployment_target = '10.8' +s.source_files = 'Classes/AudioStreamer/*.{h,m}' +s.requires_arc = true +s.frameworks = 'AudioToolbox' +end + diff --git a/Classes/AudioStreamer.m b/Classes/AudioStreamer.m deleted file mode 100644 index 7296b2d..0000000 --- a/Classes/AudioStreamer.m +++ /dev/null @@ -1,2011 +0,0 @@ -// -// AudioStreamer.m -// StreamingAudioPlayer -// -// Created by Matt Gallagher on 27/09/08. -// Copyright 2008 Matt Gallagher. All rights reserved. -// -// This software is provided 'as-is', without any express or implied -// warranty. 
In no event will the authors be held liable for any damages -// arising from the use of this software. Permission is granted to anyone to -// use this software for any purpose, including commercial applications, and to -// alter it and redistribute it freely, subject to the following restrictions: -// -// 1. The origin of this software must not be misrepresented; you must not -// claim that you wrote the original software. If you use this software -// in a product, an acknowledgment in the product documentation would be -// appreciated but is not required. -// 2. Altered source versions must be plainly marked as such, and must not be -// misrepresented as being the original software. -// 3. This notice may not be removed or altered from any source -// distribution. -// - -#import "AudioStreamer.h" -#if TARGET_OS_IPHONE -#import -#endif - -#define BitRateEstimationMaxPackets 5000 -#define BitRateEstimationMinPackets 50 - -NSString * const ASStatusChangedNotification = @"ASStatusChangedNotification"; -NSString * const ASAudioSessionInterruptionOccuredNotification = @"ASAudioSessionInterruptionOccuredNotification"; - -NSString * const AS_NO_ERROR_STRING = @"No error."; -NSString * const AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING = @"File stream get property failed."; -NSString * const AS_FILE_STREAM_SEEK_FAILED_STRING = @"File stream seek failed."; -NSString * const AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING = @"Parse bytes failed."; -NSString * const AS_FILE_STREAM_OPEN_FAILED_STRING = @"Open audio file stream failed."; -NSString * const AS_FILE_STREAM_CLOSE_FAILED_STRING = @"Close audio file stream failed."; -NSString * const AS_AUDIO_QUEUE_CREATION_FAILED_STRING = @"Audio queue creation failed."; -NSString * const AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING = @"Audio buffer allocation failed."; -NSString * const AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING = @"Queueing of audio buffer failed."; -NSString * const AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING = @"Audio queue add 
listener failed."; -NSString * const AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING = @"Audio queue remove listener failed."; -NSString * const AS_AUDIO_QUEUE_START_FAILED_STRING = @"Audio queue start failed."; -NSString * const AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING = @"Audio queue buffers don't match."; -NSString * const AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING = @"Audio queue dispose failed."; -NSString * const AS_AUDIO_QUEUE_PAUSE_FAILED_STRING = @"Audio queue pause failed."; -NSString * const AS_AUDIO_QUEUE_STOP_FAILED_STRING = @"Audio queue stop failed."; -NSString * const AS_AUDIO_DATA_NOT_FOUND_STRING = @"No audio data found."; -NSString * const AS_AUDIO_QUEUE_FLUSH_FAILED_STRING = @"Audio queue flush failed."; -NSString * const AS_GET_AUDIO_TIME_FAILED_STRING = @"Audio queue get current time failed."; -NSString * const AS_AUDIO_STREAMER_FAILED_STRING = @"Audio playback failed"; -NSString * const AS_NETWORK_CONNECTION_FAILED_STRING = @"Network connection failed"; -NSString * const AS_AUDIO_BUFFER_TOO_SMALL_STRING = @"Audio packets are larger than kAQDefaultBufSize."; - -@interface AudioStreamer () -@property (readwrite) AudioStreamerState state; -@property (readwrite) AudioStreamerState laststate; - -- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream - fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID - ioFlags:(UInt32 *)ioFlags; -- (void)handleAudioPackets:(const void *)inInputData - numberBytes:(UInt32)inNumberBytes - numberPackets:(UInt32)inNumberPackets - packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; -- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ - buffer:(AudioQueueBufferRef)inBuffer; -- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ - propertyID:(AudioQueuePropertyID)inID; - -#if TARGET_OS_IPHONE -- (void)handleInterruptionChangeToState:(NSNotification *)notification; -#endif - -- (void)internalSeekToTime:(double)newSeekTime; -- (void)enqueueBuffer; -- 
(void)handleReadFromStream:(CFReadStreamRef)aStream - eventType:(CFStreamEventType)eventType; - -@end - -#pragma mark Audio Callback Function Implementations - -// -// ASPropertyListenerProc -// -// Receives notification when the AudioFileStream has audio packets to be -// played. In response, this function creates the AudioQueue, getting it -// ready to begin playback (playback won't begin until audio packets are -// sent to the queue in ASEnqueueBuffer). -// -// This function is adapted from Apple's example in AudioFileStreamExample with -// kAudioQueueProperty_IsRunning listening added. -// -static void ASPropertyListenerProc(void * inClientData, - AudioFileStreamID inAudioFileStream, - AudioFileStreamPropertyID inPropertyID, - UInt32 * ioFlags) -{ - // this is called by audio file stream when it finds property values - AudioStreamer* streamer = (AudioStreamer *)inClientData; - [streamer - handlePropertyChangeForFileStream:inAudioFileStream - fileStreamPropertyID:inPropertyID - ioFlags:ioFlags]; -} - -// -// ASPacketsProc -// -// When the AudioStream has packets to be played, this function gets an -// idle audio buffer and copies the audio packets into it. The calls to -// ASEnqueueBuffer won't return until there are buffers available (or the -// playback has been stopped). -// -// This function is adapted from Apple's example in AudioFileStreamExample with -// CBR functionality added. -// -static void ASPacketsProc( void * inClientData, - UInt32 inNumberBytes, - UInt32 inNumberPackets, - const void * inInputData, - AudioStreamPacketDescription *inPacketDescriptions) -{ - // this is called by audio file stream when it finds packets of audio - AudioStreamer* streamer = (AudioStreamer *)inClientData; - [streamer - handleAudioPackets:inInputData - numberBytes:inNumberBytes - numberPackets:inNumberPackets - packetDescriptions:inPacketDescriptions]; -} - -// -// ASAudioQueueOutputCallback -// -// Called from the AudioQueue when playback of specific buffers completes. 
This -// function signals from the AudioQueue thread to the AudioStream thread that -// the buffer is idle and available for copying data. -// -// This function is unchanged from Apple's example in AudioFileStreamExample. -// -static void ASAudioQueueOutputCallback(void* inClientData, - AudioQueueRef inAQ, - AudioQueueBufferRef inBuffer) -{ - // this is called by the audio queue when it has finished decoding our data. - // The buffer is now free to be reused. - AudioStreamer* streamer = (AudioStreamer*)inClientData; - [streamer handleBufferCompleteForQueue:inAQ buffer:inBuffer]; -} - -// -// ASAudioQueueIsRunningCallback -// -// Called from the AudioQueue when playback is started or stopped. This -// information is used to toggle the observable "isPlaying" property and -// set the "finished" flag. -// -static void ASAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID) -{ - AudioStreamer* streamer = (AudioStreamer *)inUserData; - [streamer handlePropertyChangeForQueue:inAQ propertyID:inID]; -} - -#if TARGET_OS_IPHONE -// -// ASAudioSessionInterruptionListener -// -// Invoked if the audio session is interrupted (like when the phone rings) -// -static void ASAudioSessionInterruptionListener(__unused void * inClientData, UInt32 inInterruptionState) { - [[NSNotificationCenter defaultCenter] postNotificationName:ASAudioSessionInterruptionOccuredNotification object:@(inInterruptionState)]; -} -#endif - -#pragma mark CFReadStream Callback Function Implementations - -// -// ReadStreamCallBack -// -// This is the callback for the CFReadStream from the network connection. This -// is where all network data is passed to the AudioFileStream. -// -// Invoked when an error occurs, the stream ends or we have data to read. 
-// -static void ASReadStreamCallBack -( - CFReadStreamRef aStream, - CFStreamEventType eventType, - void* inClientInfo -) -{ - AudioStreamer* streamer = (AudioStreamer *)inClientInfo; - [streamer handleReadFromStream:aStream eventType:eventType]; -} - -@implementation AudioStreamer - -@synthesize errorCode; -@synthesize state; -@synthesize laststate; -@synthesize bitRate; -@synthesize httpHeaders; -@synthesize fileExtension; - -// -// initWithURL -// -// Init method for the object. -// -- (id)initWithURL:(NSURL *)aURL -{ - self = [super init]; - if (self != nil) - { - url = [aURL retain]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruptionChangeToState:) name:ASAudioSessionInterruptionOccuredNotification object:nil]; - } - return self; -} - -// -// dealloc -// -// Releases instance memory. -// -- (void)dealloc -{ - [[NSNotificationCenter defaultCenter] removeObserver:self name:ASAudioSessionInterruptionOccuredNotification object:nil]; - [self stop]; - [url release]; - [fileExtension release]; - [super dealloc]; -} - -// -// isFinishing -// -// returns YES if the audio has reached a stopping condition. -// -- (BOOL)isFinishing -{ - @synchronized (self) - { - if ((errorCode != AS_NO_ERROR && state != AS_INITIALIZED) || - ((state == AS_STOPPING || state == AS_STOPPED) && - stopReason != AS_STOPPING_TEMPORARILY)) - { - return YES; - } - } - - return NO; -} - -// -// runLoopShouldExit -// -// returns YES if the run loop should exit. -// -- (BOOL)runLoopShouldExit -{ - @synchronized(self) - { - if (errorCode != AS_NO_ERROR || - (state == AS_STOPPED && - stopReason != AS_STOPPING_TEMPORARILY)) - { - return YES; - } - } - - return NO; -} - -// -// stringForErrorCode: -// -// Converts an error code to a string that can be localized or presented -// to the user. 
-// -// Parameters: -// anErrorCode - the error code to convert -// -// returns the string representation of the error code -// -+ (NSString *)stringForErrorCode:(AudioStreamerErrorCode)anErrorCode -{ - switch (anErrorCode) - { - case AS_NO_ERROR: - return AS_NO_ERROR_STRING; - case AS_FILE_STREAM_GET_PROPERTY_FAILED: - return AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING; - case AS_FILE_STREAM_SEEK_FAILED: - return AS_FILE_STREAM_SEEK_FAILED_STRING; - case AS_FILE_STREAM_PARSE_BYTES_FAILED: - return AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING; - case AS_AUDIO_QUEUE_CREATION_FAILED: - return AS_AUDIO_QUEUE_CREATION_FAILED_STRING; - case AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED: - return AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING; - case AS_AUDIO_QUEUE_ENQUEUE_FAILED: - return AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING; - case AS_AUDIO_QUEUE_ADD_LISTENER_FAILED: - return AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING; - case AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED: - return AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING; - case AS_AUDIO_QUEUE_START_FAILED: - return AS_AUDIO_QUEUE_START_FAILED_STRING; - case AS_AUDIO_QUEUE_BUFFER_MISMATCH: - return AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING; - case AS_FILE_STREAM_OPEN_FAILED: - return AS_FILE_STREAM_OPEN_FAILED_STRING; - case AS_FILE_STREAM_CLOSE_FAILED: - return AS_FILE_STREAM_CLOSE_FAILED_STRING; - case AS_AUDIO_QUEUE_DISPOSE_FAILED: - return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING; - case AS_AUDIO_QUEUE_PAUSE_FAILED: - return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING; - case AS_AUDIO_QUEUE_FLUSH_FAILED: - return AS_AUDIO_QUEUE_FLUSH_FAILED_STRING; - case AS_AUDIO_DATA_NOT_FOUND: - return AS_AUDIO_DATA_NOT_FOUND_STRING; - case AS_GET_AUDIO_TIME_FAILED: - return AS_GET_AUDIO_TIME_FAILED_STRING; - case AS_NETWORK_CONNECTION_FAILED: - return AS_NETWORK_CONNECTION_FAILED_STRING; - case AS_AUDIO_QUEUE_STOP_FAILED: - return AS_AUDIO_QUEUE_STOP_FAILED_STRING; - case AS_AUDIO_STREAMER_FAILED: - return AS_AUDIO_STREAMER_FAILED_STRING; - case 
AS_AUDIO_BUFFER_TOO_SMALL: - return AS_AUDIO_BUFFER_TOO_SMALL_STRING; - default: - return AS_AUDIO_STREAMER_FAILED_STRING; - } - - return AS_AUDIO_STREAMER_FAILED_STRING; -} - -// -// presentAlertWithTitle:message: -// -// Common code for presenting error dialogs -// -// Parameters: -// title - title for the dialog -// message - main test for the dialog -// -- (void)presentAlertWithTitle:(NSString*)title message:(NSString*)message -{ -#if TARGET_OS_IPHONE - UIAlertView *alert = [ - [[UIAlertView alloc] - initWithTitle:title - message:message - delegate:nil - cancelButtonTitle:NSLocalizedString(@"OK", @"") - otherButtonTitles: nil] - autorelease]; - [alert - performSelector:@selector(show) - onThread:[NSThread mainThread] - withObject:nil - waitUntilDone:NO]; -#else - NSAlert *alert = - [NSAlert - alertWithMessageText:title - defaultButton:NSLocalizedString(@"OK", @"") - alternateButton:nil - otherButton:nil - informativeTextWithFormat:message]; - [alert - performSelector:@selector(runModal) - onThread:[NSThread mainThread] - withObject:nil - waitUntilDone:NO]; -#endif -} - -// -// failWithErrorCode: -// -// Sets the playback state to failed and logs the error. -// -// Parameters: -// anErrorCode - the error condition -// -- (void)failWithErrorCode:(AudioStreamerErrorCode)anErrorCode -{ - @synchronized(self) - { - if (errorCode != AS_NO_ERROR) - { - // Only set the error once. 
- return; - } - - errorCode = anErrorCode; - - if (err) - { - char *errChars = (char *)&err; - NSLog(@"%@ err: %c%c%c%c %d\n", - [AudioStreamer stringForErrorCode:anErrorCode], - errChars[3], errChars[2], errChars[1], errChars[0], - (int)err); - } - else - { - NSLog(@"%@", [AudioStreamer stringForErrorCode:anErrorCode]); - } - - if (state == AS_PLAYING || - state == AS_PAUSED || - state == AS_BUFFERING) - { - self.state = AS_STOPPING; - stopReason = AS_STOPPING_ERROR; - AudioQueueStop(audioQueue, true); - } - - if (self.shouldDisplayAlertOnError) - [self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil) - message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)]; - } -} - -// -// mainThreadStateNotification -// -// Method invoked on main thread to send notifications to the main thread's -// notification center. -// -- (void)mainThreadStateNotification -{ - NSNotification *notification = - [NSNotification - notificationWithName:ASStatusChangedNotification - object:self]; - [[NSNotificationCenter defaultCenter] - postNotification:notification]; -} - -// -// state -// -// returns the state value. -// -- (AudioStreamerState)state -{ - @synchronized(self) - { - return state; - } -} - -// -// setState: -// -// Sets the state and sends a notification that the state has changed. -// -// This method -// -// Parameters: -// anErrorCode - the error condition -// -- (void)setState:(AudioStreamerState)aStatus -{ - @synchronized(self) - { - if (state != aStatus) - { - state = aStatus; - - if ([[NSThread currentThread] isEqual:[NSThread mainThread]]) - { - [self mainThreadStateNotification]; - } - else - { - [self - performSelectorOnMainThread:@selector(mainThreadStateNotification) - withObject:nil - waitUntilDone:NO]; - } - } - } -} - -// -// isPlaying -// -// returns YES if the audio currently playing. 
-// -- (BOOL)isPlaying -{ - if (state == AS_PLAYING) - { - return YES; - } - - return NO; -} - -// -// isPaused -// -// returns YES if the audio currently playing. -// -- (BOOL)isPaused -{ - if (state == AS_PAUSED) - { - return YES; - } - - return NO; -} - -// -// isWaiting -// -// returns YES if the AudioStreamer is waiting for a state transition of some -// kind. -// -- (BOOL)isWaiting -{ - @synchronized(self) - { - if ([self isFinishing] || - state == AS_STARTING_FILE_THREAD|| - state == AS_WAITING_FOR_DATA || - state == AS_WAITING_FOR_QUEUE_TO_START || - state == AS_BUFFERING) - { - return YES; - } - } - - return NO; -} - -// -// isIdle -// -// returns YES if the AudioStream is in the AS_INITIALIZED state (i.e. -// isn't doing anything). -// -- (BOOL)isIdle -{ - if (state == AS_INITIALIZED) - { - return YES; - } - - return NO; -} - -// -// isAborted -// -// returns YES if the AudioStream was stopped due to some errror, handled through failWithCodeError. -// -- (BOOL)isAborted -{ - if (state == AS_STOPPING && stopReason == AS_STOPPING_ERROR) - { - return YES; - } - - return NO; -} - -// -// hintForFileExtension: -// -// Generates a first guess for the file type based on the file's extension -// -// Parameters: -// fileExtension - the file extension -// -// returns a file type hint that can be passed to the AudioFileStream -// -+ (AudioFileTypeID)hintForFileExtension:(NSString *)fileExtension -{ - AudioFileTypeID fileTypeHint = kAudioFileAAC_ADTSType; - if ([fileExtension isEqual:@"mp3"]) - { - fileTypeHint = kAudioFileMP3Type; - } - else if ([fileExtension isEqual:@"wav"]) - { - fileTypeHint = kAudioFileWAVEType; - } - else if ([fileExtension isEqual:@"aifc"]) - { - fileTypeHint = kAudioFileAIFCType; - } - else if ([fileExtension isEqual:@"aiff"]) - { - fileTypeHint = kAudioFileAIFFType; - } - else if ([fileExtension isEqual:@"m4a"]) - { - fileTypeHint = kAudioFileM4AType; - } - else if ([fileExtension isEqual:@"mp4"]) - { - fileTypeHint = kAudioFileMPEG4Type; - 
} - else if ([fileExtension isEqual:@"caf"]) - { - fileTypeHint = kAudioFileCAFType; - } - else if ([fileExtension isEqual:@"aac"]) - { - fileTypeHint = kAudioFileAAC_ADTSType; - } - return fileTypeHint; -} - -// -// openReadStream -// -// Open the audioFileStream to parse data and the fileHandle as the data -// source. -// -- (BOOL)openReadStream -{ - @synchronized(self) - { - NSAssert([[NSThread currentThread] isEqual:internalThread], - @"File stream download must be started on the internalThread"); - NSAssert(stream == nil, @"Download stream already initialized"); - - // - // Create the HTTP GET request - // - CFHTTPMessageRef message= CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (CFURLRef)url, kCFHTTPVersion1_1); - - // - // If we are creating this request to seek to a location, set the - // requested byte range in the headers. - // - if (fileLength > 0 && seekByteOffset > 0) - { - CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"), - (CFStringRef)[NSString stringWithFormat:@"bytes=%ld-%ld", (long)seekByteOffset, (long)fileLength]); - discontinuous = YES; - } - - // - // Create the read stream that will receive data from the HTTP request - // - stream = CFReadStreamCreateForHTTPRequest(NULL, message); - CFRelease(message); - - // - // Enable stream redirection - // - if (CFReadStreamSetProperty( - stream, - kCFStreamPropertyHTTPShouldAutoredirect, - kCFBooleanTrue) == false) - { - [self failWithErrorCode:AS_FILE_STREAM_SET_PROPERTY_FAILED]; - - return NO; - } - - // - // Handle proxies - // - CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings(); - CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings); - CFRelease(proxySettings); - - // - // Handle SSL connections - // - if([[url scheme] isEqualToString:@"https"]) - { - NSDictionary *sslSettings = - [NSDictionary dictionaryWithObjectsAndKeys: - (NSString *)kCFStreamSocketSecurityLevelNegotiatedSSL, kCFStreamSSLLevel, - [NSNumber numberWithBool:YES], 
kCFStreamSSLAllowsExpiredCertificates, - [NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredRoots, - [NSNumber numberWithBool:YES], kCFStreamSSLAllowsAnyRoot, - [NSNumber numberWithBool:NO], kCFStreamSSLValidatesCertificateChain, - [NSNull null], kCFStreamSSLPeerName, - nil]; - - CFReadStreamSetProperty(stream, kCFStreamPropertySSLSettings, sslSettings); - } - - // - // We're now ready to receive data - // - self.state = AS_WAITING_FOR_DATA; - - // - // Open the stream - // - if (!CFReadStreamOpen(stream)) - { - CFRelease(stream); - - [self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED]; - - return NO; - } - - // - // Set our callback function to receive the data - // - CFStreamClientContext context = {0, self, NULL, NULL, NULL}; - CFReadStreamSetClient( - stream, - kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered, - ASReadStreamCallBack, - &context); - CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes); - } - - return YES; -} - -// -// startInternal -// -// This is the start method for the AudioStream thread. This thread is created -// because it will be blocked when there are no audio buffers idle (and ready -// to receive audio data). 
-// -// Activity in this thread: -// - Creation and cleanup of all AudioFileStream and AudioQueue objects -// - Receives data from the CFReadStream -// - AudioFileStream processing -// - Copying of data from AudioFileStream into audio buffers -// - Stopping of the thread because of end-of-file -// - Stopping due to error or failure -// -// Activity *not* in this thread: -// - AudioQueue playback and notifications (happens in AudioQueue thread) -// - Actual download of NSURLConnection data (NSURLConnection's thread) -// - Creation of the AudioStreamer (other, likely "main" thread) -// - Invocation of -start method (other, likely "main" thread) -// - User/manual invocation of -stop (other, likely "main" thread) -// -// This method contains bits of the "main" function from Apple's example in -// AudioFileStreamExample. -// -- (void)startInternal -{ - NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; - - @synchronized(self) - { - if (state != AS_STARTING_FILE_THREAD) - { - if (state != AS_STOPPING && - state != AS_STOPPED) - { - NSLog(@"### Not starting audio thread. State code is: %ld", (long)state); - } - self.state = AS_INITIALIZED; - [pool release]; - return; - } - - #if TARGET_OS_IPHONE - // - // Set the audio session category so that we continue to play if the - // iPhone/iPod auto-locks. - // - AudioSessionInitialize ( - NULL, // 'NULL' to use the default (main) run loop - NULL, // 'NULL' to use the default run loop mode - ASAudioSessionInterruptionListener, // a reference to your interruption callback - self // data to pass to your interruption listener callback - ); - UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback; - AudioSessionSetProperty ( - kAudioSessionProperty_AudioCategory, - sizeof (sessionCategory), - &sessionCategory - ); - AudioSessionSetActive(true); - #endif - - // initialize a mutex and condition so that we can block on buffers in use. 
- pthread_mutex_init(&queueBuffersMutex, NULL); - pthread_cond_init(&queueBufferReadyCondition, NULL); - - if (![self openReadStream]) - { - goto cleanup; - } - } - - // - // Process the run loop until playback is finished or failed. - // - BOOL isRunning = YES; - do - { - isRunning = [[NSRunLoop currentRunLoop] - runMode:NSDefaultRunLoopMode - beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]]; - - @synchronized(self) { - if (seekWasRequested) { - [self internalSeekToTime:requestedSeekTime]; - seekWasRequested = NO; - } - } - - // - // If there are no queued buffers, we need to check here since the - // handleBufferCompleteForQueue:buffer: should not change the state - // (may not enter the synchronized section). - // - if (buffersUsed == 0 && self.state == AS_PLAYING) - { - err = AudioQueuePause(audioQueue); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED]; - return; - } - self.state = AS_BUFFERING; - } - } while (isRunning && ![self runLoopShouldExit]); - -cleanup: - - @synchronized(self) - { - // - // Cleanup the read stream if it is still open - // - if (stream) - { - CFReadStreamClose(stream); - CFRelease(stream); - stream = nil; - } - - // - // Close the audio file strea, - // - if (audioFileStream) - { - err = AudioFileStreamClose(audioFileStream); - audioFileStream = nil; - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_CLOSE_FAILED]; - } - } - - // - // Dispose of the Audio Queue - // - if (audioQueue) - { - err = AudioQueueDispose(audioQueue, true); - audioQueue = nil; - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_DISPOSE_FAILED]; - } - } - - pthread_mutex_destroy(&queueBuffersMutex); - pthread_cond_destroy(&queueBufferReadyCondition); - -#if TARGET_OS_IPHONE - AudioSessionSetActive(false); -#endif - - [httpHeaders release]; - httpHeaders = nil; - - bytesFilled = 0; - packetsFilled = 0; - seekByteOffset = 0; - packetBufferSize = 0; - self.state = AS_INITIALIZED; - - [internalThread release]; - internalThread = 
nil; - } - - [pool release]; -} - -// -// start -// -// Calls startInternal in a new thread. -// -- (void)start -{ - @synchronized (self) - { - if (state == AS_PAUSED) - { - [self pause]; - } - else if (state == AS_INITIALIZED) - { - NSAssert([[NSThread currentThread] isEqual:[NSThread mainThread]], - @"Playback can only be started from the main thread."); - notificationCenter = - [[NSNotificationCenter defaultCenter] retain]; - self.state = AS_STARTING_FILE_THREAD; - internalThread = - [[NSThread alloc] - initWithTarget:self - selector:@selector(startInternal) - object:nil]; - [internalThread start]; - } - } -} - - -// internalSeekToTime: -// -// Called from our internal runloop to reopen the stream at a seeked location -// -- (void)internalSeekToTime:(double)newSeekTime -{ - if ([self calculatedBitRate] == 0.0 || fileLength <= 0) - { - return; - } - - // - // Calculate the byte offset for seeking - // - seekByteOffset = dataOffset + - (newSeekTime / self.duration) * (fileLength - dataOffset); - - // - // Attempt to leave 1 useful packet at the end of the file (although in - // reality, this may still seek too far if the file has a long trailer). - // - if (seekByteOffset > fileLength - 2 * packetBufferSize) - { - seekByteOffset = fileLength - 2 * packetBufferSize; - } - - // - // Store the old time from the audio queue and the time that we're seeking - // to so that we'll know the correct time progress after seeking. 
- // - seekTime = newSeekTime; - - // - // Attempt to align the seek with a packet boundary - // - double calculatedBitRate = [self calculatedBitRate]; - if (packetDuration > 0 && - calculatedBitRate > 0) - { - UInt32 ioFlags = 0; - SInt64 packetAlignedByteOffset; - SInt64 seekPacket = floor(newSeekTime / packetDuration); - err = AudioFileStreamSeek(audioFileStream, seekPacket, &packetAlignedByteOffset, &ioFlags); - if (!err && !(ioFlags & kAudioFileStreamSeekFlag_OffsetIsEstimated)) - { - seekTime -= ((seekByteOffset - dataOffset) - packetAlignedByteOffset) * 8.0 / calculatedBitRate; - seekByteOffset = packetAlignedByteOffset + dataOffset; - } - } - - // - // Close the current read straem - // - if (stream) - { - CFReadStreamClose(stream); - CFRelease(stream); - stream = nil; - } - - // - // Stop the audio queue - // - self.state = AS_STOPPING; - stopReason = AS_STOPPING_TEMPORARILY; - err = AudioQueueStop(audioQueue, true); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; - return; - } - - // - // Re-open the file stream. It will request a byte-range starting at - // seekByteOffset. - // - [self openReadStream]; -} - -// -// seekToTime: -// -// Attempts to seek to the new time. Will be ignored if the bitrate or fileLength -// are unknown. -// -// Parameters: -// newTime - the time to seek to -// -- (void)seekToTime:(double)newSeekTime -{ - @synchronized(self) - { - seekWasRequested = YES; - requestedSeekTime = newSeekTime; - } -} - -// -// progress -// -// returns the current playback progress. Will return zero if sampleRate has -// not yet been detected. 
-// -- (double)progress -{ - @synchronized(self) - { - if (sampleRate > 0 && (state == AS_STOPPING || ![self isFinishing])) - { - if (state != AS_PLAYING && state != AS_PAUSED && state != AS_BUFFERING && state != AS_STOPPING) - { - return lastProgress; - } - - AudioTimeStamp queueTime; - Boolean discontinuity; - err = AudioQueueGetCurrentTime(audioQueue, NULL, &queueTime, &discontinuity); - - const OSStatus AudioQueueStopped = 0x73746F70; // 0x73746F70 is 'stop' - if (err == AudioQueueStopped) - { - return lastProgress; - } - else if (err) - { - [self failWithErrorCode:AS_GET_AUDIO_TIME_FAILED]; - } - - double progress = seekTime + queueTime.mSampleTime / sampleRate; - if (progress < 0.0) - { - progress = 0.0; - } - - lastProgress = progress; - return progress; - } - } - - return lastProgress; -} - -// -// calculatedBitRate -// -// returns the bit rate, if known. Uses packet duration times running bits per -// packet if available, otherwise it returns the nominal bitrate. Will return -// zero if no useful option available. -// -- (double)calculatedBitRate -{ - if (packetDuration && processedPacketsCount > BitRateEstimationMinPackets) - { - double averagePacketByteSize = processedPacketsSizeTotal / processedPacketsCount; - return 8.0 * averagePacketByteSize / packetDuration; - } - - if (bitRate) - { - return (double)bitRate; - } - - return 0; -} - -// -// duration -// -// Calculates the duration of available audio from the bitRate and fileLength. -// -// returns the calculated duration in seconds. -// -- (double)duration -{ - double calculatedBitRate = [self calculatedBitRate]; - - if (calculatedBitRate == 0 || fileLength == 0) - { - return 0.0; - } - - return (fileLength - dataOffset) / (calculatedBitRate * 0.125); -} - -// -// pause -// -// A togglable pause function. 
-// -- (void)pause -{ - @synchronized(self) - { - if (state == AS_PLAYING || state == AS_STOPPING) - { - err = AudioQueuePause(audioQueue); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED]; - return; - } - self.laststate = state; - self.state = AS_PAUSED; - } - else if (state == AS_PAUSED) - { - err = AudioQueueStart(audioQueue, NULL); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; - return; - } - self.state = self.laststate; - } - } -} - -// -// stop -// -// This method can be called to stop downloading/playback before it completes. -// It is automatically called when an error occurs. -// -// If playback has not started before this method is called, it will toggle the -// "isPlaying" property so that it is guaranteed to transition to true and -// back to false -// -- (void)stop -{ - @synchronized(self) - { - if (audioQueue && - (state == AS_PLAYING || state == AS_PAUSED || - state == AS_BUFFERING || state == AS_WAITING_FOR_QUEUE_TO_START)) - { - self.state = AS_STOPPING; - stopReason = AS_STOPPING_USER_ACTION; - err = AudioQueueStop(audioQueue, true); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; - return; - } - } - else if (state != AS_INITIALIZED) - { - self.state = AS_STOPPED; - stopReason = AS_STOPPING_USER_ACTION; - } - seekWasRequested = NO; - } - - while (state != AS_INITIALIZED) - { - [NSThread sleepForTimeInterval:0.1]; - } -} - -// -// handleReadFromStream:eventType: -// -// Reads data from the network file stream into the AudioFileStream -// -// Parameters: -// aStream - the network file stream -// eventType - the event which triggered this method -// -- (void)handleReadFromStream:(CFReadStreamRef)aStream - eventType:(CFStreamEventType)eventType -{ - if (aStream != stream) - { - // - // Ignore messages from old streams - // - return; - } - - if (eventType == kCFStreamEventErrorOccurred) - { - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; - } - else if (eventType == 
kCFStreamEventEndEncountered) - { - @synchronized(self) - { - if ([self isFinishing]) - { - return; - } - } - - // - // If there is a partially filled buffer, pass it to the AudioQueue for - // processing - // - if (bytesFilled) - { - if (self.state == AS_WAITING_FOR_DATA) - { - // - // Force audio data smaller than one whole buffer to play. - // - self.state = AS_FLUSHING_EOF; - } - [self enqueueBuffer]; - } - - @synchronized(self) - { - if (state == AS_WAITING_FOR_DATA) - { - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; - } - - // - // We left the synchronized section to enqueue the buffer so we - // must check that we are !finished again before touching the - // audioQueue - // - else if (![self isFinishing]) - { - if (audioQueue) - { - // - // Set the progress at the end of the stream - // - err = AudioQueueFlush(audioQueue); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED]; - return; - } - - self.state = AS_STOPPING; - stopReason = AS_STOPPING_EOF; - err = AudioQueueStop(audioQueue, false); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED]; - return; - } - } - else - { - self.state = AS_STOPPED; - stopReason = AS_STOPPING_EOF; - } - } - } - } - else if (eventType == kCFStreamEventHasBytesAvailable) - { - if (!httpHeaders) - { - CFTypeRef message = - CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader); - httpHeaders = - (NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message); - CFRelease(message); - - // - // Only read the content length if we seeked to time zero, otherwise - // we only have a subset of the total bytes. - // - if (seekByteOffset == 0) - { - fileLength = [[httpHeaders objectForKey:@"Content-Length"] integerValue]; - } - } - - if (!audioFileStream) - { - // - // Attempt to guess the file type from the URL. Reading the MIME type - // from the httpHeaders might be a better approach since lots of - // URL's don't have the right extension. 
- // - // If you have a fixed file-type, you may want to hardcode this. - // - if (!self.fileExtension) - { - self.fileExtension = [[url path] pathExtension]; - } - AudioFileTypeID fileTypeHint = - [AudioStreamer hintForFileExtension:self.fileExtension]; - - // create an audio file stream parser - err = AudioFileStreamOpen(self, ASPropertyListenerProc, ASPacketsProc, - fileTypeHint, &audioFileStream); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED]; - return; - } - } - - UInt8 bytes[kAQDefaultBufSize]; - CFIndex length; - @synchronized(self) - { - if ([self isFinishing] || !CFReadStreamHasBytesAvailable(stream)) - { - return; - } - - // - // Read the bytes from the stream - // - length = CFReadStreamRead(stream, bytes, kAQDefaultBufSize); - - if (length == -1) - { - [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; - return; - } - - if (length == 0) - { - return; - } - } - - if (discontinuous) - { - err = AudioFileStreamParseBytes(audioFileStream, length, bytes, kAudioFileStreamParseFlag_Discontinuity); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; - return; - } - } - else - { - err = AudioFileStreamParseBytes(audioFileStream, length, bytes, 0); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; - return; - } - } - } -} - -// -// enqueueBuffer -// -// Called from ASPacketsProc and connectionDidFinishLoading to pass filled audio -// bufffers (filled by ASPacketsProc) to the AudioQueue for playback. This -// function does not return until a buffer is idle for further filling or -// the AudioQueue is stopped. -// -// This function is adapted from Apple's example in AudioFileStreamExample with -// CBR functionality added. 
-// -- (void)enqueueBuffer -{ - @synchronized(self) - { - if ([self isFinishing] || stream == 0) - { - return; - } - - inuse[fillBufferIndex] = true; // set in use flag - buffersUsed++; - - // enqueue buffer - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; - fillBuf->mAudioDataByteSize = bytesFilled; - - if (packetsFilled) - { - err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, packetsFilled, packetDescs); - } - else - { - err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, 0, NULL); - } - - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_ENQUEUE_FAILED]; - return; - } - - - if (state == AS_BUFFERING || - state == AS_WAITING_FOR_DATA || - state == AS_FLUSHING_EOF || - (state == AS_STOPPED && stopReason == AS_STOPPING_TEMPORARILY)) - { - // - // Fill all the buffers before starting. This ensures that the - // AudioFileStream stays a small amount ahead of the AudioQueue to - // avoid an audio glitch playing streaming files on iPhone SDKs < 3.0 - // - if (state == AS_FLUSHING_EOF || buffersUsed == kNumAQBufs - 1) - { - if (self.state == AS_BUFFERING) - { - err = AudioQueueStart(audioQueue, NULL); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; - return; - } - self.state = AS_PLAYING; - } - else - { - self.state = AS_WAITING_FOR_QUEUE_TO_START; - - err = AudioQueueStart(audioQueue, NULL); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; - return; - } - } - } - } - - // go to next buffer - if (++fillBufferIndex >= kNumAQBufs) fillBufferIndex = 0; - bytesFilled = 0; // reset bytes filled - packetsFilled = 0; // reset packets filled - } - - // wait until next buffer is not in use - pthread_mutex_lock(&queueBuffersMutex); - while (inuse[fillBufferIndex]) - { - pthread_cond_wait(&queueBufferReadyCondition, &queueBuffersMutex); - } - pthread_mutex_unlock(&queueBuffersMutex); -} - -// -// createQueue -// -// Method to create the AudioQueue from the parameters gathered by the -// AudioFileStream. 
-// -// Creation is deferred to the handling of the first audio packet (although -// it could be handled any time after kAudioFileStreamProperty_ReadyToProducePackets -// is true). -// -- (void)createQueue -{ - sampleRate = asbd.mSampleRate; - packetDuration = asbd.mFramesPerPacket / sampleRate; - - // create the audio queue - err = AudioQueueNewOutput(&asbd, ASAudioQueueOutputCallback, self, NULL, NULL, 0, &audioQueue); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_CREATION_FAILED]; - return; - } - - // start the queue if it has not been started already - // listen to the "isRunning" property - err = AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning, ASAudioQueueIsRunningCallback, self); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_ADD_LISTENER_FAILED]; - return; - } - - // get the packet size if it is available - UInt32 sizeOfUInt32 = sizeof(UInt32); - err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_PacketSizeUpperBound, &sizeOfUInt32, &packetBufferSize); - if (err || packetBufferSize == 0) - { - err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MaximumPacketSize, &sizeOfUInt32, &packetBufferSize); - if (err || packetBufferSize == 0) - { - // No packet size available, just use the default - packetBufferSize = kAQDefaultBufSize; - } - } - - // allocate audio queue buffers - for (unsigned int i = 0; i < kNumAQBufs; ++i) - { - err = AudioQueueAllocateBuffer(audioQueue, packetBufferSize, &audioQueueBuffer[i]); - if (err) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED]; - return; - } - } - - // get the cookie size - UInt32 cookieSize; - Boolean writable; - OSStatus ignorableError; - ignorableError = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable); - if (ignorableError) - { - return; - } - - // get the cookie data - void* cookieData = calloc(1, cookieSize); - ignorableError = 
AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData); - if (ignorableError) - { - return; - } - - // set the cookie on the queue. - ignorableError = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize); - free(cookieData); - if (ignorableError) - { - return; - } -} - -// -// handlePropertyChangeForFileStream:fileStreamPropertyID:ioFlags: -// -// Object method which handles implementation of ASPropertyListenerProc -// -// Parameters: -// inAudioFileStream - should be the same as self->audioFileStream -// inPropertyID - the property that changed -// ioFlags - the ioFlags passed in -// -- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream - fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID - ioFlags:(UInt32 *)ioFlags -{ - @synchronized(self) - { - if ([self isFinishing]) - { - return; - } - - if (inPropertyID == kAudioFileStreamProperty_ReadyToProducePackets) - { - discontinuous = true; - } - else if (inPropertyID == kAudioFileStreamProperty_DataOffset) - { - SInt64 offset; - UInt32 offsetSize = sizeof(offset); - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataOffset, &offsetSize, &offset); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; - return; - } - dataOffset = offset; - - if (audioDataByteCount) - { - fileLength = dataOffset + audioDataByteCount; - } - } - else if (inPropertyID == kAudioFileStreamProperty_AudioDataByteCount) - { - UInt32 byteCountSize = sizeof(UInt64); - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_AudioDataByteCount, &byteCountSize, &audioDataByteCount); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; - return; - } - fileLength = dataOffset + audioDataByteCount; - } - else if (inPropertyID == kAudioFileStreamProperty_DataFormat) - { - if (asbd.mSampleRate == 0) - { - UInt32 asbdSize = 
sizeof(asbd); - - // get the stream format. - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; - return; - } - } - } - else if (inPropertyID == kAudioFileStreamProperty_FormatList) - { - Boolean outWriteable; - UInt32 formatListSize; - err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, &outWriteable); - if (err) - { - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; - return; - } - - AudioFormatListItem *formatList = malloc(formatListSize); - err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList); - if (err) - { - free(formatList); - [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; - return; - } - - for (int i = 0; i * sizeof(AudioFormatListItem) < formatListSize; i += sizeof(AudioFormatListItem)) - { - AudioStreamBasicDescription pasbd = formatList[i].mASBD; - - if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE || - pasbd.mFormatID == kAudioFormatMPEG4AAC_HE_V2) - { - // - // We've found HE-AAC, remember this to tell the audio queue - // when we construct it. 
- // -#if !TARGET_IPHONE_SIMULATOR - asbd = pasbd; -#endif - break; - } - } - free(formatList); - } - else - { -// NSLog(@"Property is %c%c%c%c", -// ((char *)&inPropertyID)[3], -// ((char *)&inPropertyID)[2], -// ((char *)&inPropertyID)[1], -// ((char *)&inPropertyID)[0]); - } - } -} - -// -// handleAudioPackets:numberBytes:numberPackets:packetDescriptions: -// -// Object method which handles the implementation of ASPacketsProc -// -// Parameters: -// inInputData - the packet data -// inNumberBytes - byte size of the data -// inNumberPackets - number of packets in the data -// inPacketDescriptions - packet descriptions -// -- (void)handleAudioPackets:(const void *)inInputData - numberBytes:(UInt32)inNumberBytes - numberPackets:(UInt32)inNumberPackets - packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; -{ - @synchronized(self) - { - if ([self isFinishing]) - { - return; - } - - if (bitRate == 0) - { - // - // m4a and a few other formats refuse to parse the bitrate so - // we need to set an "unparseable" condition here. If you know - // the bitrate (parsed it another way) you can set it on the - // class if needed. - // - bitRate = ~0; - } - - // we have successfully read the first packests from the audio stream, so - // clear the "discontinuous" flag - if (discontinuous) - { - discontinuous = false; - } - - if (!audioQueue) - { - [self createQueue]; - } - } - - // the following code assumes we're streaming VBR data. for CBR data, the second branch is used. - if (inPacketDescriptions) - { - for (int i = 0; i < inNumberPackets; ++i) - { - SInt64 packetOffset = inPacketDescriptions[i].mStartOffset; - SInt64 packetSize = inPacketDescriptions[i].mDataByteSize; - size_t bufSpaceRemaining; - - if (processedPacketsCount < BitRateEstimationMaxPackets) - { - processedPacketsSizeTotal += packetSize; - processedPacketsCount += 1; - } - - @synchronized(self) - { - // If the audio was terminated before this point, then - // exit. 
- if ([self isFinishing]) - { - return; - } - - if (packetSize > packetBufferSize) - { - [self failWithErrorCode:AS_AUDIO_BUFFER_TOO_SMALL]; - } - - bufSpaceRemaining = packetBufferSize - bytesFilled; - } - - // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. - if (bufSpaceRemaining < packetSize) - { - [self enqueueBuffer]; - } - - @synchronized(self) - { - // If the audio was terminated while waiting for a buffer, then - // exit. - if ([self isFinishing]) - { - return; - } - - // - // If there was some kind of issue with enqueueBuffer and we didn't - // make space for the new audio data then back out - // - if (bytesFilled + packetSize > packetBufferSize) - { - return; - } - - // copy data to the audio queue buffer - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; - memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)inInputData + packetOffset, packetSize); - - // fill out packet description - packetDescs[packetsFilled] = inPacketDescriptions[i]; - packetDescs[packetsFilled].mStartOffset = bytesFilled; - // keep track of bytes filled and packets filled - bytesFilled += packetSize; - packetsFilled += 1; - } - - // if that was the last free packet description, then enqueue the buffer. - size_t packetsDescsRemaining = kAQMaxPacketDescs - packetsFilled; - if (packetsDescsRemaining == 0) { - [self enqueueBuffer]; - } - } - } - else - { - size_t offset = 0; - while (inNumberBytes) - { - // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. - size_t bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; - if (bufSpaceRemaining < inNumberBytes) - { - [self enqueueBuffer]; - } - - @synchronized(self) - { - // If the audio was terminated while waiting for a buffer, then - // exit. 
- if ([self isFinishing]) - { - return; - } - - bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; - size_t copySize; - if (bufSpaceRemaining < inNumberBytes) - { - copySize = bufSpaceRemaining; - } - else - { - copySize = inNumberBytes; - } - - // - // If there was some kind of issue with enqueueBuffer and we didn't - // make space for the new audio data then back out - // - if (bytesFilled > packetBufferSize) - { - return; - } - - // copy data to the audio queue buffer - AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; - memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)(inInputData + offset), copySize); - - - // keep track of bytes filled and packets filled - bytesFilled += copySize; - packetsFilled = 0; - inNumberBytes -= copySize; - offset += copySize; - } - } - } -} - -// -// handleBufferCompleteForQueue:buffer: -// -// Handles the buffer completetion notification from the audio queue -// -// Parameters: -// inAQ - the queue -// inBuffer - the buffer -// -- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ - buffer:(AudioQueueBufferRef)inBuffer -{ - unsigned int bufIndex = -1; - for (unsigned int i = 0; i < kNumAQBufs; ++i) - { - if (inBuffer == audioQueueBuffer[i]) - { - bufIndex = i; - break; - } - } - - if (bufIndex == -1) - { - [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_MISMATCH]; - pthread_mutex_lock(&queueBuffersMutex); - pthread_cond_signal(&queueBufferReadyCondition); - pthread_mutex_unlock(&queueBuffersMutex); - return; - } - - // signal waiting thread that the buffer is free. - pthread_mutex_lock(&queueBuffersMutex); - inuse[bufIndex] = false; - buffersUsed--; - -// -// Enable this logging to measure how many buffers are queued at any time. 
-// -#if LOG_QUEUED_BUFFERS - NSLog(@"Queued buffers: %ld", buffersUsed); -#endif - - pthread_cond_signal(&queueBufferReadyCondition); - pthread_mutex_unlock(&queueBuffersMutex); -} - -- (void)handlePropertyChange:(NSNumber *)num -{ - [self handlePropertyChangeForQueue:NULL propertyID:[num intValue]]; -} - -// -// handlePropertyChangeForQueue:propertyID: -// -// Implementation for ASAudioQueueIsRunningCallback -// -// Parameters: -// inAQ - the audio queue -// inID - the property ID -// -- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ - propertyID:(AudioQueuePropertyID)inID -{ - NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; - - if (![[NSThread currentThread] isEqual:internalThread]) - { - [self - performSelector:@selector(handlePropertyChange:) - onThread:internalThread - withObject:[NSNumber numberWithInt:inID] - waitUntilDone:NO - modes:[NSArray arrayWithObject:NSDefaultRunLoopMode]]; - return; - } - @synchronized(self) - { - if (inID == kAudioQueueProperty_IsRunning) - { - if (state == AS_STOPPING) - { - // Should check value of isRunning to ensure this kAudioQueueProperty_IsRunning isn't - // the *start* of a very short stream - UInt32 isRunning = 0; - UInt32 size = sizeof(UInt32); - AudioQueueGetProperty(audioQueue, inID, &isRunning, &size); - if (isRunning == 0) - { - self.state = AS_STOPPED; - } - } - else if (state == AS_WAITING_FOR_QUEUE_TO_START) - { - // - // Note about this bug avoidance quirk: - // - // On cleanup of the AudioQueue thread, on rare occasions, there would - // be a crash in CFSetContainsValue as a CFRunLoopObserver was getting - // removed from the CFRunLoop. - // - // After lots of testing, it appeared that the audio thread was - // attempting to remove CFRunLoop observers from the CFRunLoop after the - // thread had already deallocated the run loop. 
- // - // By creating an NSRunLoop for the AudioQueue thread, it changes the - // thread destruction order and seems to avoid this crash bug -- or - // at least I haven't had it since (nasty hard to reproduce error!) - // - [NSRunLoop currentRunLoop]; - - self.state = AS_PLAYING; - } - else - { - NSLog(@"AudioQueue changed state in unexpected way."); - } - } - } - - [pool release]; -} - -#if TARGET_OS_IPHONE -// -// handleInterruptionChangeForQueue:propertyID: -// -// Implementation for ASAudioQueueInterruptionListener -// -// Parameters: -// inAQ - the audio queue -// inID - the property ID -// -- (void)handleInterruptionChangeToState:(NSNotification *)notification { - AudioQueuePropertyID inInterruptionState = (AudioQueuePropertyID) [notification.object unsignedIntValue]; - if (inInterruptionState == kAudioSessionBeginInterruption) - { - if ([self isPlaying]) { - [self pause]; - - pausedByInterruption = YES; - } - } - else if (inInterruptionState == kAudioSessionEndInterruption) - { - AudioSessionSetActive( true ); - - if ([self isPaused] && pausedByInterruption) { - [self pause]; // this is actually resume - - pausedByInterruption = NO; // this is redundant - } - } -} -#endif - -@end - - diff --git a/Classes/AudioStreamer.h b/Classes/AudioStreamer/AudioStreamer.h similarity index 98% rename from Classes/AudioStreamer.h rename to Classes/AudioStreamer/AudioStreamer.h index c011427..0c99f16 100644 --- a/Classes/AudioStreamer.h +++ b/Classes/AudioStreamer/AudioStreamer.h @@ -127,8 +127,8 @@ extern NSString * const ASStatusChangedNotification; AudioStreamPacketDescription packetDescs[kAQMaxPacketDescs]; // packet descriptions for enqueuing audio unsigned int fillBufferIndex; // the index of the audioQueueBuffer that is being filled UInt32 packetBufferSize; - size_t bytesFilled; // how many bytes have been filled - size_t packetsFilled; // how many packets have been filled + UInt32 bytesFilled; // how many bytes have been filled + UInt32 packetsFilled; // how many 
packets have been filled bool inuse[kNumAQBufs]; // flags to indicate that a buffer is still in use NSInteger buffersUsed; NSDictionary *httpHeaders; diff --git a/Classes/AudioStreamer/AudioStreamer.m b/Classes/AudioStreamer/AudioStreamer.m new file mode 100644 index 0000000..30710e0 --- /dev/null +++ b/Classes/AudioStreamer/AudioStreamer.m @@ -0,0 +1,2012 @@ +// +// AudioStreamer.m +// StreamingAudioPlayer +// +// Created by Matt Gallagher on 27/09/08. +// Copyright 2008 Matt Gallagher. All rights reserved. +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. Permission is granted to anyone to +// use this software for any purpose, including commercial applications, and to +// alter it and redistribute it freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source +// distribution. 
+// + +#import "AudioStreamer.h" +#if TARGET_OS_IPHONE +#import +#endif + +#define BitRateEstimationMaxPackets 5000 +#define BitRateEstimationMinPackets 50 + +NSString * const ASStatusChangedNotification = @"ASStatusChangedNotification"; +NSString * const ASAudioSessionInterruptionOccuredNotification = @"ASAudioSessionInterruptionOccuredNotification"; + +NSString * const AS_NO_ERROR_STRING = @"No error."; +NSString * const AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING = @"File stream get property failed."; +NSString * const AS_FILE_STREAM_SEEK_FAILED_STRING = @"File stream seek failed."; +NSString * const AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING = @"Parse bytes failed."; +NSString * const AS_FILE_STREAM_OPEN_FAILED_STRING = @"Open audio file stream failed."; +NSString * const AS_FILE_STREAM_CLOSE_FAILED_STRING = @"Close audio file stream failed."; +NSString * const AS_AUDIO_QUEUE_CREATION_FAILED_STRING = @"Audio queue creation failed."; +NSString * const AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING = @"Audio buffer allocation failed."; +NSString * const AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING = @"Queueing of audio buffer failed."; +NSString * const AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING = @"Audio queue add listener failed."; +NSString * const AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING = @"Audio queue remove listener failed."; +NSString * const AS_AUDIO_QUEUE_START_FAILED_STRING = @"Audio queue start failed."; +NSString * const AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING = @"Audio queue buffers don't match."; +NSString * const AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING = @"Audio queue dispose failed."; +NSString * const AS_AUDIO_QUEUE_PAUSE_FAILED_STRING = @"Audio queue pause failed."; +NSString * const AS_AUDIO_QUEUE_STOP_FAILED_STRING = @"Audio queue stop failed."; +NSString * const AS_AUDIO_DATA_NOT_FOUND_STRING = @"No audio data found."; +NSString * const AS_AUDIO_QUEUE_FLUSH_FAILED_STRING = @"Audio queue flush failed."; +NSString * const AS_GET_AUDIO_TIME_FAILED_STRING = 
@"Audio queue get current time failed."; +NSString * const AS_AUDIO_STREAMER_FAILED_STRING = @"Audio playback failed"; +NSString * const AS_NETWORK_CONNECTION_FAILED_STRING = @"Network connection failed"; +NSString * const AS_AUDIO_BUFFER_TOO_SMALL_STRING = @"Audio packets are larger than kAQDefaultBufSize."; + +@interface AudioStreamer () +@property (readwrite) AudioStreamerState state; +@property (readwrite) AudioStreamerState laststate; + +- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream + fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID + ioFlags:(UInt32 *)ioFlags; +- (void)handleAudioPackets:(const void *)inInputData + numberBytes:(UInt32)inNumberBytes + numberPackets:(UInt32)inNumberPackets + packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; +- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ + buffer:(AudioQueueBufferRef)inBuffer; +- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ + propertyID:(AudioQueuePropertyID)inID; + +#if TARGET_OS_IPHONE +- (void)handleInterruptionChangeToState:(NSNotification *)notification; +#endif + +- (void)internalSeekToTime:(double)newSeekTime; +- (void)enqueueBuffer; +- (void)handleReadFromStream:(CFReadStreamRef)aStream + eventType:(CFStreamEventType)eventType; + +@end + +#pragma mark Audio Callback Function Implementations + +// +// ASPropertyListenerProc +// +// Receives notification when the AudioFileStream has audio packets to be +// played. In response, this function creates the AudioQueue, getting it +// ready to begin playback (playback won't begin until audio packets are +// sent to the queue in ASEnqueueBuffer). +// +// This function is adapted from Apple's example in AudioFileStreamExample with +// kAudioQueueProperty_IsRunning listening added. 
+// +static void ASPropertyListenerProc(void * inClientData, + AudioFileStreamID inAudioFileStream, + AudioFileStreamPropertyID inPropertyID, + UInt32 * ioFlags) +{ + // this is called by audio file stream when it finds property values + AudioStreamer* streamer = (__bridge AudioStreamer *)inClientData; + [streamer + handlePropertyChangeForFileStream:inAudioFileStream + fileStreamPropertyID:inPropertyID + ioFlags:ioFlags]; +} + +// +// ASPacketsProc +// +// When the AudioStream has packets to be played, this function gets an +// idle audio buffer and copies the audio packets into it. The calls to +// ASEnqueueBuffer won't return until there are buffers available (or the +// playback has been stopped). +// +// This function is adapted from Apple's example in AudioFileStreamExample with +// CBR functionality added. +// +static void ASPacketsProc( void * inClientData, + UInt32 inNumberBytes, + UInt32 inNumberPackets, + const void * inInputData, + AudioStreamPacketDescription *inPacketDescriptions) +{ + // this is called by audio file stream when it finds packets of audio + AudioStreamer* streamer = (__bridge AudioStreamer *)inClientData; + [streamer + handleAudioPackets:inInputData + numberBytes:inNumberBytes + numberPackets:inNumberPackets + packetDescriptions:inPacketDescriptions]; +} + +// +// ASAudioQueueOutputCallback +// +// Called from the AudioQueue when playback of specific buffers completes. This +// function signals from the AudioQueue thread to the AudioStream thread that +// the buffer is idle and available for copying data. +// +// This function is unchanged from Apple's example in AudioFileStreamExample. +// +static void ASAudioQueueOutputCallback(void* inClientData, + AudioQueueRef inAQ, + AudioQueueBufferRef inBuffer) +{ + // this is called by the audio queue when it has finished decoding our data. + // The buffer is now free to be reused. 
+ AudioStreamer* streamer = (__bridge AudioStreamer*)inClientData; + [streamer handleBufferCompleteForQueue:inAQ buffer:inBuffer]; +} + +// +// ASAudioQueueIsRunningCallback +// +// Called from the AudioQueue when playback is started or stopped. This +// information is used to toggle the observable "isPlaying" property and +// set the "finished" flag. +// +static void ASAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID) +{ + AudioStreamer* streamer = (__bridge AudioStreamer *)inUserData; + [streamer handlePropertyChangeForQueue:inAQ propertyID:inID]; +} + +#if TARGET_OS_IPHONE +// +// ASAudioSessionInterruptionListener +// +// Invoked if the audio session is interrupted (like when the phone rings) +// +static void ASAudioSessionInterruptionListener(__unused void * inClientData, UInt32 inInterruptionState) { + [[NSNotificationCenter defaultCenter] postNotificationName:ASAudioSessionInterruptionOccuredNotification object:@(inInterruptionState)]; +} +#endif + +#pragma mark CFReadStream Callback Function Implementations + +// +// ReadStreamCallBack +// +// This is the callback for the CFReadStream from the network connection. This +// is where all network data is passed to the AudioFileStream. +// +// Invoked when an error occurs, the stream ends or we have data to read. +// +static void ASReadStreamCallBack +( + CFReadStreamRef aStream, + CFStreamEventType eventType, + void* inClientInfo + ) +{ + AudioStreamer* streamer = (__bridge AudioStreamer *)inClientInfo; + [streamer handleReadFromStream:aStream eventType:eventType]; +} + +@implementation AudioStreamer + +@synthesize errorCode; +@synthesize state; +@synthesize laststate; +@synthesize bitRate; +@synthesize httpHeaders; +@synthesize fileExtension; + +// +// initWithURL +// +// Init method for the object. 
+// +- (id)initWithURL:(NSURL *)aURL +{ + self = [super init]; + if (self != nil) + { + url = aURL; + +#if TARGET_OS_IPHONE + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruptionChangeToState:) name:ASAudioSessionInterruptionOccuredNotification object:nil]; +#endif + } + return self; +} + +// +// dealloc +// +// Releases instance memory. +// +- (void)dealloc +{ + [[NSNotificationCenter defaultCenter] removeObserver:self name:ASAudioSessionInterruptionOccuredNotification object:nil]; + [self stop]; +} + +// +// isFinishing +// +// returns YES if the audio has reached a stopping condition. +// +- (BOOL)isFinishing +{ + @synchronized (self) + { + if ((errorCode != AS_NO_ERROR && state != AS_INITIALIZED) || + ((state == AS_STOPPING || state == AS_STOPPED) && + stopReason != AS_STOPPING_TEMPORARILY)) + { + return YES; + } + } + + return NO; +} + +// +// runLoopShouldExit +// +// returns YES if the run loop should exit. +// +- (BOOL)runLoopShouldExit +{ + @synchronized(self) + { + if (errorCode != AS_NO_ERROR || + (state == AS_STOPPED && + stopReason != AS_STOPPING_TEMPORARILY)) + { + return YES; + } + } + + return NO; +} + +// +// stringForErrorCode: +// +// Converts an error code to a string that can be localized or presented +// to the user. 
+//
+// Parameters:
+//    anErrorCode - the error code to convert
+//
+// returns the string representation of the error code
+//
++ (NSString *)stringForErrorCode:(AudioStreamerErrorCode)anErrorCode
+{
+	switch (anErrorCode)
+	{
+		case AS_NO_ERROR:
+			return AS_NO_ERROR_STRING;
+		case AS_FILE_STREAM_GET_PROPERTY_FAILED:
+			return AS_FILE_STREAM_GET_PROPERTY_FAILED_STRING;
+		case AS_FILE_STREAM_SEEK_FAILED:
+			return AS_FILE_STREAM_SEEK_FAILED_STRING;
+		case AS_FILE_STREAM_PARSE_BYTES_FAILED:
+			return AS_FILE_STREAM_PARSE_BYTES_FAILED_STRING;
+		case AS_AUDIO_QUEUE_CREATION_FAILED:
+			return AS_AUDIO_QUEUE_CREATION_FAILED_STRING;
+		case AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED:
+			return AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED_STRING;
+		case AS_AUDIO_QUEUE_ENQUEUE_FAILED:
+			return AS_AUDIO_QUEUE_ENQUEUE_FAILED_STRING;
+		case AS_AUDIO_QUEUE_ADD_LISTENER_FAILED:
+			return AS_AUDIO_QUEUE_ADD_LISTENER_FAILED_STRING;
+		case AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED:
+			return AS_AUDIO_QUEUE_REMOVE_LISTENER_FAILED_STRING;
+		case AS_AUDIO_QUEUE_START_FAILED:
+			return AS_AUDIO_QUEUE_START_FAILED_STRING;
+		case AS_AUDIO_QUEUE_BUFFER_MISMATCH:
+			return AS_AUDIO_QUEUE_BUFFER_MISMATCH_STRING;
+		case AS_FILE_STREAM_OPEN_FAILED:
+			return AS_FILE_STREAM_OPEN_FAILED_STRING;
+		case AS_FILE_STREAM_CLOSE_FAILED:
+			return AS_FILE_STREAM_CLOSE_FAILED_STRING;
+		case AS_AUDIO_QUEUE_DISPOSE_FAILED:
+			return AS_AUDIO_QUEUE_DISPOSE_FAILED_STRING;
+		case AS_AUDIO_QUEUE_PAUSE_FAILED:
+			// BUG FIX: this case previously returned the DISPOSE string
+			// (copy/paste error), misreporting pause failures to the user.
+			return AS_AUDIO_QUEUE_PAUSE_FAILED_STRING;
+		case AS_AUDIO_QUEUE_FLUSH_FAILED:
+			return AS_AUDIO_QUEUE_FLUSH_FAILED_STRING;
+		case AS_AUDIO_DATA_NOT_FOUND:
+			return AS_AUDIO_DATA_NOT_FOUND_STRING;
+		case AS_GET_AUDIO_TIME_FAILED:
+			return AS_GET_AUDIO_TIME_FAILED_STRING;
+		case AS_NETWORK_CONNECTION_FAILED:
+			return AS_NETWORK_CONNECTION_FAILED_STRING;
+		case AS_AUDIO_QUEUE_STOP_FAILED:
+			return AS_AUDIO_QUEUE_STOP_FAILED_STRING;
+		case AS_AUDIO_STREAMER_FAILED:
+			return AS_AUDIO_STREAMER_FAILED_STRING;
+		case AS_AUDIO_BUFFER_TOO_SMALL:
+			return AS_AUDIO_BUFFER_TOO_SMALL_STRING;
+		default:
+			return AS_AUDIO_STREAMER_FAILED_STRING;
+	}
+
+	return AS_AUDIO_STREAMER_FAILED_STRING;
+}
+
+//
+// presentAlertWithTitle:message:
+//
+// Common code for presenting error dialogs
+//
+// Parameters:
+//    title - title for the dialog
+//    message - main text for the dialog
+//
+- (void)presentAlertWithTitle:(NSString*)title message:(NSString*)message
+{
+#if TARGET_OS_IPHONE
+	UIAlertView *alert = [
+		[UIAlertView alloc]
+		initWithTitle:title
+		message:message
+		delegate:nil
+		cancelButtonTitle:NSLocalizedString(@"OK", @"")
+		otherButtonTitles: nil];
+	[alert
+		performSelector:@selector(show)
+		onThread:[NSThread mainThread]
+		withObject:nil
+		waitUntilDone:NO];
+#else
+	// NSAlert *alert =
+	// 	[NSAlert
+	// 		alertWithMessageText:title
+	// 		defaultButton:NSLocalizedString(@"OK", @"")
+	// 		alternateButton:nil
+	// 		otherButton:nil
+	// 		informativeTextWithFormat:message];
+	// [alert
+	// 	performSelector:@selector(runModal)
+	// 	onThread:[NSThread mainThread]
+	// 	withObject:nil
+	// 	waitUntilDone:NO];
+#endif
+}
+
+//
+// failWithErrorCode:
+//
+// Sets the playback state to failed and logs the error.
+//
+// Parameters:
+//    anErrorCode - the error condition
+//
+- (void)failWithErrorCode:(AudioStreamerErrorCode)anErrorCode
+{
+	@synchronized(self)
+	{
+		if (errorCode != AS_NO_ERROR)
+		{
+			// Only set the error once.
+ return; + } + + errorCode = anErrorCode; + + if (err) + { + char *errChars = (char *)&err; + NSLog(@"%@ err: %c%c%c%c %d\n", + [AudioStreamer stringForErrorCode:anErrorCode], + errChars[3], errChars[2], errChars[1], errChars[0], + (int)err); + } + else + { + NSLog(@"%@", [AudioStreamer stringForErrorCode:anErrorCode]); + } + + if (state == AS_PLAYING || + state == AS_PAUSED || + state == AS_BUFFERING) + { + self.state = AS_STOPPING; + stopReason = AS_STOPPING_ERROR; + AudioQueueStop(audioQueue, true); + } + + if (self.shouldDisplayAlertOnError) + [self presentAlertWithTitle:NSLocalizedStringFromTable(@"File Error", @"Errors", nil) + message:NSLocalizedStringFromTable(@"Unable to configure network read stream.", @"Errors", nil)]; + } +} + +// +// mainThreadStateNotification +// +// Method invoked on main thread to send notifications to the main thread's +// notification center. +// +- (void)mainThreadStateNotification +{ + NSNotification *notification = + [NSNotification + notificationWithName:ASStatusChangedNotification + object:self]; + [[NSNotificationCenter defaultCenter] + postNotification:notification]; +} + +// +// state +// +// returns the state value. +// +- (AudioStreamerState)state +{ + @synchronized(self) + { + return state; + } +} + +// +// setState: +// +// Sets the state and sends a notification that the state has changed. +// +// This method +// +// Parameters: +// anErrorCode - the error condition +// +- (void)setState:(AudioStreamerState)aStatus +{ + @synchronized(self) + { + if (state != aStatus) + { + state = aStatus; + + if ([[NSThread currentThread] isEqual:[NSThread mainThread]]) + { + [self mainThreadStateNotification]; + } + else + { + [self + performSelectorOnMainThread:@selector(mainThreadStateNotification) + withObject:nil + waitUntilDone:NO]; + } + } + } +} + +// +// isPlaying +// +// returns YES if the audio currently playing. 
+// +- (BOOL)isPlaying +{ + if (state == AS_PLAYING) + { + return YES; + } + + return NO; +} + +// +// isPaused +// +// returns YES if the audio currently playing. +// +- (BOOL)isPaused +{ + if (state == AS_PAUSED) + { + return YES; + } + + return NO; +} + +// +// isWaiting +// +// returns YES if the AudioStreamer is waiting for a state transition of some +// kind. +// +- (BOOL)isWaiting +{ + @synchronized(self) + { + if ([self isFinishing] || + state == AS_STARTING_FILE_THREAD|| + state == AS_WAITING_FOR_DATA || + state == AS_WAITING_FOR_QUEUE_TO_START || + state == AS_BUFFERING) + { + return YES; + } + } + + return NO; +} + +// +// isIdle +// +// returns YES if the AudioStream is in the AS_INITIALIZED state (i.e. +// isn't doing anything). +// +- (BOOL)isIdle +{ + if (state == AS_INITIALIZED) + { + return YES; + } + + return NO; +} + +// +// isAborted +// +// returns YES if the AudioStream was stopped due to some errror, handled through failWithCodeError. +// +- (BOOL)isAborted +{ + if (state == AS_STOPPING && stopReason == AS_STOPPING_ERROR) + { + return YES; + } + + return NO; +} + +// +// hintForFileExtension: +// +// Generates a first guess for the file type based on the file's extension +// +// Parameters: +// fileExtension - the file extension +// +// returns a file type hint that can be passed to the AudioFileStream +// ++ (AudioFileTypeID)hintForFileExtension:(NSString *)fileExtension +{ + AudioFileTypeID fileTypeHint = kAudioFileAAC_ADTSType; + if ([fileExtension isEqual:@"mp3"]) + { + fileTypeHint = kAudioFileMP3Type; + } + else if ([fileExtension isEqual:@"wav"]) + { + fileTypeHint = kAudioFileWAVEType; + } + else if ([fileExtension isEqual:@"aifc"]) + { + fileTypeHint = kAudioFileAIFCType; + } + else if ([fileExtension isEqual:@"aiff"]) + { + fileTypeHint = kAudioFileAIFFType; + } + else if ([fileExtension isEqual:@"m4a"]) + { + fileTypeHint = kAudioFileM4AType; + } + else if ([fileExtension isEqual:@"mp4"]) + { + fileTypeHint = kAudioFileMPEG4Type; + 
} + else if ([fileExtension isEqual:@"caf"]) + { + fileTypeHint = kAudioFileCAFType; + } + else if ([fileExtension isEqual:@"aac"]) + { + fileTypeHint = kAudioFileAAC_ADTSType; + } + return fileTypeHint; +} + +// +// openReadStream +// +// Open the audioFileStream to parse data and the fileHandle as the data +// source. +// +- (BOOL)openReadStream +{ + @synchronized(self) + { + NSAssert([[NSThread currentThread] isEqual:internalThread], + @"File stream download must be started on the internalThread"); + NSAssert(stream == nil, @"Download stream already initialized"); + + // + // Create the HTTP GET request + // + CFHTTPMessageRef message= CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (__bridge CFURLRef)url, kCFHTTPVersion1_1); + + // + // If we are creating this request to seek to a location, set the + // requested byte range in the headers. + // + if (fileLength > 0 && seekByteOffset > 0) + { + CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"), + (__bridge CFStringRef)[NSString stringWithFormat:@"bytes=%ld-%ld", (long)seekByteOffset, (long)fileLength]); + discontinuous = YES; + } + + // + // Create the read stream that will receive data from the HTTP request + // + stream = CFReadStreamCreateForHTTPRequest(NULL, message); + CFRelease(message); + + // + // Enable stream redirection + // + if (CFReadStreamSetProperty( + stream, + kCFStreamPropertyHTTPShouldAutoredirect, + kCFBooleanTrue) == false) + { + [self failWithErrorCode:AS_FILE_STREAM_SET_PROPERTY_FAILED]; + + return NO; + } + + // + // Handle proxies + // + CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings(); + CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings); + CFRelease(proxySettings); + + // + // Handle SSL connections + // + if([[url scheme] isEqualToString:@"https"]) + { + NSDictionary *sslSettings = + [NSDictionary dictionaryWithObjectsAndKeys: + @(YES), kCFStreamSSLValidatesCertificateChain, + [NSNull null], kCFStreamSSLPeerName, + nil]; + + 
CFReadStreamSetProperty(stream, kCFStreamPropertySSLSettings, (__bridge CFDictionaryRef)(sslSettings)); + } + + // + // We're now ready to receive data + // + self.state = AS_WAITING_FOR_DATA; + + // + // Open the stream + // + if (!CFReadStreamOpen(stream)) + { + CFRelease(stream); + + [self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED]; + + return NO; + } + + // + // Set our callback function to receive the data + // + CFStreamClientContext context = {0, (__bridge void *)(self), NULL, NULL, NULL}; + CFReadStreamSetClient( + stream, + kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered, + ASReadStreamCallBack, + &context); + CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes); + } + + return YES; +} + +// +// startInternal +// +// This is the start method for the AudioStream thread. This thread is created +// because it will be blocked when there are no audio buffers idle (and ready +// to receive audio data). +// +// Activity in this thread: +// - Creation and cleanup of all AudioFileStream and AudioQueue objects +// - Receives data from the CFReadStream +// - AudioFileStream processing +// - Copying of data from AudioFileStream into audio buffers +// - Stopping of the thread because of end-of-file +// - Stopping due to error or failure +// +// Activity *not* in this thread: +// - AudioQueue playback and notifications (happens in AudioQueue thread) +// - Actual download of NSURLConnection data (NSURLConnection's thread) +// - Creation of the AudioStreamer (other, likely "main" thread) +// - Invocation of -start method (other, likely "main" thread) +// - User/manual invocation of -stop (other, likely "main" thread) +// +// This method contains bits of the "main" function from Apple's example in +// AudioFileStreamExample. 
+// +- (void)startInternal +{ + @autoreleasepool { + @synchronized(self) + { + if (state != AS_STARTING_FILE_THREAD) + { + if (state != AS_STOPPING && + state != AS_STOPPED) + { + NSLog(@"### Not starting audio thread. State code is: %ld", (long)state); + } + self.state = AS_INITIALIZED; + return; + } + +#if TARGET_OS_IPHONE + // + // Set the audio session category so that we continue to play if the + // iPhone/iPod auto-locks. + // +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated" + AudioSessionInitialize ( + NULL, // 'NULL' to use the default (main) run loop + NULL, // 'NULL' to use the default run loop mode + ASAudioSessionInterruptionListener, // a reference to your interruption callback + (__bridge void *)(self) // data to pass to your interruption listener callback + ); + UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback; + AudioSessionSetProperty ( + kAudioSessionProperty_AudioCategory, + sizeof (sessionCategory), + &sessionCategory + ); + AudioSessionSetActive(true); +#pragma clang diagnostic pop +#endif + + // initialize a mutex and condition so that we can block on buffers in use. + pthread_mutex_init(&queueBuffersMutex, NULL); + pthread_cond_init(&queueBufferReadyCondition, NULL); + + if (![self openReadStream]) + { + goto cleanup; + } + } + + // + // Process the run loop until playback is finished or failed. + // + BOOL isRunning = YES; + do + { + isRunning = [[NSRunLoop currentRunLoop] + runMode:NSDefaultRunLoopMode + beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]]; + + @synchronized(self) { + if (seekWasRequested) { + [self internalSeekToTime:requestedSeekTime]; + seekWasRequested = NO; + } + } + + // + // If there are no queued buffers, we need to check here since the + // handleBufferCompleteForQueue:buffer: should not change the state + // (may not enter the synchronized section). 
+ // + if (buffersUsed == 0 && self.state == AS_PLAYING) + { + err = AudioQueuePause(audioQueue); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED]; + return; + } + self.state = AS_BUFFERING; + } + } while (isRunning && ![self runLoopShouldExit]); + + cleanup: + + @synchronized(self) + { + // + // Cleanup the read stream if it is still open + // + if (stream) + { + CFReadStreamClose(stream); + CFRelease(stream); + stream = nil; + } + + // + // Close the audio file strea, + // + if (audioFileStream) + { + err = AudioFileStreamClose(audioFileStream); + audioFileStream = nil; + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_CLOSE_FAILED]; + } + } + + // + // Dispose of the Audio Queue + // + if (audioQueue) + { + err = AudioQueueDispose(audioQueue, true); + audioQueue = nil; + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_DISPOSE_FAILED]; + } + } + + pthread_mutex_destroy(&queueBuffersMutex); + pthread_cond_destroy(&queueBufferReadyCondition); + +#if TARGET_OS_IPHONE +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated" + AudioSessionSetActive(false); +#pragma clang diagnostic pop +#endif + + httpHeaders = nil; + + bytesFilled = 0; + packetsFilled = 0; + seekByteOffset = 0; + packetBufferSize = 0; + self.state = AS_INITIALIZED; + + internalThread = nil; + } + + } + +} + +// +// start +// +// Calls startInternal in a new thread. 
+// +- (void)start +{ + @synchronized (self) + { + if (state == AS_PAUSED) + { + [self pause]; + } + else if (state == AS_INITIALIZED) + { + NSAssert([[NSThread currentThread] isEqual:[NSThread mainThread]], + @"Playback can only be started from the main thread."); + notificationCenter = + [NSNotificationCenter defaultCenter]; + self.state = AS_STARTING_FILE_THREAD; + internalThread = + [[NSThread alloc] + initWithTarget:self + selector:@selector(startInternal) + object:nil]; + [internalThread start]; + } + } +} + + +// internalSeekToTime: +// +// Called from our internal runloop to reopen the stream at a seeked location +// +- (void)internalSeekToTime:(double)newSeekTime +{ + if ([self calculatedBitRate] == 0.0 || fileLength <= 0) + { + return; + } + + // + // Calculate the byte offset for seeking + // + seekByteOffset = dataOffset + + (newSeekTime / self.duration) * (fileLength - dataOffset); + + // + // Attempt to leave 1 useful packet at the end of the file (although in + // reality, this may still seek too far if the file has a long trailer). + // + if (seekByteOffset > fileLength - 2 * packetBufferSize) + { + seekByteOffset = fileLength - 2 * packetBufferSize; + } + + // + // Store the old time from the audio queue and the time that we're seeking + // to so that we'll know the correct time progress after seeking. 
+ // + seekTime = newSeekTime; + + // + // Attempt to align the seek with a packet boundary + // + double calculatedBitRate = [self calculatedBitRate]; + if (packetDuration > 0 && + calculatedBitRate > 0) + { + UInt32 ioFlags = 0; + SInt64 packetAlignedByteOffset; + SInt64 seekPacket = floor(newSeekTime / packetDuration); + err = AudioFileStreamSeek(audioFileStream, seekPacket, &packetAlignedByteOffset, &ioFlags); + if (!err && !(ioFlags & kAudioFileStreamSeekFlag_OffsetIsEstimated)) + { + seekTime -= ((seekByteOffset - dataOffset) - packetAlignedByteOffset) * 8.0 / calculatedBitRate; + seekByteOffset = packetAlignedByteOffset + dataOffset; + } + } + + // + // Close the current read straem + // + if (stream) + { + CFReadStreamClose(stream); + CFRelease(stream); + stream = nil; + } + + // + // Stop the audio queue + // + self.state = AS_STOPPING; + stopReason = AS_STOPPING_TEMPORARILY; + err = AudioQueueStop(audioQueue, true); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; + return; + } + + // + // Re-open the file stream. It will request a byte-range starting at + // seekByteOffset. + // + [self openReadStream]; +} + +// +// seekToTime: +// +// Attempts to seek to the new time. Will be ignored if the bitrate or fileLength +// are unknown. +// +// Parameters: +// newTime - the time to seek to +// +- (void)seekToTime:(double)newSeekTime +{ + @synchronized(self) + { + seekWasRequested = YES; + requestedSeekTime = newSeekTime; + } +} + +// +// progress +// +// returns the current playback progress. Will return zero if sampleRate has +// not yet been detected. 
+// +- (double)progress +{ + @synchronized(self) + { + if (sampleRate > 0 && (state == AS_STOPPING || ![self isFinishing])) + { + if (state != AS_PLAYING && state != AS_PAUSED && state != AS_BUFFERING && state != AS_STOPPING) + { + return lastProgress; + } + + AudioTimeStamp queueTime; + Boolean discontinuity; + err = AudioQueueGetCurrentTime(audioQueue, NULL, &queueTime, &discontinuity); + + const OSStatus AudioQueueStopped = 0x73746F70; // 0x73746F70 is 'stop' + if (err == AudioQueueStopped) + { + return lastProgress; + } + else if (err) + { + [self failWithErrorCode:AS_GET_AUDIO_TIME_FAILED]; + } + + double progress = seekTime + queueTime.mSampleTime / sampleRate; + if (progress < 0.0) + { + progress = 0.0; + } + + lastProgress = progress; + return progress; + } + } + + return lastProgress; +} + +// +// calculatedBitRate +// +// returns the bit rate, if known. Uses packet duration times running bits per +// packet if available, otherwise it returns the nominal bitrate. Will return +// zero if no useful option available. +// +- (double)calculatedBitRate +{ + if (packetDuration && processedPacketsCount > BitRateEstimationMinPackets) + { + double averagePacketByteSize = processedPacketsSizeTotal / processedPacketsCount; + return 8.0 * averagePacketByteSize / packetDuration; + } + + if (bitRate) + { + return (double)bitRate; + } + + return 0; +} + +// +// duration +// +// Calculates the duration of available audio from the bitRate and fileLength. +// +// returns the calculated duration in seconds. +// +- (double)duration +{ + double calculatedBitRate = [self calculatedBitRate]; + + if (calculatedBitRate == 0 || fileLength == 0) + { + return 0.0; + } + + return (fileLength - dataOffset) / (calculatedBitRate * 0.125); +} + +// +// pause +// +// A togglable pause function. 
+// +- (void)pause +{ + @synchronized(self) + { + if (state == AS_PLAYING || state == AS_STOPPING) + { + err = AudioQueuePause(audioQueue); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_PAUSE_FAILED]; + return; + } + self.laststate = state; + self.state = AS_PAUSED; + } + else if (state == AS_PAUSED) + { + err = AudioQueueStart(audioQueue, NULL); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; + return; + } + self.state = self.laststate; + } + } +} + +// +// stop +// +// This method can be called to stop downloading/playback before it completes. +// It is automatically called when an error occurs. +// +// If playback has not started before this method is called, it will toggle the +// "isPlaying" property so that it is guaranteed to transition to true and +// back to false +// +- (void)stop +{ + @synchronized(self) + { + if (audioQueue && + (state == AS_PLAYING || state == AS_PAUSED || + state == AS_BUFFERING || state == AS_WAITING_FOR_QUEUE_TO_START)) + { + self.state = AS_STOPPING; + stopReason = AS_STOPPING_USER_ACTION; + err = AudioQueueStop(audioQueue, true); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED]; + return; + } + } + else if (state != AS_INITIALIZED) + { + self.state = AS_STOPPED; + stopReason = AS_STOPPING_USER_ACTION; + } + seekWasRequested = NO; + } + + while (state != AS_INITIALIZED) + { + [NSThread sleepForTimeInterval:0.1]; + } +} + +// +// handleReadFromStream:eventType: +// +// Reads data from the network file stream into the AudioFileStream +// +// Parameters: +// aStream - the network file stream +// eventType - the event which triggered this method +// +- (void)handleReadFromStream:(CFReadStreamRef)aStream + eventType:(CFStreamEventType)eventType +{ + if (aStream != stream) + { + // + // Ignore messages from old streams + // + return; + } + + if (eventType == kCFStreamEventErrorOccurred) + { + [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; + } + else if (eventType == 
kCFStreamEventEndEncountered)
+	{
+		@synchronized(self)
+		{
+			if ([self isFinishing])
+			{
+				return;
+			}
+		}
+
+		//
+		// If there is a partially filled buffer, pass it to the AudioQueue for
+		// processing
+		//
+		if (bytesFilled)
+		{
+			if (self.state == AS_WAITING_FOR_DATA)
+			{
+				//
+				// Force audio data smaller than one whole buffer to play.
+				//
+				self.state = AS_FLUSHING_EOF;
+			}
+			[self enqueueBuffer];
+		}
+
+		@synchronized(self)
+		{
+			if (state == AS_WAITING_FOR_DATA)
+			{
+				[self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND];
+			}
+
+			//
+			// We left the synchronized section to enqueue the buffer so we
+			// must check that we are !finished again before touching the
+			// audioQueue
+			//
+			else if (![self isFinishing])
+			{
+				if (audioQueue)
+				{
+					//
+					// Set the progress at the end of the stream
+					//
+					err = AudioQueueFlush(audioQueue);
+					if (err)
+					{
+						[self failWithErrorCode:AS_AUDIO_QUEUE_FLUSH_FAILED];
+						return;
+					}
+
+					self.state = AS_STOPPING;
+					stopReason = AS_STOPPING_EOF;
+					err = AudioQueueStop(audioQueue, false);
+					if (err)
+					{
+						// BUG FIX: a failure of AudioQueueStop was previously
+						// reported as AS_AUDIO_QUEUE_FLUSH_FAILED; report the
+						// correct stop-failure code instead.
+						[self failWithErrorCode:AS_AUDIO_QUEUE_STOP_FAILED];
+						return;
+					}
+				}
+				else
+				{
+					self.state = AS_STOPPED;
+					stopReason = AS_STOPPING_EOF;
+				}
+			}
+		}
+	}
+	else if (eventType == kCFStreamEventHasBytesAvailable)
+	{
+		if (!httpHeaders)
+		{
+			CFTypeRef message =
+				CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader);
+			//
+			// MEMORY FIX: CFHTTPMessageCopyAllHeaderFields returns a +1
+			// reference (Create/Copy rule). The previous plain __bridge cast
+			// left that reference unowned by ARC, leaking the header
+			// dictionary on every stream open; __bridge_transfer hands
+			// ownership to ARC so it is released with httpHeaders.
+			//
+			httpHeaders =
+				(__bridge_transfer NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message);
+			CFRelease(message);
+
+			//
+			// Only read the content length if we seeked to time zero, otherwise
+			// we only have a subset of the total bytes.
+			//
+			if (seekByteOffset == 0)
+			{
+				fileLength = [[httpHeaders objectForKey:@"Content-Length"] integerValue];
+			}
+		}
+
+		if (!audioFileStream)
+		{
+			//
+			// Attempt to guess the file type from the URL. Reading the MIME type
+			// from the httpHeaders might be a better approach since lots of
+			// URL's don't have the right extension.
+ // + // If you have a fixed file-type, you may want to hardcode this. + // + if (!self.fileExtension) + { + self.fileExtension = [[url path] pathExtension]; + } + AudioFileTypeID fileTypeHint = + [AudioStreamer hintForFileExtension:self.fileExtension]; + + // create an audio file stream parser + err = AudioFileStreamOpen((__bridge void *)(self), ASPropertyListenerProc, ASPacketsProc, + fileTypeHint, &audioFileStream); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_OPEN_FAILED]; + return; + } + } + + UInt8 bytes[kAQDefaultBufSize]; + CFIndex length; + @synchronized(self) + { + if ([self isFinishing] || !CFReadStreamHasBytesAvailable(stream)) + { + return; + } + + // + // Read the bytes from the stream + // + length = CFReadStreamRead(stream, bytes, kAQDefaultBufSize); + + if (length == -1) + { + [self failWithErrorCode:AS_AUDIO_DATA_NOT_FOUND]; + return; + } + + if (length == 0) + { + return; + } + } + + if (discontinuous) + { + err = AudioFileStreamParseBytes(audioFileStream, (UInt32)length, bytes, kAudioFileStreamParseFlag_Discontinuity); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; + return; + } + } + else + { + err = AudioFileStreamParseBytes(audioFileStream, (UInt32)length, bytes, 0); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_PARSE_BYTES_FAILED]; + return; + } + } + } +} + +// +// enqueueBuffer +// +// Called from ASPacketsProc and connectionDidFinishLoading to pass filled audio +// bufffers (filled by ASPacketsProc) to the AudioQueue for playback. This +// function does not return until a buffer is idle for further filling or +// the AudioQueue is stopped. +// +// This function is adapted from Apple's example in AudioFileStreamExample with +// CBR functionality added. 
+// +- (void)enqueueBuffer +{ + @synchronized(self) + { + if ([self isFinishing] || stream == 0) + { + return; + } + + inuse[fillBufferIndex] = true; // set in use flag + buffersUsed++; + + // enqueue buffer + AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; + fillBuf->mAudioDataByteSize = bytesFilled; + + if (packetsFilled) + { + err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, packetsFilled, packetDescs); + } + else + { + err = AudioQueueEnqueueBuffer(audioQueue, fillBuf, 0, NULL); + } + + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_ENQUEUE_FAILED]; + return; + } + + + if (state == AS_BUFFERING || + state == AS_WAITING_FOR_DATA || + state == AS_FLUSHING_EOF || + (state == AS_STOPPED && stopReason == AS_STOPPING_TEMPORARILY)) + { + // + // Fill all the buffers before starting. This ensures that the + // AudioFileStream stays a small amount ahead of the AudioQueue to + // avoid an audio glitch playing streaming files on iPhone SDKs < 3.0 + // + if (state == AS_FLUSHING_EOF || buffersUsed == kNumAQBufs - 1) + { + if (self.state == AS_BUFFERING) + { + err = AudioQueueStart(audioQueue, NULL); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; + return; + } + self.state = AS_PLAYING; + } + else + { + self.state = AS_WAITING_FOR_QUEUE_TO_START; + + err = AudioQueueStart(audioQueue, NULL); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_START_FAILED]; + return; + } + } + } + } + + // go to next buffer + if (++fillBufferIndex >= kNumAQBufs) fillBufferIndex = 0; + bytesFilled = 0; // reset bytes filled + packetsFilled = 0; // reset packets filled + } + + // wait until next buffer is not in use + pthread_mutex_lock(&queueBuffersMutex); + while (inuse[fillBufferIndex]) + { + pthread_cond_wait(&queueBufferReadyCondition, &queueBuffersMutex); + } + pthread_mutex_unlock(&queueBuffersMutex); +} + +// +// createQueue +// +// Method to create the AudioQueue from the parameters gathered by the +// AudioFileStream. 
+// +// Creation is deferred to the handling of the first audio packet (although +// it could be handled any time after kAudioFileStreamProperty_ReadyToProducePackets +// is true). +// +- (void)createQueue +{ + sampleRate = asbd.mSampleRate; + packetDuration = asbd.mFramesPerPacket / sampleRate; + + // create the audio queue + err = AudioQueueNewOutput(&asbd, ASAudioQueueOutputCallback, (__bridge void *)(self), NULL, NULL, 0, &audioQueue); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_CREATION_FAILED]; + return; + } + + // start the queue if it has not been started already + // listen to the "isRunning" property + err = AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning, ASAudioQueueIsRunningCallback, (__bridge void *)(self)); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_ADD_LISTENER_FAILED]; + return; + } + + // get the packet size if it is available + UInt32 sizeOfUInt32 = sizeof(UInt32); + err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_PacketSizeUpperBound, &sizeOfUInt32, &packetBufferSize); + if (err || packetBufferSize == 0) + { + err = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MaximumPacketSize, &sizeOfUInt32, &packetBufferSize); + if (err || packetBufferSize == 0) + { + // No packet size available, just use the default + packetBufferSize = kAQDefaultBufSize; + } + } + + // allocate audio queue buffers + for (unsigned int i = 0; i < kNumAQBufs; ++i) + { + err = AudioQueueAllocateBuffer(audioQueue, packetBufferSize, &audioQueueBuffer[i]); + if (err) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_ALLOCATION_FAILED]; + return; + } + } + + // get the cookie size + UInt32 cookieSize; + Boolean writable; + OSStatus ignorableError; + ignorableError = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable); + if (ignorableError) + { + return; + } + + // get the cookie data + void* cookieData = calloc(1, 
cookieSize); + ignorableError = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData); + if (ignorableError) + { + return; + } + + // set the cookie on the queue. + ignorableError = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize); + free(cookieData); + if (ignorableError) + { + return; + } +} + +// +// handlePropertyChangeForFileStream:fileStreamPropertyID:ioFlags: +// +// Object method which handles implementation of ASPropertyListenerProc +// +// Parameters: +// inAudioFileStream - should be the same as self->audioFileStream +// inPropertyID - the property that changed +// ioFlags - the ioFlags passed in +// +- (void)handlePropertyChangeForFileStream:(AudioFileStreamID)inAudioFileStream + fileStreamPropertyID:(AudioFileStreamPropertyID)inPropertyID + ioFlags:(UInt32 *)ioFlags +{ + @synchronized(self) + { + if ([self isFinishing]) + { + return; + } + + if (inPropertyID == kAudioFileStreamProperty_ReadyToProducePackets) + { + discontinuous = true; + } + else if (inPropertyID == kAudioFileStreamProperty_DataOffset) + { + SInt64 offset; + UInt32 offsetSize = sizeof(offset); + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataOffset, &offsetSize, &offset); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; + return; + } + dataOffset = offset; + + if (audioDataByteCount) + { + fileLength = dataOffset + audioDataByteCount; + } + } + else if (inPropertyID == kAudioFileStreamProperty_AudioDataByteCount) + { + UInt32 byteCountSize = sizeof(UInt64); + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_AudioDataByteCount, &byteCountSize, &audioDataByteCount); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; + return; + } + fileLength = dataOffset + audioDataByteCount; + } + else if (inPropertyID == kAudioFileStreamProperty_DataFormat) + { + if (asbd.mSampleRate == 
0) + { + UInt32 asbdSize = sizeof(asbd); + + // get the stream format. + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; + return; + } + } + } + else if (inPropertyID == kAudioFileStreamProperty_FormatList) + { + Boolean outWriteable; + UInt32 formatListSize; + err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, &outWriteable); + if (err) + { + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; + return; + } + + AudioFormatListItem *formatList = malloc(formatListSize); + err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, formatList); + if (err) + { + free(formatList); + [self failWithErrorCode:AS_FILE_STREAM_GET_PROPERTY_FAILED]; + return; + } + + for (int i = 0; i * sizeof(AudioFormatListItem) < formatListSize; i += sizeof(AudioFormatListItem)) + { + AudioStreamBasicDescription pasbd = formatList[i].mASBD; + + if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE || + pasbd.mFormatID == kAudioFormatMPEG4AAC_HE_V2) + { + // + // We've found HE-AAC, remember this to tell the audio queue + // when we construct it. 
+ // +#if !TARGET_IPHONE_SIMULATOR + asbd = pasbd; +#endif + break; + } + } + free(formatList); + } + else + { + // NSLog(@"Property is %c%c%c%c", + // ((char *)&inPropertyID)[3], + // ((char *)&inPropertyID)[2], + // ((char *)&inPropertyID)[1], + // ((char *)&inPropertyID)[0]); + } + } +} + +// +// handleAudioPackets:numberBytes:numberPackets:packetDescriptions: +// +// Object method which handles the implementation of ASPacketsProc +// +// Parameters: +// inInputData - the packet data +// inNumberBytes - byte size of the data +// inNumberPackets - number of packets in the data +// inPacketDescriptions - packet descriptions +// +- (void)handleAudioPackets:(const void *)inInputData + numberBytes:(UInt32)inNumberBytes + numberPackets:(UInt32)inNumberPackets + packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions; +{ + @synchronized(self) + { + if ([self isFinishing]) + { + return; + } + + if (bitRate == 0) + { + // + // m4a and a few other formats refuse to parse the bitrate so + // we need to set an "unparseable" condition here. If you know + // the bitrate (parsed it another way) you can set it on the + // class if needed. + // + bitRate = ~0; + } + + // we have successfully read the first packests from the audio stream, so + // clear the "discontinuous" flag + if (discontinuous) + { + discontinuous = false; + } + + if (!audioQueue) + { + [self createQueue]; + } + } + + // the following code assumes we're streaming VBR data. for CBR data, the second branch is used. + if (inPacketDescriptions) + { + for (int i = 0; i < inNumberPackets; ++i) + { + SInt64 packetOffset = inPacketDescriptions[i].mStartOffset; + SInt64 packetSize = inPacketDescriptions[i].mDataByteSize; + size_t bufSpaceRemaining; + + if (processedPacketsCount < BitRateEstimationMaxPackets) + { + processedPacketsSizeTotal += packetSize; + processedPacketsCount += 1; + } + + @synchronized(self) + { + // If the audio was terminated before this point, then + // exit. 
+ if ([self isFinishing]) + { + return; + } + + if (packetSize > packetBufferSize) + { + [self failWithErrorCode:AS_AUDIO_BUFFER_TOO_SMALL]; + } + + bufSpaceRemaining = packetBufferSize - bytesFilled; + } + + // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. + if (bufSpaceRemaining < packetSize) + { + [self enqueueBuffer]; + } + + @synchronized(self) + { + // If the audio was terminated while waiting for a buffer, then + // exit. + if ([self isFinishing]) + { + return; + } + + // + // If there was some kind of issue with enqueueBuffer and we didn't + // make space for the new audio data then back out + // + if (bytesFilled + packetSize > packetBufferSize) + { + return; + } + + // copy data to the audio queue buffer + AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; + memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)inInputData + packetOffset, packetSize); + + // fill out packet description + packetDescs[packetsFilled] = inPacketDescriptions[i]; + packetDescs[packetsFilled].mStartOffset = bytesFilled; + // keep track of bytes filled and packets filled + bytesFilled += packetSize; + packetsFilled += 1; + } + + // if that was the last free packet description, then enqueue the buffer. + size_t packetsDescsRemaining = kAQMaxPacketDescs - packetsFilled; + if (packetsDescsRemaining == 0) { + [self enqueueBuffer]; + } + } + } + else + { + size_t offset = 0; + while (inNumberBytes) + { + // if the space remaining in the buffer is not enough for this packet, then enqueue the buffer. + size_t bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; + if (bufSpaceRemaining < inNumberBytes) + { + [self enqueueBuffer]; + } + + @synchronized(self) + { + // If the audio was terminated while waiting for a buffer, then + // exit. 
+ if ([self isFinishing]) + { + return; + } + + bufSpaceRemaining = kAQDefaultBufSize - bytesFilled; + size_t copySize; + if (bufSpaceRemaining < inNumberBytes) + { + copySize = bufSpaceRemaining; + } + else + { + copySize = inNumberBytes; + } + + // + // If there was some kind of issue with enqueueBuffer and we didn't + // make space for the new audio data then back out + // + if (bytesFilled > packetBufferSize) + { + return; + } + + // copy data to the audio queue buffer + AudioQueueBufferRef fillBuf = audioQueueBuffer[fillBufferIndex]; + memcpy((char*)fillBuf->mAudioData + bytesFilled, (const char*)(inInputData + offset), copySize); + + + // keep track of bytes filled and packets filled + bytesFilled += copySize; + packetsFilled = 0; + inNumberBytes -= copySize; + offset += copySize; + } + } + } +} + +// +// handleBufferCompleteForQueue:buffer: +// +// Handles the buffer completion notification from the audio queue +// +// Parameters: +// inAQ - the queue +// inBuffer - the buffer +// +- (void)handleBufferCompleteForQueue:(AudioQueueRef)inAQ + buffer:(AudioQueueBufferRef)inBuffer +{ + unsigned int bufIndex = -1; + for (unsigned int i = 0; i < kNumAQBufs; ++i) + { + if (inBuffer == audioQueueBuffer[i]) + { + bufIndex = i; + break; + } + } + + if (bufIndex == -1) + { + [self failWithErrorCode:AS_AUDIO_QUEUE_BUFFER_MISMATCH]; + pthread_mutex_lock(&queueBuffersMutex); + pthread_cond_signal(&queueBufferReadyCondition); + pthread_mutex_unlock(&queueBuffersMutex); + return; + } + + // signal waiting thread that the buffer is free. + pthread_mutex_lock(&queueBuffersMutex); + inuse[bufIndex] = false; + buffersUsed--; + + // + // Enable this logging to measure how many buffers are queued at any time. 
+ // +#if LOG_QUEUED_BUFFERS + NSLog(@"Queued buffers: %ld", buffersUsed); +#endif + + pthread_cond_signal(&queueBufferReadyCondition); + pthread_mutex_unlock(&queueBuffersMutex); +} + +- (void)handlePropertyChange:(NSNumber *)num +{ + [self handlePropertyChangeForQueue:NULL propertyID:[num intValue]]; +} + +// +// handlePropertyChangeForQueue:propertyID: +// +// Implementation for ASAudioQueueIsRunningCallback +// +// Parameters: +// inAQ - the audio queue +// inID - the property ID +// +- (void)handlePropertyChangeForQueue:(AudioQueueRef)inAQ + propertyID:(AudioQueuePropertyID)inID +{ + @autoreleasepool { + + if (![[NSThread currentThread] isEqual:internalThread]) + { + [self + performSelector:@selector(handlePropertyChange:) + onThread:internalThread + withObject:[NSNumber numberWithInt:inID] + waitUntilDone:NO + modes:[NSArray arrayWithObject:NSDefaultRunLoopMode]]; + return; + } + @synchronized(self) + { + if (inID == kAudioQueueProperty_IsRunning) + { + if (state == AS_STOPPING) + { + // Should check value of isRunning to ensure this kAudioQueueProperty_IsRunning isn't + // the *start* of a very short stream + UInt32 isRunning = 0; + UInt32 size = sizeof(UInt32); + AudioQueueGetProperty(audioQueue, inID, &isRunning, &size); + if (isRunning == 0) + { + self.state = AS_STOPPED; + } + } + else if (state == AS_WAITING_FOR_QUEUE_TO_START) + { + // + // Note about this bug avoidance quirk: + // + // On cleanup of the AudioQueue thread, on rare occasions, there would + // be a crash in CFSetContainsValue as a CFRunLoopObserver was getting + // removed from the CFRunLoop. + // + // After lots of testing, it appeared that the audio thread was + // attempting to remove CFRunLoop observers from the CFRunLoop after the + // thread had already deallocated the run loop. 
+ // + // By creating an NSRunLoop for the AudioQueue thread, it changes the + // thread destruction order and seems to avoid this crash bug -- or + // at least I haven't had it since (nasty hard to reproduce error!) + // + [NSRunLoop currentRunLoop]; + + self.state = AS_PLAYING; + } + else + { + NSLog(@"AudioQueue changed state in unexpected way."); + } + } + } + } +} + +#if TARGET_OS_IPHONE +// +// handleInterruptionChangeForQueue:propertyID: +// +// Implementation for ASAudioQueueInterruptionListener +// +// Parameters: +// inAQ - the audio queue +// inID - the property ID +// +- (void)handleInterruptionChangeToState:(NSNotification *)notification { + AudioQueuePropertyID inInterruptionState = (AudioQueuePropertyID) [notification.object unsignedIntValue]; + if (inInterruptionState == kAudioSessionBeginInterruption) + { + if ([self isPlaying]) { + [self pause]; + + pausedByInterruption = YES; + } + } + else if (inInterruptionState == kAudioSessionEndInterruption) + { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated" + AudioSessionSetActive(true); +#pragma clang diagnostic pop + + if ([self isPaused] && pausedByInterruption) { + + [self pause]; // this is actually resume + + pausedByInterruption = NO; // this is redundant + } + } +} +#endif + +@end + + diff --git a/Classes/iPhoneStreamingPlayerAppDelegate.m b/Classes/iPhoneStreamingPlayerAppDelegate.m index 67ddc52..7553403 100644 --- a/Classes/iPhoneStreamingPlayerAppDelegate.m +++ b/Classes/iPhoneStreamingPlayerAppDelegate.m @@ -38,11 +38,4 @@ - (void)applicationDidFinishLaunching:(UIApplication *)application { } -- (void)dealloc { - [viewController release]; - [window release]; - [super dealloc]; -} - - @end diff --git a/Classes/iPhoneStreamingPlayerViewController.m b/Classes/iPhoneStreamingPlayerViewController.m index 4c60558..101bb52 100644 --- a/Classes/iPhoneStreamingPlayerViewController.m +++ b/Classes/iPhoneStreamingPlayerViewController.m @@ -46,8 +46,7 @@ - 
(void)setButtonImageNamed:(NSString *)imageName { imageName = @"playButton"; } - [currentImageName autorelease]; - currentImageName = [imageName retain]; + currentImageName = imageName; UIImage *image = [UIImage imageNamed:imageName]; @@ -77,7 +76,6 @@ - (void)destroyStreamer progressUpdateTimer = nil; [streamer stop]; - [streamer release]; streamer = nil; } } @@ -97,13 +95,12 @@ - (void)createStreamer [self destroyStreamer]; NSString *escapedValue = - [(NSString *)CFURLCreateStringByAddingPercentEscapes( + (NSString *)CFBridgingRelease(CFURLCreateStringByAddingPercentEscapes( nil, (CFStringRef)downloadSourceField.text, NULL, NULL, - kCFStringEncodingUTF8) - autorelease]; + kCFStringEncodingUTF8)); NSURL *url = [NSURL URLWithString:escapedValue]; streamer = [[AudioStreamer alloc] initWithURL:url]; @@ -133,7 +130,7 @@ - (void)viewDidLoad { [super viewDidLoad]; - MPVolumeView *volumeView = [[[MPVolumeView alloc] initWithFrame:volumeSlider.bounds] autorelease]; + MPVolumeView *volumeView = [[MPVolumeView alloc] initWithFrame:volumeSlider.bounds]; [volumeSlider addSubview:volumeView]; [volumeView sizeToFit]; @@ -317,7 +314,6 @@ - (void)dealloc [progressUpdateTimer invalidate]; progressUpdateTimer = nil; } - [super dealloc]; } @end diff --git a/iPhoneStreamingPlayer.xcodeproj/project.pbxproj b/iPhoneStreamingPlayer.xcodeproj/project.pbxproj index 3784732..f8bcffb 100755 --- a/iPhoneStreamingPlayer.xcodeproj/project.pbxproj +++ b/iPhoneStreamingPlayer.xcodeproj/project.pbxproj @@ -13,12 +13,12 @@ 1DF5F4E00D08C38300B7A737 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1DF5F4DF0D08C38300B7A737 /* UIKit.framework */; }; 288765A50DF7441C002DB57D /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 288765A40DF7441C002DB57D /* CoreGraphics.framework */; }; 28D7ACF80DDB3853001CB0EB /* iPhoneStreamingPlayerViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 28D7ACF70DDB3853001CB0EB /* iPhoneStreamingPlayerViewController.m 
*/; }; + 8E10B4BA1A7CA35500966B9E /* AudioStreamer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E10B4B91A7CA35500966B9E /* AudioStreamer.m */; }; C90A804C14134C9400810E93 /* MainWindow.xib in Resources */ = {isa = PBXBuildFile; fileRef = C9E673010FE8C55B0033BF43 /* MainWindow.xib */; }; C9423DF10EF8AA6B003B785B /* CFNetwork.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C9423DF00EF8AA6B003B785B /* CFNetwork.framework */; }; C98DA4F4173200D0005FC5E7 /* Default-568h@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = C98DA4F3173200D0005FC5E7 /* Default-568h@2x.png */; }; C9AB93E20FCF816F0047C0FA /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C9AB93E10FCF816F0047C0FA /* AudioToolbox.framework */; }; C9AB93F30FCF81790047C0FA /* MediaPlayer.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C9AB93F20FCF81790047C0FA /* MediaPlayer.framework */; }; - C9C2D87A0EB6E09C00A3D071 /* AudioStreamer.m in Sources */ = {isa = PBXBuildFile; fileRef = C9C2D8780EB6E09C00A3D071 /* AudioStreamer.m */; }; C9C2D8CE0EB6E31200A3D071 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C9C2D8CD0EB6E31200A3D071 /* QuartzCore.framework */; }; C9E673020FE8C55B0033BF43 /* iPhoneStreamingPlayerViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = C9E672FF0FE8C55B0033BF43 /* iPhoneStreamingPlayerViewController.xib */; }; C9E673090FE8C5650033BF43 /* playbutton.png in Resources */ = {isa = PBXBuildFile; fileRef = C9E673050FE8C5650033BF43 /* playbutton.png */; }; @@ -38,12 +38,12 @@ 28D7ACF70DDB3853001CB0EB /* iPhoneStreamingPlayerViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = iPhoneStreamingPlayerViewController.m; sourceTree = ""; }; 29B97316FDCFA39411CA2CEA /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 32CA4F630368D1EE00C91783 /* 
iPhoneStreamingPlayer_Prefix.pch */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = iPhoneStreamingPlayer_Prefix.pch; sourceTree = ""; }; + 8E10B4B81A7CA35500966B9E /* AudioStreamer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioStreamer.h; sourceTree = ""; }; + 8E10B4B91A7CA35500966B9E /* AudioStreamer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioStreamer.m; sourceTree = ""; }; C9423DF00EF8AA6B003B785B /* CFNetwork.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CFNetwork.framework; path = System/Library/Frameworks/CFNetwork.framework; sourceTree = SDKROOT; }; C98DA4F3173200D0005FC5E7 /* Default-568h@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Default-568h@2x.png"; sourceTree = ""; }; C9AB93E10FCF816F0047C0FA /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; }; C9AB93F20FCF81790047C0FA /* MediaPlayer.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MediaPlayer.framework; path = System/Library/Frameworks/MediaPlayer.framework; sourceTree = SDKROOT; }; - C9C2D8780EB6E09C00A3D071 /* AudioStreamer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioStreamer.m; sourceTree = ""; }; - C9C2D8790EB6E09C00A3D071 /* AudioStreamer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioStreamer.h; sourceTree = ""; }; C9C2D8CD0EB6E31200A3D071 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = /System/Library/Frameworks/QuartzCore.framework; sourceTree = ""; }; C9E672FF0FE8C55B0033BF43 /* 
iPhoneStreamingPlayerViewController.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = iPhoneStreamingPlayerViewController.xib; path = "iPhone Resources/iPhoneStreamingPlayerViewController.xib"; sourceTree = ""; }; C9E673010FE8C55B0033BF43 /* MainWindow.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = MainWindow.xib; path = "iPhone Resources/MainWindow.xib"; sourceTree = ""; }; @@ -75,8 +75,7 @@ 080E96DDFE201D6D7F000001 /* Classes */ = { isa = PBXGroup; children = ( - C9C2D8790EB6E09C00A3D071 /* AudioStreamer.h */, - C9C2D8780EB6E09C00A3D071 /* AudioStreamer.m */, + 8E10B4B71A7CA35500966B9E /* AudioStreamer */, 1D3623240D0F684500981E51 /* iPhoneStreamingPlayerAppDelegate.h */, 1D3623250D0F684500981E51 /* iPhoneStreamingPlayerAppDelegate.m */, 28D7ACF60DDB3853001CB0EB /* iPhoneStreamingPlayerViewController.h */, @@ -143,6 +142,15 @@ name = Frameworks; sourceTree = ""; }; + 8E10B4B71A7CA35500966B9E /* AudioStreamer */ = { + isa = PBXGroup; + children = ( + 8E10B4B81A7CA35500966B9E /* AudioStreamer.h */, + 8E10B4B91A7CA35500966B9E /* AudioStreamer.m */, + ); + path = AudioStreamer; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -213,9 +221,9 @@ buildActionMask = 2147483647; files = ( 1D60589B0D05DD56006BFB54 /* main.m in Sources */, + 8E10B4BA1A7CA35500966B9E /* AudioStreamer.m in Sources */, 1D3623260D0F684500981E51 /* iPhoneStreamingPlayerAppDelegate.m in Sources */, 28D7ACF80DDB3853001CB0EB /* iPhoneStreamingPlayerViewController.m in Sources */, - C9C2D87A0EB6E09C00A3D071 /* AudioStreamer.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -226,6 +234,7 @@ isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ENABLE_OBJC_ARC = YES; COPY_PHASE_STRIP = NO; GCC_DYNAMIC_NO_PIC = NO; GCC_OPTIMIZATION_LEVEL = 0; @@ -240,6 +249,7 @@ isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ENABLE_OBJC_ARC = YES; 
COPY_PHASE_STRIP = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = iPhoneStreamingPlayer_Prefix.pch; diff --git a/main.m b/main.m index 417a8dd..54089a1 100644 --- a/main.m +++ b/main.m @@ -28,12 +28,12 @@ #endif int main(int argc, const char *argv[]) { - NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init]; + @autoreleasepool { #if TARGET_OS_IPHONE - int retVal = UIApplicationMain(argc, (char **)argv, nil, nil); + int retVal = UIApplicationMain(argc, (char **)argv, nil, nil); #else - int retVal = NSApplicationMain(argc, argv); + int retVal = NSApplicationMain(argc, argv); #endif - [pool release]; - return retVal; + return retVal; + } } From 5e6b027580cf681880e9edfc9ab7c5659d852f93 Mon Sep 17 00:00:00 2001 From: diaoshu Date: Sun, 1 Feb 2015 18:05:59 +0800 Subject: [PATCH 2/3] =?UTF-8?q?=E6=8F=90=E4=BA=A4=E4=BB=A3=E7=A0=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- AudioStreamer.podspec | 4 ++-- LICENSE | 19 +++++++++++++++++++ .../project.pbxproj | 2 ++ 3 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 LICENSE diff --git a/AudioStreamer.podspec b/AudioStreamer.podspec index f311bce..9fb303f 100644 --- a/AudioStreamer.podspec +++ b/AudioStreamer.podspec @@ -3,9 +3,9 @@ s.name = 'AudioStreamer' s.version = '0.1' s.license = 'MIT' s.summary = 'A streaming audio player class (AudioStreamer) for Mac OS X and iOS' -s.homepage = 'http://cocoawithlove.com' +s.homepage = 'https://github.com/openboy2012/AudioStreamer' s.author = { 'dejohn' => 'dongjia_9251@126.com' } -s.source = { :git => 'github.com/openboy2012/AudioStreamer.git', :tag => '0.1' } +s.source = { :git => 'https://github.com/openboy2012/AudioStreamer.git', :tag => '0.1' } s.ios.deployment_target = '5.1.1' s.osx.deployment_target = '10.8' s.source_files = 'Classes/AudioStreamer/*.{h,m}' diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..c777efc --- /dev/null +++ b/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 
2015 AudioStreamer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/iPhoneStreamingPlayer.xcodeproj/project.pbxproj b/iPhoneStreamingPlayer.xcodeproj/project.pbxproj index f8bcffb..d86b1fa 100755 --- a/iPhoneStreamingPlayer.xcodeproj/project.pbxproj +++ b/iPhoneStreamingPlayer.xcodeproj/project.pbxproj @@ -241,6 +241,7 @@ GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = iPhoneStreamingPlayer_Prefix.pch; INFOPLIST_FILE = iPhoneInfo.plist; + IPHONEOS_DEPLOYMENT_TARGET = 7.0; PRODUCT_NAME = iPhoneStreamingPlayer; }; name = Debug; @@ -254,6 +255,7 @@ GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PREFIX_HEADER = iPhoneStreamingPlayer_Prefix.pch; INFOPLIST_FILE = iPhoneInfo.plist; + IPHONEOS_DEPLOYMENT_TARGET = 7.0; PRODUCT_NAME = iPhoneStreamingPlayer; }; name = Release; From a5bdd11736279239927579edff55d33dd6891b7e Mon Sep 17 00:00:00 2001 From: diaoshu Date: Sun, 1 Feb 2015 18:50:57 +0800 Subject: [PATCH 3/3] add AudioStreamer.podspec and support for cocoapods --- Classes/AudioStreamer/AudioStreamer.m | 79 ++++++++++----------------- 1 file changed, 30 insertions(+), 49 deletions(-) diff --git a/Classes/AudioStreamer/AudioStreamer.m b/Classes/AudioStreamer/AudioStreamer.m index 30710e0..016315a 100644 --- a/Classes/AudioStreamer/AudioStreamer.m +++ b/Classes/AudioStreamer/AudioStreamer.m @@ -102,10 +102,9 @@ static void ASPropertyListenerProc(void * inClientData, { // this is called by audio file stream when it finds property values AudioStreamer* streamer = (__bridge AudioStreamer *)inClientData; - [streamer - handlePropertyChangeForFileStream:inAudioFileStream - fileStreamPropertyID:inPropertyID - ioFlags:ioFlags]; + [streamer handlePropertyChangeForFileStream:inAudioFileStream + fileStreamPropertyID:inPropertyID + ioFlags:ioFlags]; } // @@ -127,11 +126,10 @@ static void ASPacketsProc( void * inClientData, { // this is called by audio file stream when it finds packets of audio AudioStreamer* streamer = (__bridge AudioStreamer *)inClientData; - [streamer - handleAudioPackets:inInputData - numberBytes:inNumberBytes - 
numberPackets:inNumberPackets - packetDescriptions:inPacketDescriptions]; + [streamer handleAudioPackets:inInputData + numberBytes:inNumberBytes + numberPackets:inNumberPackets + packetDescriptions:inPacketDescriptions]; } // @@ -150,7 +148,8 @@ static void ASAudioQueueOutputCallback(void* inClientData, // this is called by the audio queue when it has finished decoding our data. // The buffer is now free to be reused. AudioStreamer* streamer = (__bridge AudioStreamer*)inClientData; - [streamer handleBufferCompleteForQueue:inAQ buffer:inBuffer]; + [streamer handleBufferCompleteForQueue:inAQ + buffer:inBuffer]; } // @@ -163,7 +162,8 @@ static void ASAudioQueueOutputCallback(void* inClientData, static void ASAudioQueueIsRunningCallback(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID) { AudioStreamer* streamer = (__bridge AudioStreamer *)inUserData; - [streamer handlePropertyChangeForQueue:inAQ propertyID:inID]; + [streamer handlePropertyChangeForQueue:inAQ + propertyID:inID]; } #if TARGET_OS_IPHONE @@ -439,12 +439,8 @@ - (void)failWithErrorCode:(AudioStreamerErrorCode)anErrorCode // - (void)mainThreadStateNotification { - NSNotification *notification = - [NSNotification - notificationWithName:ASStatusChangedNotification - object:self]; - [[NSNotificationCenter defaultCenter] - postNotification:notification]; + NSNotification *notification = [NSNotification notificationWithName:ASStatusChangedNotification object:self]; + [[NSNotificationCenter defaultCenter] postNotification:notification]; } // @@ -484,10 +480,7 @@ - (void)setState:(AudioStreamerState)aStatus } else { - [self - performSelectorOnMainThread:@selector(mainThreadStateNotification) - withObject:nil - waitUntilDone:NO]; + [self performSelectorOnMainThread:@selector(mainThreadStateNotification) withObject:nil waitUntilDone:NO]; } } } @@ -642,7 +635,7 @@ - (BOOL)openReadStream // // Create the HTTP GET request // - CFHTTPMessageRef message= CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", 
(__bridge CFURLRef)url, kCFHTTPVersion1_1); + CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (__bridge CFURLRef)url, kCFHTTPVersion1_1); // // If we are creating this request to seek to a location, set the @@ -650,8 +643,7 @@ - (BOOL)openReadStream // if (fileLength > 0 && seekByteOffset > 0) { - CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"), - (__bridge CFStringRef)[NSString stringWithFormat:@"bytes=%ld-%ld", (long)seekByteOffset, (long)fileLength]); + CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"),(__bridge CFStringRef)[NSString stringWithFormat:@"bytes=%lu-%lu", seekByteOffset, fileLength]); discontinuous = YES; } @@ -686,9 +678,7 @@ - (BOOL)openReadStream // if([[url scheme] isEqualToString:@"https"]) { - NSDictionary *sslSettings = - [NSDictionary dictionaryWithObjectsAndKeys: - @(YES), kCFStreamSSLValidatesCertificateChain, + NSDictionary *sslSettings = [NSDictionary dictionaryWithObjectsAndKeys:@(YES), kCFStreamSSLValidatesCertificateChain, [NSNull null], kCFStreamSSLPeerName, nil]; @@ -775,18 +765,15 @@ - (void)startInternal // #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated" - AudioSessionInitialize ( - NULL, // 'NULL' to use the default (main) run loop - NULL, // 'NULL' to use the default run loop mode - ASAudioSessionInterruptionListener, // a reference to your interruption callback - (__bridge void *)(self) // data to pass to your interruption listener callback - ); + AudioSessionInitialize(NULL, // 'NULL' to use the default (main) run loop + NULL, // 'NULL' to use the default run loop mode + ASAudioSessionInterruptionListener, // a reference to your interruption callback + (__bridge void *)(self) // data to pass to your interruption listener callback + ); UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback; - AudioSessionSetProperty ( - kAudioSessionProperty_AudioCategory, - sizeof (sessionCategory), - &sessionCategory - ); + 
AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, + sizeof (sessionCategory), + &sessionCategory); AudioSessionSetActive(true); #pragma clang diagnostic pop #endif @@ -807,9 +794,8 @@ - (void)startInternal BOOL isRunning = YES; do { - isRunning = [[NSRunLoop currentRunLoop] - runMode:NSDefaultRunLoopMode - beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]]; + isRunning = [[NSRunLoop currentRunLoop] runMode:NSDefaultRunLoopMode + beforeDate:[NSDate dateWithTimeIntervalSinceNow:0.25]]; @synchronized(self) { if (seekWasRequested) { @@ -917,14 +903,10 @@ - (void)start { NSAssert([[NSThread currentThread] isEqual:[NSThread mainThread]], @"Playback can only be started from the main thread."); - notificationCenter = - [NSNotificationCenter defaultCenter]; + notificationCenter = [NSNotificationCenter defaultCenter]; self.state = AS_STARTING_FILE_THREAD; internalThread = - [[NSThread alloc] - initWithTarget:self - selector:@selector(startInternal) - object:nil]; + [[NSThread alloc] initWithTarget:self selector:@selector(startInternal) object:nil]; [internalThread start]; } } @@ -1288,8 +1270,7 @@ - (void)handleReadFromStream:(CFReadStreamRef)aStream { CFTypeRef message = CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader); - httpHeaders = - (__bridge NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message); + httpHeaders = (__bridge NSDictionary *)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)message); CFRelease(message); //