Created
June 27, 2017 15:58
-
-
Save zonble/43c1287a4c208bbe0a8a2c9ddc08f23a to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#import <Foundation/Foundation.h>

/// A minimal streaming MP3 player that renders audio through
/// AVSampleBufferAudioRenderer (the renderer used for AirPlay 2 playback;
/// see the implementation file).
@interface KKSimpleAirPlay2Player : NSObject

/// Creates a player and immediately starts downloading the audio data.
/// Playback does not begin until -play is called.
/// @param inURL The URL of the MP3 resource to stream.
- (id)initWithURL:(NSURL *)inURL;

/// Starts (or resumes) playback.
- (void)play;

/// Pauses playback; -play resumes from the current position.
- (void)pause;

/// YES once playback has ended or the download has failed.
@property (readonly, getter=isStopped) BOOL stopped;
@end
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#import "KKSimpleAirPlay2Player.h" | |
#import <AudioToolbox/AudioToolbox.h> | |
#import <AVFoundation/AVFoundation.h> | |
// AudioFileStream parser callbacks (defined at the bottom of this file).
static void ZBAudioFileStreamPropertyListener(void * inClientData, AudioFileStreamID inAudioFileStream, AudioFileStreamPropertyID inPropertyID, UInt32 * ioFlags);
static void ZBAudioFileStreamPacketsCallback(void * inClientData, UInt32 inNumberBytes, UInt32 inNumberPackets, const void * inInputData, AudioStreamPacketDescription *inPacketDescriptions);

@interface KKSimpleAirPlay2Player () <NSURLConnectionDelegate>
{
	NSURLConnection *URLConnection;                // downloads the remote audio data
	struct {
		BOOL stopped;                              // playback ended or download failed
		BOOL loaded;                               // the whole resource has been downloaded
	} playerStatus ;
	AudioFileStreamID audioFileStreamID;           // parses raw bytes into MP3 packets
	AudioStreamBasicDescription streamDescription; // format reported by the parser
	NSMutableArray *packets;                       // one NSData per parsed audio packet
	size_t readHead;                               // index of the next packet to enqueue
}
// Renderer plays CMSampleBuffers; the synchronizer drives its playback clock.
@property (strong, nonatomic) AVSampleBufferAudioRenderer *renderer;
@property (strong, nonatomic) AVSampleBufferRenderSynchronizer *synchronizer;
// Serial queue on which the renderer asks for more media data.
@property (strong, nonatomic) dispatch_queue_t queue;
@end
@implementation KKSimpleAirPlay2Player

/// Designated initializer. Configures the audio session for long-form
/// playback (the route-sharing policy that enables AirPlay 2 routing),
/// opens an MP3 AudioFileStream parser, and starts the download right away.
- (id)initWithURL:(NSURL *)inURL
{
	self = [super init];
	if (self) {
		// Long-form route sharing policy opts this session into AirPlay 2.
		// NOTE(review): both audio-session errors are ignored — consider surfacing them.
		[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback mode:AVAudioSessionModeDefault routeSharingPolicy:AVAudioSessionRouteSharingPolicyLongForm options:0 error:nil];
		[[AVAudioSession sharedInstance] setActive:YES error:nil];
		playerStatus.stopped = NO;
		packets = [[NSMutableArray alloc] init];
		// Parsed data is delivered to the two C callbacks declared above;
		// `self` is passed through as the opaque client-data pointer.
		AudioFileStreamOpen((__bridge void * _Nullable)(self), ZBAudioFileStreamPropertyListener, ZBAudioFileStreamPacketsCallback, kAudioFileMP3Type, &audioFileStreamID);
		// NSURLConnection begins loading as soon as it is created.
		URLConnection = [[NSURLConnection alloc] initWithRequest:[NSURLRequest requestWithURL:inURL] delegate:self];
		self.renderer = [[AVSampleBufferAudioRenderer alloc] init];
		self.synchronizer = [[AVSampleBufferRenderSynchronizer alloc] init];
		[self.synchronizer addRenderer:self.renderer];
		self.queue = dispatch_queue_create(0, 0);
	}
	return self;
}
/// NSURLConnectionDelegate: aborts the download on any non-200 HTTP status.
- (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response
{
	if ([response isKindOfClass:[NSHTTPURLResponse class]]) {
		NSInteger statusCode = [(NSHTTPURLResponse *)response statusCode];
		if (statusCode != 200) {
			// Cast to long: NSInteger is `int` on 32-bit platforms, so passing
			// it straight to %ld is an undefined-format mismatch there.
			NSLog(@"HTTP code:%ld", (long)statusCode);
			[connection cancel];
			playerStatus.stopped = YES;
		}
	}
}
/// Packets ("frames") per second of audio, derived from the parsed stream
/// format. Falls back to the 44.1 kHz / 1152-samples-per-packet MP3 default
/// until the parser has reported a format.
- (double)framePerSecond
{
	if (!streamDescription.mFramesPerPacket) {
		return 44100.0 / 1152.0;
	}
	return streamDescription.mSampleRate / streamDescription.mFramesPerPacket;
}
/// Starts (or resumes) playback by running the synchronizer's clock.
- (void)play
{
	self.synchronizer.rate = 1.0;
}
/// Pauses playback by halting the synchronizer's clock.
- (void)pause
{
	self.synchronizer.rate = 0.0;
}
/// NSURLConnectionDelegate: feeds each downloaded chunk to the
/// AudioFileStream parser, which calls back with packets as it finds them.
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data
{
	OSStatus status = AudioFileStreamParseBytes(audioFileStreamID, (UInt32)[data length], [data bytes], 0);
	if (status != noErr) {
		// The parse status was silently discarded before; a failed parse means
		// the rest of the stream is unusable, so stop instead of spinning.
		NSLog(@"AudioFileStreamParseBytes failed: %d", (int)status);
		[connection cancel];
		playerStatus.stopped = YES;
	}
}
/// NSURLConnectionDelegate: once the download completes, starts feeding the
/// renderer whenever it signals readiness for more data.
- (void)connectionDidFinishLoading:(NSURLConnection *)connection
{
	NSLog(@"Complete loading data");
	playerStatus.loaded = YES;
	// Weak capture breaks a retain cycle: self retains the renderer, the
	// renderer retains this readiness block, so a strong capture of self
	// would keep the player alive forever.
	__weak typeof(self) weakSelf = self;
	[self.renderer requestMediaDataWhenReadyOnQueue:self.queue usingBlock:^{
		[weakSelf _enqueueDataWithPacketsCount:96];
	}];
}
/// NSURLConnectionDelegate: marks the player stopped when the download fails.
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error
{
	playerStatus.stopped = YES;
	NSLog(@"Failed to load data: %@", [error localizedDescription]);
}
/// Records the stream format reported by the AudioFileStream parser.
/// Despite the name, this only stores the format — no audio queue is created.
- (void)_createAudioQueueWithAudioStreamDescription:(AudioStreamBasicDescription *)audioStreamBasicDescription
{
	// Plain struct assignment copies all fields, same as the old memcpy.
	streamDescription = *audioStreamBasicDescription;
}
/// Copies each parsed packet out of the parser-owned buffer into its own
/// NSData and appends it to the packets array (the parser reuses its buffer,
/// so the data must be copied here).
- (void)_storePacketsWithNumberOfBytes:(UInt32)inNumberBytes numberOfPackets:(UInt32)inNumberPackets inputData:(const void *)inInputData packetDescriptions:(AudioStreamPacketDescription *)inPacketDescriptions
{
	const char *inputBytes = inInputData;
	for (UInt32 packetIndex = 0; packetIndex < inNumberPackets; packetIndex++) {
		AudioStreamPacketDescription description = inPacketDescriptions[packetIndex];
		assert(description.mDataByteSize > 0);
		NSData *packetData = [NSData dataWithBytes:inputBytes + description.mStartOffset
		                                    length:description.mDataByteSize];
		[packets addObject:packetData];
	}
}
/// Packs up to inPacketCount pending packets into one CMSampleBuffer and
/// hands it to the renderer. Runs on the renderer's feeding queue. Stops the
/// synchronizer once every downloaded packet has been enqueued.
- (void)_enqueueDataWithPacketsCount:(size_t)inPacketCount
{
	if (readHead == [packets count]) {
		if (playerStatus.loaded) {
			// All downloaded data has been enqueued: stop the clock.
			[self.synchronizer setRate:0];
			playerStatus.stopped = YES;
		}
		// Whether finished or still downloading, there is nothing to enqueue
		// right now — returning avoids building a zero-byte sample buffer.
		return;
	}
	if (readHead + inPacketCount >= [packets count]) {
		inPacketCount = [packets count] - readHead;
	}

	// Concatenate the next inPacketCount packets into one contiguous block.
	UInt32 totalSize = 0;
	UInt32 index;
	for (index = 0; index < inPacketCount; index++) {
		NSData *packet = packets[index + readHead];
		totalSize += packet.length;
	}
	void *block = calloc(totalSize, 1);
	UInt32 offset = 0;
	for (index = 0; index < inPacketCount; index++) {
		NSData *packet = packets[index + readHead];
		memcpy(block + offset, packet.bytes, packet.length);
		offset += packet.length;
	}

	// Passing kCFAllocatorDefault as the blockAllocator transfers ownership
	// of `block` to the CMBlockBuffer: it frees the memory when finalized.
	// Do NOT also free(block) here — that was a latent double-free in the
	// original, masked only because the buffer itself was leaked.
	CMBlockBufferRef blockBuffer = NULL;
	CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, block, totalSize, kCFAllocatorDefault, NULL, 0, totalSize, 0, &blockBuffer);

	CMFormatDescriptionRef format = NULL;
	CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &streamDescription, 0, NULL, 0, NULL, NULL, &format);
	CMSampleTimingInfo timing = { CMTimeMake(1, streamDescription.mSampleRate), kCMTimeZero, kCMTimeInvalid };
	CMSampleBufferRef buffer = NULL;
	CMSampleBufferCreate(kCFAllocatorDefault,
	                     blockBuffer, false, NULL, NULL, format,
	                     totalSize,
	                     1, &timing, 0, NULL, &buffer);
	readHead += inPacketCount;
	[self.renderer enqueueSampleBuffer:buffer];

	// The renderer retains what it needs; release our Create-rule references
	// so the whole chain (buffer -> blockBuffer -> block) is reclaimed after
	// playback instead of leaking (these CFReleases were missing before).
	if (buffer) {
		CFRelease(buffer);
	}
	if (blockBuffer) {
		CFRelease(blockBuffer);
	}
	if (format) {
		CFRelease(format);
	}
}
@end | |
/// AudioFileStream property callback. When the parser has determined the
/// stream's data format, logs every field and stores it on the player.
void ZBAudioFileStreamPropertyListener(void * inClientData, AudioFileStreamID inAudioFileStream, AudioFileStreamPropertyID inPropertyID, UInt32 * ioFlags)
{
	if (inPropertyID != kAudioFileStreamProperty_DataFormat) {
		return;
	}
	// The client-data pointer is the player instance passed to AudioFileStreamOpen.
	KKSimpleAirPlay2Player *player = (__bridge KKSimpleAirPlay2Player *)inClientData;
	UInt32 descriptionSize = 0;
	Boolean isWritable = false;
	AudioStreamBasicDescription description;
	AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &descriptionSize, &isWritable);
	AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &descriptionSize, &description);
	NSLog(@"mSampleRate: %f", description.mSampleRate);
	NSLog(@"mFormatID: %u", description.mFormatID);
	NSLog(@"mFormatFlags: %u", description.mFormatFlags);
	NSLog(@"mBytesPerPacket: %u", description.mBytesPerPacket);
	NSLog(@"mFramesPerPacket: %u", description.mFramesPerPacket);
	NSLog(@"mBytesPerFrame: %u", description.mBytesPerFrame);
	NSLog(@"mChannelsPerFrame: %u", description.mChannelsPerFrame);
	NSLog(@"mBitsPerChannel: %u", description.mBitsPerChannel);
	NSLog(@"mReserved: %u", description.mReserved);
	[player _createAudioQueueWithAudioStreamDescription:&description];
}
/// AudioFileStream packets callback: forwards each batch of parsed packets
/// to the player instance carried in the client-data pointer.
void ZBAudioFileStreamPacketsCallback(void * inClientData, UInt32 inNumberBytes, UInt32 inNumberPackets, const void * inInputData, AudioStreamPacketDescription *inPacketDescriptions)
{
	KKSimpleAirPlay2Player *player = (__bridge KKSimpleAirPlay2Player *)inClientData;
	[player _storePacketsWithNumberOfBytes:inNumberBytes
	                       numberOfPackets:inNumberPackets
	                             inputData:inInputData
	                    packetDescriptions:inPacketDescriptions];
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Thanks for sharing this! It's very hard to find examples of `AVSampleBufferAudioRenderer`. In `_enqueueDataWithPacketsCount`, should there be a `CFRelease(blockBuffer)` and a `CFRelease(buffer)`?