// AudioManager.m
#import "AudioManager.h"
#define kOutputBus 0
#define kInputBus 1
#define SAMPLE_RATE 16000.00
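// RemoteIO bus convention: kOutputBus (0) carries rendered audio out to the
// hardware (speaker); kInputBus (1) carries captured audio in from the
// hardware (microphone).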
#ifdef __cplusplus
extern "C" {
#endif
void CheckError(OSStatus error, const char *operation)
{
    if (error == noErr) return;
    char str[20];
    // see if it appears to be a 4-char-code
    *(UInt32 *)(str + 1) = CFSwapInt32HostToBig(error);
    if (isprint(str[1]) && isprint(str[2]) && isprint(str[3]) && isprint(str[4])) {
        str[0] = str[5] = '\'';
        str[6] = '\0';
    } else {
        // no, format it as an integer
        sprintf(str, "%d", (int)error);
    }
    fprintf(stderr, "Error: %s (%s)\n", operation, str);
    exit(1);
}
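// Example of what CheckError prints (illustration only):
// kAudioFormatUnsupportedDataFormatError (1718449215) is the four-char code
// 'fmt?' and would be reported as ('fmt?'), while a plain integer status such
// as paramErr (-50) would be reported as (-50).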
OSStatus inputCallback(void *inRefCon,
                       AudioUnitRenderActionFlags *ioActionFlags,
                       const AudioTimeStamp *inTimeStamp,
                       UInt32 inOutputBusNumber,
                       UInt32 inNumberFrames,
                       AudioBufferList *ioData);
#ifdef __cplusplus
}
#endif
static AudioManager *audioManager = nil;
@interface AudioManager()
@property (nonatomic, assign, readwrite) AudioUnit audioUnit;
@property (nonatomic, assign, readwrite) UInt32 numChannels;
@property (nonatomic, assign, readwrite) BOOL isInterleaved;
@property (nonatomic, assign, readwrite) float *inData;
- (void)setupAudioSession;
- (void)setupAudioUnits;
@end
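// Note (assumption): AudioManager.h is expected to declare +audioManager,
// -start and -stop, and to expose the properties above as readonly; this
// class extension redeclares them readwrite for internal use.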
@implementation AudioManager
+ (AudioManager *)audioManager
{
    @synchronized(self)
    {
        if (audioManager == nil) {
            audioManager = [[AudioManager alloc] init];
        }
    }
    return audioManager;
}
- (id)init
{
    if (self = [super init])
    {
        // The render callback may deliver more than 512 frames per slice
        // (1024 is common), so allocate generously to avoid writing past the
        // end of this buffer in inputCallback.
        self.inData = (float *)calloc(4096, sizeof(float));
        [self setupAudioSession];
        [self setupAudioUnits];
        return self;
    }
    return nil;
}
- (void)dealloc
{
    free(self.inData);
}
#pragma mark - Audio Methods
- (void)setupAudioSession
{
    NSError *err = nil;
    if (![[AVAudioSession sharedInstance] setActive:YES error:&err]) {
        NSLog(@"Couldn't activate audio session: %@", err);
    }
}
- (void)setupAudioUnits
{
    // --- Audio Session Setup ---
    // ---------------------------
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:nil];
    // Ask for roughly 23.2 ms of audio per hardware I/O cycle; the deprecated
    // AudioSessionSetProperty C call is replaced by its AVAudioSession equivalent.
    NSError *bufferError = nil;
    if (![[AVAudioSession sharedInstance] setPreferredIOBufferDuration:0.0232 error:&bufferError]) {
        NSLog(@"Couldn't set the preferred buffer duration: %@", bufferError);
    }
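    // Worked example of where 0.0232 comes from (an assumption about the
    // original author's intent): at a typical 44.1 kHz hardware rate,
    // 0.0232 s * 44100 Hz ~= 1024 frames per I/O cycle. The slice size the
    // input callback actually sees depends on what the hardware grants.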
    // We define the audio component
    AudioComponentDescription desc = {0};
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    // find the AU component by description
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    CheckError(AudioComponentInstanceNew(inputComponent, &_audioUnit),
               "Couldn't create the output audio unit");
    // define that we want record io on the input bus
    UInt32 one = 1;
    CheckError(AudioUnitSetProperty(_audioUnit,
                                    kAudioOutputUnitProperty_EnableIO,
                                    kAudioUnitScope_Input,
                                    kInputBus,
                                    &one,
                                    sizeof(one)),
               "Couldn't enable IO on the input scope of output unit");
    /*
     We need to specify the format we want to work with.
     We use Linear PCM because it is uncompressed and we work on raw data:
     16-bit signed integer samples, mono, 2 bytes per packet/frame, at 16 kHz.
     */
    AudioStreamBasicDescription audioFormat = {0};
    audioFormat.mSampleRate = SAMPLE_RATE;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;
    self.numChannels = audioFormat.mChannelsPerFrame;
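    // Sanity check on the packing above: mBytesPerFrame = mChannelsPerFrame *
    // (mBitsPerChannel / 8) = 1 * (16 / 8) = 2 bytes, and with one frame per
    // packet mBytesPerPacket is the same 2 bytes.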
    // set this client format on the output scope of the input bus
    // (bus 1's output side is where we read the captured samples from)
    UInt32 size = sizeof(AudioStreamBasicDescription);
    CheckError(AudioUnitSetProperty(_audioUnit,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Output,
                                    kInputBus,
                                    &audioFormat,
                                    size),
               "Couldn't set the ASBD on the audio unit (after setting its sampling rate)");
    if (audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
        // The audio is non-interleaved
        printf("Not interleaved!\n");
        self.isInterleaved = NO;
    } else {
        printf("Format is interleaved\n");
        self.isInterleaved = YES;
    }
    /*
     We need a callback struct that holds a pointer to our input callback and a
     reference back to this AudioManager instance.
     */
    AURenderCallbackStruct callbackStruct;
    // set the recording callback
    callbackStruct.inputProc = inputCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)(self);
    // register the input callback on the output unit (global scope, element 0)
    CheckError(AudioUnitSetProperty(_audioUnit,
                                    kAudioOutputUnitProperty_SetInputCallback,
                                    kAudioUnitScope_Global,
                                    0,
                                    &callbackStruct,
                                    sizeof(callbackStruct)),
               "Couldn't set the input callback on the audio unit");
    CheckError(AudioUnitInitialize(_audioUnit), "Couldn't initialize the output unit");
    NSLog(@"Initialized.");
}
#pragma mark - Control Stream
- (void)stop {
    CheckError(AudioOutputUnitStop(_audioUnit), "Couldn't stop the output unit");
}
- (void)start {
    CheckError(AudioOutputUnitStart(_audioUnit), "Couldn't start the output unit");
}
OSStatus inputCallback(void *inRefCon,
                       AudioUnitRenderActionFlags *ioActionFlags,
                       const AudioTimeStamp *inTimeStamp,
                       UInt32 inOutputBusNumber,
                       UInt32 inNumberFrames,
                       AudioBufferList *ioData)
{
    @autoreleasepool {
        // the data gets rendered here
        AudioBuffer buffer;
        /*
         This is a reference back to the object that owns the callback.
         */
        AudioManager *audioManager = (__bridge AudioManager *)inRefCon;
        /*
         At this point we describe the buffer we want the input rendered into:
         one channel (mono on the iPhone) of 16-bit samples. The number of
         frames per callback is usually 512 or 1024.
         */
        buffer.mDataByteSize = inNumberFrames * 2;   // 2 bytes per 16-bit mono frame
        buffer.mNumberChannels = 1;                  // one channel
        buffer.mData = malloc(inNumberFrames * 2);   // note: malloc in a render callback is not ideal for real-time audio
        // we put our buffer into a bufferlist array for rendering
        AudioBufferList bufferList;
        bufferList.mNumberBuffers = 1;
        bufferList.mBuffers[0] = buffer;
        // render input and check for error
        CheckError(AudioUnitRender([audioManager audioUnit],
                                   ioActionFlags,
                                   inTimeStamp,
                                   inOutputBusNumber,
                                   inNumberFrames,
                                   &bufferList),
                   "Couldn't render the output unit");
        // Convert the rendered SInt16 samples into floats we can work with
        if (!audioManager.isInterleaved) {
            for (int i = 0; i < audioManager.numChannels; ++i) {
                vDSP_vflt16((SInt16 *)bufferList.mBuffers[i].mData, 1,
                            audioManager.inData + i, audioManager.numChannels,
                            inNumberFrames);
            }
        }
        else {
            vDSP_vflt16((SInt16 *)bufferList.mBuffers[0].mData, 1,
                        audioManager.inData, 1,
                        inNumberFrames * audioManager.numChannels);
        }
        // Scale from the SInt16 range into [-1.0, 1.0]
        float scale = 1.0 / (float)INT16_MAX;
        vDSP_vsmul(audioManager.inData, 1, &scale, audioManager.inData, 1,
                   inNumberFrames * audioManager.numChannels);
        // clean up the buffer
        free(bufferList.mBuffers[0].mData);
        return noErr;
    }
}
@end
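// --- Usage sketch (not part of the original file) ---
// A minimal, hedged example of how a caller might drive this singleton from
// application code; the function names below are hypothetical.
__unused static void StartMicrophoneCapture(void)
{
    // Lazily creates the shared instance, which configures the audio session
    // and the RemoteIO unit in -init, then starts pulling microphone buffers
    // through inputCallback.
    [[AudioManager audioManager] start];
}
__unused static void StopMicrophoneCapture(void)
{
    [[AudioManager audioManager] stop];
}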