macOS Version(s) Used to Build
macOS 12 Monterey
Xcode Version(s)
Xcode 13.4.1
Description
If noteOn events are called while AudioKit's AppleSampler() is still loading, the app can occasionally crash on line 121 of DSPBase.mm. Here is a test project that illustrates the crash: https://github.com/NickCulbertson/TestRenderBlock
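For context, the timing involved looks roughly like the sketch below. This is not code from the linked project, and it drives Apple's AVAudioUnitSampler directly rather than AudioKit's AppleSampler() (which wraps it), so it only illustrates the noteOn-while-loading overlap; the crash itself reproduces through AudioKit as in the test project.

#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>

static AVAudioEngine *engine;
static AVAudioUnitSampler *sampler;

// Illustrative only: start the engine, kick off instrument loading on a
// background queue, and send a note before loading has finished — the
// overlap described above.
void startAndPlayImmediately(NSURL *soundFontURL)
{
    engine = [[AVAudioEngine alloc] init];
    sampler = [[AVAudioUnitSampler alloc] init];
    [engine attachNode:sampler];
    [engine connect:sampler to:engine.mainMixerNode format:nil];
    [engine startAndReturnError:nil];

    // Loading happens asynchronously...
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [sampler loadSoundBankInstrumentAtURL:soundFontURL
                                      program:0
                                      bankMSB:kAUSampler_DefaultMelodicBankMSB
                                      bankLSB:kAUSampler_DefaultBankLSB
                                        error:nil];
    });

    // ...while noteOn events arrive right away.
    [sampler startNote:60 withVelocity:100 onChannel:0];
}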
This is the current AUInternalRenderBlock:
AUInternalRenderBlock DSPBase::internalRenderBlock()
{
    return ^AUAudioUnitStatus(
        AudioUnitRenderActionFlags *actionFlags,
        const AudioTimeStamp *timestamp,
        AUAudioFrameCount frameCount,
        NSInteger outputBusNumber,
        AudioBufferList *outputData,
        const AURenderEvent *realtimeEventListHead,
        AURenderPullInputBlock __unsafe_unretained pullInputBlock)
    {
        assert( (outputBusNumber == 0) && "We don't yet support multiple output busses" );

        if (pullInputBlock) {
            if (bCanProcessInPlace && inputBufferLists.size() == 1) {
                // pull input directly to output buffer
                inputBufferLists[0] = outputData;
                AudioUnitRenderActionFlags inputFlags = 0;
                pullInputBlock(&inputFlags, timestamp, frameCount, 0, inputBufferLists[0]);
            }
            else {
                // pull input to internal buffer
                for (size_t i = 0; i < inputBufferLists.size(); i++) {
                    inputBufferLists[i] = internalBufferLists[i];
                    UInt32 byteSize = frameCount * sizeof(float);
                    for (UInt32 ch = 0; ch < inputBufferLists[i]->mNumberBuffers; ch++) {
                        inputBufferLists[i]->mBuffers[ch].mDataByteSize = byteSize;
                    }
                    AudioUnitRenderActionFlags inputFlags = 0;
                    pullInputBlock(&inputFlags, timestamp, frameCount, i, inputBufferLists[i]); // CRASH HERE
                }
            }
        }

        outputBufferList = outputData;
        processWithEvents(timestamp, frameCount, realtimeEventListHead);
        return noErr;
    };
}
Just a thought, but this is the render block Apple generates in the DSPKernel-based AUv3 template. It might be useful for comparison:
// MARK: - AUAudioUnit (AUAudioUnitImplementation)

// Subclassers must provide a AUInternalRenderBlock (via a getter) to implement rendering.
- (AUInternalRenderBlock)internalRenderBlock {
    /*
        Capture in locals to avoid ObjC member lookups. If "self" is captured in
        render, we're doing it wrong.
    */
    // Specify captured objects are mutable.
    __block OverdriveSynthDSPKernel *state = &_kernel;
    __block BufferedInputBus *input = &_inputBus;

    return ^AUAudioUnitStatus(AudioUnitRenderActionFlags *actionFlags,
                              const AudioTimeStamp *timestamp,
                              AVAudioFrameCount frameCount,
                              NSInteger outputBusNumber,
                              AudioBufferList *outputData,
                              const AURenderEvent *realtimeEventListHead,
                              AURenderPullInputBlock __unsafe_unretained pullInputBlock) {
        AudioUnitRenderActionFlags pullFlags = 0;

        if (frameCount > state->maximumFramesToRender()) {
            return kAudioUnitErr_TooManyFramesToProcess;
        }

        AUAudioUnitStatus err = input->pullInput(&pullFlags, timestamp, frameCount, 0, pullInputBlock);

        if (err != noErr) { return err; }

        AudioBufferList *inAudioBufferList = input->mutableAudioBufferList;

        /*
            Important:
            If the caller passed non-null output pointers (outputData->mBuffers[x].mData), use those.

            If the caller passed null output buffer pointers, process in memory owned by the Audio Unit
            and modify the (outputData->mBuffers[x].mData) pointers to point to this owned memory.
            The Audio Unit is responsible for preserving the validity of this memory until the next call to render,
            or deallocateRenderResources is called.

            If your algorithm cannot process in-place, you will need to preallocate an output buffer
            and use it here.

            See the description of the canProcessInPlace property.
        */

        // If passed null output buffer pointers, process in-place in the input buffer.
        AudioBufferList *outAudioBufferList = outputData;
        if (outAudioBufferList->mBuffers[0].mData == nullptr) {
            for (UInt32 i = 0; i < outAudioBufferList->mNumberBuffers; ++i) {
                outAudioBufferList->mBuffers[i].mData = inAudioBufferList->mBuffers[i].mData;
            }
        }

        state->setBuffers(inAudioBufferList, outAudioBufferList);
        state->processWithEvents(timestamp, frameCount, realtimeEventListHead, nil /* MIDIOutEventBlock */);

        return noErr;
    };
}
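Comparing the two, one difference that stands out (an observation only, not a confirmed fix for this crash): Apple's block rejects oversized render requests with kAudioUnitErr_TooManyFramesToProcess and pulls input through a BufferedInputBus before touching any buffers, while DSPBase's block calls pullInputBlock unconditionally. A rough sketch of what the same guard could look like in DSPBase's block, assuming the DSP kept a copy of the maximum frame count configured on the AUAudioUnit (the maxFramesToRender member below is illustrative, not an existing DSPBase member):

// Sketch only, not a proposed patch. `maxFramesToRender` is an assumed member
// mirroring AUAudioUnit.maximumFramesToRender on the DSP side.
AUInternalRenderBlock DSPBase::internalRenderBlock()
{
    return ^AUAudioUnitStatus(
        AudioUnitRenderActionFlags *actionFlags,
        const AudioTimeStamp *timestamp,
        AUAudioFrameCount frameCount,
        NSInteger outputBusNumber,
        AudioBufferList *outputData,
        const AURenderEvent *realtimeEventListHead,
        AURenderPullInputBlock __unsafe_unretained pullInputBlock)
    {
        // Bail out before touching any input buffers, as Apple's generated block does.
        if (frameCount > maxFramesToRender) {
            return kAudioUnitErr_TooManyFramesToProcess;
        }

        // ... existing pull-input / processWithEvents code unchanged ...
        return noErr;
    };
}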