
Objective-C: Send audio data in RTP packets via a socket


In my app, I have to capture the microphone and send the audio data in RTP packets. But I can only find examples of receiving RTP data, such as iOS RTP live audio receiving, or questions that went unanswered.

I used the following code with GCDAsyncUdpSocket to send the audio data, but it isn't wrapped in an RTP packet. Is there a library that wraps my audio data in RTP packets?

Initialize the GCDAsyncUdpSocket:

udpSender = [[GCDAsyncUdpSocket alloc] initWithDelegate:self delegateQueue:dispatch_get_main_queue()];

NSError *error;
[udpSender connectToHost:@"192.168.1.29" onPort:1024 error:&error];
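
GCDAsyncUdpSocket's connectToHost:onPort:error: is asynchronous, so the delegate callbacks are the natural place to confirm the connection and catch failed sends. A minimal sketch (these are the CocoaAsyncSocket delegate methods; the logging is just illustrative):

- (void)udpSocket:(GCDAsyncUdpSocket *)sock didConnectToAddress:(NSData *)address {
    NSLog(@"UDP socket connected");
}

- (void)udpSocket:(GCDAsyncUdpSocket *)sock didNotSendDataWithTag:(long)tag dueToError:(NSError *)error {
    NSLog(@"UDP send failed: %@", error);
}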

I send audio data in my playback callback function:

static OSStatus playbackCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData) {

    // This is the reference to the object that owns the callback.
    AudioProcessor *audioProcessor = (AudioProcessor *)inRefCon;

    // Iterate over the incoming stream and copy it to the output stream.
    for (int i = 0; i < ioData->mNumberBuffers; i++) {
        AudioBuffer buffer = ioData->mBuffers[i];

        // Find the minimum size.
        UInt32 size = MIN(buffer.mDataByteSize, [audioProcessor audioBuffer].mDataByteSize);

        // Copy to the audio buffer, which gets played after the function returns.
        memcpy(buffer.mData, [audioProcessor audioBuffer].mData, size);

        // Set the data size.
        buffer.mDataByteSize = size;

        // Send the data to the remote server. udpSender is the
        // GCDAsyncUdpSocket created above (it must be visible here,
        // e.g. as a file-scope variable).
        NSMutableData *data = [[NSMutableData alloc] init];
        [data appendBytes:buffer.mData length:size];
        if ([udpSender isConnected])
        {
            [udpSender sendData:data withTimeout:-1 tag:1];
        }
    }

    return noErr;
}
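
For reference, an RTP packet is just the payload prefixed with a 12-byte header defined in RFC 3550, so even without a library it can be built by hand. Below is a minimal sketch, not a drop-in for the code above: the payload type (96, dynamic) and the SSRC are placeholder assumptions, and the sequence number and timestamp must advance with every packet.

#include <arpa/inet.h>   // htons/htonl

// 12-byte RTP header (RFC 3550); all multi-byte fields are big-endian.
typedef struct {
    uint8_t  vpxcc;      // version(2) | padding(1) | extension(1) | CSRC count(4)
    uint8_t  mpt;        // marker(1) | payload type(7)
    uint16_t sequence;
    uint32_t timestamp;
    uint32_t ssrc;
} RTPHeader;

static NSData *makeRTPPacket(const void *payload, UInt32 length,
                             uint16_t seq, uint32_t timestamp) {
    RTPHeader header;
    header.vpxcc     = 0x80;              // version 2, no padding/extension/CSRCs
    header.mpt       = 96;                // dynamic payload type (assumption)
    header.sequence  = htons(seq);
    header.timestamp = htonl(timestamp);
    header.ssrc      = htonl(0x12345678); // arbitrary stream identifier

    NSMutableData *packet = [NSMutableData dataWithBytes:&header
                                                  length:sizeof(header)];
    [packet appendBytes:payload length:length];
    return packet;
}

In the callback above you would then send the wrapped packet instead of the raw data, incrementing seq by one per packet and the timestamp by inNumberFrames. A fully standards-compliant stream also needs the codec-specific RTP payload format (e.g. RFC 3640 for AAC).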

How do I accomplish this?

Thanks.


Solution

  • Finally, here's my solution.
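
    For context, here is a sketch of the class declaration these snippets imply. The class name is hypothetical and the exact declaration is an assumption, but the ivar names all come from the code below:

    @interface AudioSender : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate, NSStreamDelegate>
    {
        AVCaptureSession *m_capture;   // microphone capture session
        NSInputStream    *iStream;     // read side of the TCP socket
        NSOutputStream   *oStream;     // write side of the TCP socket
        NSMutableData    *globalData;  // pending encoded audio bytes
        BOOL              isConnect;   // set once the connection is up
    }
    -(void)open;
    -(void)initialSocket;
    -(void)sendAudioData:(char *)buffer len:(int)len channel:(UInt32)channel;
    @end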

    Set up the microphone capture process:

    -(void)open {
        NSError *error;
        m_capture = [[AVCaptureSession alloc] init];
        AVCaptureDevice *audioDev = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        if (audioDev == nil)
        {
            printf("Couldn't create audio capture device");
            return;
        }
        //m_capture.sessionPreset = AVCaptureSessionPresetLow;

        // create mic device
        AVCaptureDeviceInput *audioIn = [AVCaptureDeviceInput deviceInputWithDevice:audioDev error:&error];
        if (error != nil)
        {
            printf("Couldn't create audio input");
            return;
        }

        // add mic device to the capture session
        if ([m_capture canAddInput:audioIn] == NO)
        {
            printf("Couldn't add audio input");
            return;
        }
        [m_capture addInput:audioIn];

        // export audio data
        AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [audioOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
        if ([m_capture canAddOutput:audioOutput] == NO)
        {
            printf("Couldn't add audio output");
            return;
        }
        [m_capture addOutput:audioOutput];
        [audioOutput connectionWithMediaType:AVMediaTypeAudio];
        [m_capture startRunning];
    }
    

    Capture the microphone data:

    -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
        char szBuf[450];
        int  nSize = sizeof(szBuf);

        if (isConnect == YES)
        {
            if ([self encoderAAC:sampleBuffer aacData:szBuf aacLen:&nSize] == YES)
            {
                [self sendAudioData:szBuf len:nSize channel:0];
            }
        }
    }
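
    The encoderAAC:aacData:aacLen: helper above is the author's own and its implementation isn't shown. Purely as an illustration, here is a rough sketch of what such a helper could look like with AudioToolbox's AudioConverter; the format values are assumptions, and a real implementation would create the converter once and reuse it rather than once per buffer:

    #import <AudioToolbox/AudioToolbox.h>
    #import <CoreMedia/CoreMedia.h>

    typedef struct {
        char  *pcm;            // captured PCM bytes
        UInt32 bytesLeft;      // bytes not yet handed to the converter
        UInt32 bytesPerPacket; // from the input format
    } EncoderInput;

    // Input proc: feeds the captured PCM to the converter in one shot.
    static OSStatus feedPCM(AudioConverterRef converter, UInt32 *ioNumPackets,
                            AudioBufferList *ioData,
                            AudioStreamPacketDescription **outPacketDesc,
                            void *inUserData) {
        EncoderInput *input = (EncoderInput *)inUserData;
        if (input->bytesLeft == 0) { *ioNumPackets = 0; return -1; } // no more input
        ioData->mBuffers[0].mData = input->pcm;
        ioData->mBuffers[0].mDataByteSize = input->bytesLeft;
        *ioNumPackets = input->bytesLeft / input->bytesPerPacket;
        input->bytesLeft = 0;
        return noErr;
    }

    -(BOOL)encoderAAC:(CMSampleBufferRef)sampleBuffer aacData:(char *)aacData aacLen:(int *)aacLen {
        // Raw PCM bytes from the capture callback.
        char *pcm = NULL; size_t pcmLen = 0;
        CMBlockBufferGetDataPointer(CMSampleBufferGetDataBuffer(sampleBuffer),
                                    0, NULL, &pcmLen, &pcm);

        // Input format comes from the sample buffer; output is AAC.
        const AudioStreamBasicDescription *inFmt =
            CMAudioFormatDescriptionGetStreamBasicDescription(
                CMSampleBufferGetFormatDescription(sampleBuffer));
        AudioStreamBasicDescription outFmt = {0};
        outFmt.mSampleRate       = inFmt->mSampleRate;
        outFmt.mFormatID         = kAudioFormatMPEG4AAC;
        outFmt.mChannelsPerFrame = inFmt->mChannelsPerFrame;
        outFmt.mFramesPerPacket  = 1024;   // AAC frame size

        AudioConverterRef converter = NULL;
        if (AudioConverterNew(inFmt, &outFmt, &converter) != noErr) return NO;

        EncoderInput input = { pcm, (UInt32)pcmLen, inFmt->mBytesPerPacket };
        AudioBufferList out = {0};
        out.mNumberBuffers = 1;
        out.mBuffers[0].mNumberChannels = outFmt.mChannelsPerFrame;
        out.mBuffers[0].mDataByteSize   = (UInt32)*aacLen;
        out.mBuffers[0].mData           = aacData;

        UInt32 numPackets = 1;             // ask for one AAC packet
        AudioConverterFillComplexBuffer(converter, feedPCM, &input,
                                        &numPackets, &out, NULL);
        AudioConverterDispose(converter);
        if (numPackets == 0) return NO;
        *aacLen = (int)out.mBuffers[0].mDataByteSize;
        return YES;
    }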
    

    Initialize the socket:

    -(void)initialSocket{
        //Use socket
        printf("initialSocket\n");
        CFReadStreamRef readStream = NULL;
        CFWriteStreamRef writeStream = NULL;

        NSString *ip = @"192.168.1.147";   //Your IP address
        UInt32 port = 22133;

        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, (__bridge CFStringRef)ip, port, &readStream, &writeStream);
        if (readStream && writeStream) {
            CFReadStreamSetProperty(readStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
            CFWriteStreamSetProperty(writeStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);

            iStream = (__bridge NSInputStream *)readStream;
            [iStream setDelegate:self];
            [iStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            [iStream open];

            oStream = (__bridge NSOutputStream *)writeStream;
            [oStream setDelegate:self];
            [oStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            [oStream open];
        }
    }
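
    Note that the isConnect flag checked during capture is never set in the code shown. As the NSStreamDelegate, this class would typically set it in stream:handleEvent:; a hypothetical sketch:

    -(void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode {
        switch (eventCode) {
            case NSStreamEventOpenCompleted:
                if (aStream == oStream) {
                    isConnect = YES;   // output stream is ready for writes
                }
                break;
            case NSStreamEventErrorOccurred:
            case NSStreamEventEndEncountered:
                isConnect = NO;        // stop sending on error or close
                break;
            default:
                break;
        }
    }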
    

    Send the data to the socket whenever data is captured from the microphone:

    -(void)sendAudioData:(char *)buffer len:(int)len channel:(UInt32)channel
    {
        // Queue the encoded bytes until the stream is ready.
        [globalData appendBytes:buffer length:len];

        if (isConnect == YES)
        {
            if ([oStream streamStatus] == NSStreamStatusOpen)
            {
                [oStream write:globalData.mutableBytes maxLength:globalData.length];
                globalData = [[NSMutableData alloc] init];
            }
        }
    }
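
    One caveat: write:maxLength: may accept fewer bytes than requested, and the code above discards whatever didn't fit. A sketch that keeps the unwritten tail for the next call instead:

        NSInteger written = [oStream write:(const uint8_t *)globalData.bytes
                                 maxLength:globalData.length];
        if (written > 0) {
            // Drop only the bytes the stream actually accepted.
            [globalData replaceBytesInRange:NSMakeRange(0, (NSUInteger)written)
                                  withBytes:NULL
                                     length:0];
        }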
    

    Hope this will help someone.