java, android, audio-recording, broadcast

How to stream mic audio to the same device through speakers


I am trying to capture audio from the mic and stream it through speakers attached to the AUX output. I used the code below, but the AudioRecord fails to initialize. My real question is: is this the right way to do it, or is there a better way? And if this is the right way, how do I fix the audio initialization problem?

public class MainActivity extends AppCompatActivity {


    private static final String TAG = "MainActivity";
    // the buttons to start and stop the broadcast
    Button mStartBoadCast;
    Button mStopBoadCast;

    // variables for audio recording
    AudioRecord recorder;
    private int sampleRate = 44100;
    private int channelConfig = AudioFormat.CHANNEL_IN_DEFAULT;
    private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    private boolean status = true;

    // AudioTrack instance used to play back the audio read from the mic
    private AudioTrack speaker;


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        //init layout views
        initViews();

        // wire the start broadcast button
        initStartBoadCast();

        // wire the stop broadcast button
        initStopBoadCast();

    }

    private void initStartBoadCast() {
        mStartBoadCast.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startBoadCast();
            }
        });
    }

    private void initStopBoadCast() {
        mStopBoadCast.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                stopBoadCast();
            }
        });
    }

    private void stopBoadCast() {
        // stop the broadcast and release the audio resources
        status = false;
        recorder.release();
        speaker.release();

    }

    private void startPlayingAudio(byte[] buffer, int minBufSize) {

        status = true;


        speaker = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat, minBufSize, AudioTrack.MODE_STREAM);

        speaker.play();

        while (status) {

            speaker.write(buffer, 0, minBufSize);
            Log.d("VR", "Writing buffer content to speaker");


        }


    }

    private void startBoadCast() {


        status = true;


        Log.d("VS", "Socket Created");

        byte[] buffer = new byte[minBufSize];


        recorder = findAudioRecord();
        Log.d("VS", "Recorder initialized");


        recorder.startRecording();


        while (status) {


            //reading data from MIC into buffer
            minBufSize = recorder.read(buffer, 0, buffer.length);

            /**
             * we now have a chunk of recorded data, so play it back
             */
            startPlayingAudio(buffer, minBufSize);

            System.out.println("MinBufferSize: " + minBufSize);
        }


    }

    private void initViews() {
        mStartBoadCast = findViewById(R.id.start_boadcast);
        mStopBoadCast = findViewById(R.id.stop_boadcast);
    }


    private static int[] mSampleRates = new int[]{8000, 11025, 22050, 44100};

    public AudioRecord findAudioRecord() {
        for (int rate : mSampleRates) {
            for (short audioFormat : new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
                for (short channelConfig : new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
                    try {
                        Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
                                + channelConfig);
                        int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

                        if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                            // check if we can instantiate and have a success
                            AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);

                            if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
                                return recorder;
                        }
                    } catch (Exception e) {
                        Log.e(TAG, rate + "Exception, keep trying.", e);
                    }
                }
            }
        }
        return null;
    }


}

Solution

  • The devil is in the details: you didn't tell us how this fails or what kind of error it gives. I had to write a similar program, and this is how I did it.

    The Audio Class first:

    class audio {
    
    AudioRecord arec;
    AudioTrack atrack;
    private volatile boolean isRecording= false;
    
    private static int buffer_size;
    //final short[] buffer = new short[buffersize];
    //short[] readbuffer = new short[buffersize];
    
    
    private int sample_rate;//the rate of recording used to initialise AudioRecord
    private int[] msample_rates = new int[]{44100, 22050, 11025, 8000};
    
    private short audio_format;
    private short[] audio_formats = new short[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT};
    
    private short channel_config;
    private short[] channel_configs = new short[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO};
    private short channelOutConfig;
    
    public AudioRecord findAudioRecord(){
        for (int rate_f : msample_rates){
            for (short audioformat_f : audio_formats){
                for (short channelconfig_f : channel_configs){
    
                    try {
                        Log.i("AudioC", "Attempting rate : "+ rate_f + "Hz, bits: " + audioformat_f + ", Channel: " + channelconfig_f);
                        int buffersize_f = AudioRecord.getMinBufferSize(rate_f, channelconfig_f, audioformat_f);
    
                        Log.i("AudioC", "Buffersize: " + buffersize_f);
                        if (buffersize_f != AudioRecord.ERROR_BAD_VALUE){
                            //Check of we can instantiate and have a success
                            AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, rate_f, channelconfig_f, audioformat_f, buffersize_f);
    
                            Log.i("AudioC", "Recorder State Value: " + recorder.getState());
                            if(recorder.getState() == AudioRecord.STATE_INITIALIZED){
                                Log.i("Audio", "Success");
                                //global values
                                buffer_size = buffersize_f;
                                sample_rate = rate_f;
                                audio_format = audioformat_f;
                                channel_config = channelconfig_f;
    
                                if (channelconfig_f == AudioFormat.CHANNEL_IN_MONO) channelOutConfig = AudioFormat.CHANNEL_OUT_MONO;
                                else channelOutConfig = AudioFormat.CHANNEL_OUT_STEREO;
                                return recorder;
                            }
    
                        }
                    }catch (Exception e){
                        Log.i("AudioC", rate_f + " Exception, keep trying." + e);
                    }
                }
            }
        }
        Log.i("AudioC", "Failed to initialise the audio record state");
        return null;
    
    }
    
    public void run(){
    
        isRecording = true;
        //initialization
        android.os.Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);
    
        //getValidSampleRates();
       // int buffersize = AudioRecord.getMinBufferSize(sample_rate,AudioFormat.CHANNEL_IN_MONO,AudioFormat.ENCODING_PCM_16BIT);
       // arec = new AudioRecord(MediaRecorder.AudioSource.MIC,sample_rate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffersize);
        //The above lines were replaced by a looping function that tests every combination of rate, audio format and channel config, which usually differs between Android devices
        arec = findAudioRecord();
        if (arec == null) return; //no working recorder configuration was found
    
    
        atrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
                sample_rate,
                channelOutConfig,
                audio_format,
                buffer_size,
                AudioTrack.MODE_STREAM);
        atrack.setPlaybackRate(sample_rate);
        //run
        byte[] buffer = new byte[buffer_size];
        arec.startRecording();
        atrack.play();
    
        while (isRecording){
            arec.read(buffer, 0, buffer_size);
            atrack.write(buffer, 0, buffer.length);
        }
        arec.release();
        atrack.release();
    }
    
    
    public void stop(){
        isRecording = false;
        arec.stop();
        atrack.stop();
        //run() releases arec and atrack once its loop exits
    }
    

    }

    Then call it from a thread or an IntentService in the activity (MainActivity) so the app does not freeze. Example using a thread (an IntentService sketch follows after this class):

    class playerTask implements Runnable{
    
    public audio mic_player = new audio(); //The audio class we declared above
    private Thread t;
    
    public playerTask(){
    
    }
    public void execTask(){
    
        t = new Thread(this,"playing_Thread");
        t.start();
    
    }
    public void abortTask(){
    
        mic_player.stop();
    }
    
    public void run(){
        mic_player.run();
    
    }
    

    }
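
    If you would rather use an IntentService, as mentioned above, a minimal sketch could look like the following. The class name MicPlayerService is just a placeholder; declare the service in AndroidManifest.xml and start/stop it with startService()/stopService() from the activity.

    import android.app.IntentService;
    import android.content.Intent;

    public class MicPlayerService extends IntentService {

        //the audio class declared above
        private final audio mic_player = new audio();

        public MicPlayerService() {
            super("MicPlayerService");
        }

        @Override
        protected void onHandleIntent(Intent intent) {
            //runs on the IntentService worker thread, so the while loop
            //does not block the UI
            mic_player.run();
        }

        @Override
        public void onDestroy() {
            //stopService(...) from the activity lands here and ends the loop
            mic_player.stop();
            super.onDestroy();
        }
    }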

    And then from your activity,

    public class YourActivity extends AppCompatActivity {
    
        ToggleButton onOff;
        TextView messagefield; //status text used below; bind it to a TextView in your layout
        boolean playing = false;
    
    
        // If the recording loop ran on the main thread, the while loop would freeze the screen,
        //because it never yields control until it is forced to stop.
        //That is why the recording gets its own thread, so sound plays without freezing the rest of the app.
        //private Handler myHandler
    
    
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.activity_activity_main);
    
            //Handler
    
            //Route the sound to the AUX only and always
            AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
            audioManager.setMode(AudioManager.MODE_IN_CALL);
            audioManager.setSpeakerphoneOn(false);
            audioManager.setWiredHeadsetOn(true);
            audioManager.setBluetoothScoOn(false);
    
            final playerTask pl_task = new playerTask();
          /////Use a toggle button to start or stop the recording///You could use anything
            messagefield = (TextView) findViewById(R.id.message_field); //placeholder id: use whatever status TextView your layout has
            onOff = (ToggleButton)findViewById(R.id.OnOff);
            onOff.setTextOff("PLAY");
            onOff.setTextOn("STOP");
            onOff.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener(){
                public void onCheckedChanged(CompoundButton buttonview, boolean isChecked){
                    //Running thread

                    if (isChecked){
                        messagefield.setText("Playing");
                        pl_task.execTask();  ///Start streaming
    
                    }
                    else{
                        messagefield.setText("Not Playing");
                        pl_task.abortTask();  ///Stop streaming
    
                    }
                }
            });
    

        }
    }
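
    The activity above forces MODE_IN_CALL and wired-headset routing for the whole device, so you may want to restore the defaults when the activity goes away. A minimal sketch (not part of the original answer):

    @Override
    protected void onDestroy() {
        //put the audio routing back the way we found it
        AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        audioManager.setMode(AudioManager.MODE_NORMAL);
        super.onDestroy();
    }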

    These are the permissions you need to request:

    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
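
    On Android 6.0 and above, RECORD_AUDIO is a dangerous permission, so the manifest entry alone is not enough; you also have to request it at runtime before starting the stream. A minimal sketch using the androidx.core ContextCompat and ActivityCompat helpers, called from your activity (the helper name and request code are arbitrary):

    private static final int REQ_RECORD_AUDIO = 1;

    private void requestMicPermissionIfNeeded() {
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.RECORD_AUDIO},
                    REQ_RECORD_AUDIO);
        }
    }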