android - How exactly does the AudioRecord class work? -


Please see these other questions first, because I think they are related:
question 1
question 2
question 3
The code I am using performs a pass-through of the audio signal obtained at the mic to the speaker, when a button is pressed:

public class mainactivity extends activity {     audiomanager = null;     audiorecord record =null;     audiotrack track =null;     final int sample_frequency = 44100;     final int size_of_record_array = 1024;  // 1024 original     final int wav_sample_multiplication_factor = 1;     int i= 0;     boolean isplaying = false;     private volatile boolean keepthreadrunning;     private randomaccessfile statefile, statefiletemp;     private file delfile, renfile;     string statefileloc = environment.getexternalstoragedirectory().getpath();      class mythread extends thread{         private volatile boolean needstopassthrough;         // /*         mythread(){             super();         }          mythread(boolean newptv){             this.needstopassthrough = newptv;         }         // */          // /*         @override         public void run(){             // short[] lin = new short[size_of_record_array];             byte[] lin = new byte[size_of_record_array];             int num = 0;             // /*             if(needstopassthrough){                 record.startrecording();                 track.play();             }             // */             while (keepthreadrunning) {             // while (!isinterrupted()) {                 num = record.read(lin, 0, size_of_record_array);                 for(i=0;i<lin.length;i++)                     lin[i] *= wav_sample_multiplication_factor;                  track.write(lin, 0, num);             }             // /*             record.stop();             track.stop();             record.release();             track.release();             // */         }         // */          // /*         public void stopthread(){             keepthreadrunning = false;         }         // */     }      mythread newthread;      private void init() {         int min = audiorecord.getminbuffersize(sample_frequency, audioformat.channel_in_mono, audioformat.encoding_pcm_16bit);         record = new 
audiorecord(mediarecorder.audiosource.voice_communication, sample_frequency, audioformat.channel_in_mono,                                  audioformat.encoding_pcm_16bit, min);         int maxjitter = audiotrack.getminbuffersize(sample_frequency, audioformat.channel_out_mono, audioformat.encoding_pcm_16bit);         track = new audiotrack(audiomanager.mode_in_communication, sample_frequency, audioformat.channel_out_mono,                                audioformat.encoding_pcm_16bit, maxjitter, audiotrack.mode_stream);         = (audiomanager) this.getsystemservice(context.audio_service);         am.setmode(audiomanager.mode_in_communication);          try {             statefile = new randomaccessfile(statefileloc+"/appstate.txt", "rwd");             statefiletemp = new randomaccessfile(statefileloc+"/appstatetemp.txt", "rwd");         } catch (filenotfoundexception e) {             // todo auto-generated catch block             e.printstacktrace();         }         delfile = new file(statefileloc+"/appstate.txt");         renfile = new file(statefileloc+"/appstatetemp.txt");      }      @override     protected void onresume(){         super.onresume();         // newthread.stopthread();         log.d("mylog", "onresume() called");         init();         keepthreadrunning = true;         try {             if(statefile.readint() == 1){                 isplaying = true;                 log.d("mylog", "readint == 1");             }             else{                 isplaying = false;                 log.d("mylog", "readint <> 1");             }         } catch (ioexception e) {             // todo auto-generated catch block             e.printstacktrace();         }         // */           // newthread = new mythread(true);         newthread = new mythread(isplaying);         newthread.start();      }      @override     protected void onpause(){         super.onpause();         log.d("mylog", "onpause() called");         newthread.stopthread();         // 
android.os.process.killprocess(android.os.process.mypid());         try {             if(isplaying)                 statefiletemp.writeint(1);             else                 statefiletemp.writeint(0);              delfile.delete();              renfile.renameto(delfile);          } catch (ioexception e) {             // todo auto-generated catch block             e.printstacktrace();         }     }      @override     protected void oncreate(bundle savedinstancestate) {         super.oncreate(savedinstancestate);         setcontentview(r.layout.activity_main);         setvolumecontrolstream(audiomanager.mode_in_communication);         log.d("mylog","oncreate() called");     }      @override     public boolean oncreateoptionsmenu(menu menu) {         // inflate menu; adds items action bar if present.         getmenuinflater().inflate(r.menu.main, menu);         return true;     }      @override     protected void ondestroy() {         super.ondestroy();         newthread.stopthread();         // android.os.process.killprocess(android.os.process.mypid());         // killprocess(android.os.process.mypid());         // newthread.interrupt();         delfile.delete();          log.d("mylog", "ondestroy() called");     }      public void passstop(view view){         button playbtn = (button) findviewbyid(r.id.button1);           // /*         if(!isplaying){             record.startrecording();             track.play();             isplaying = true;             playbtn.settext("pause");         }         else{            record.stop();            track.pause();            isplaying=false;            playbtn.settext("pass through");         }         // */     }   

The files appstate.txt and appstatetemp.txt were added to save whether the pass-through was being performed when the app last lost focus; they are not significant here. What I want to know is:

  1. What happens when record.read() is called without calling record.startRecording()?

  2. What is the significance of SIZE_OF_RECORD_ARRAY? I thought it should be at least the value returned by AudioRecord.getMinBufferSize(), but in this program it doesn't affect the output at all, even if I set it to 1.

  3. If I use 16-bit PCM encoding, I need at least a short variable to store the digital equivalent of the audio samples. But in this code, if I change the lin variable from a short array to a byte array, there is no apparent change in the output. So how does the read function store the digital samples in the array? Does it automatically allocate 2-byte elements for each sample? If that is the case, is it little-endian or big-endian?

Questions 1 and 3 should be easy to check in the app itself, but here goes:

1: What happens when record.read() is called without calling record.startRecording()?

I would expect there to be no flow of data from the underlying audio input stream, and that read() therefore returns 0 — or possibly an error code — indicating that no data has been read.


2: What is the significance of SIZE_OF_RECORD_ARRAY? I thought it should be at least the value returned by AudioRecord.getMinBufferSize(), but in this program it doesn't affect the output at all, even if I set it to 1.

The value of getMinBufferSize matters when you specify the buffer size in the call to the AudioRecord constructor. By changing SIZE_OF_RECORD_ARRAY you are only changing the amount of data you read with each call to read() — and while it isn't a particularly good idea to call read() once per byte (because of the overhead of all those function calls), I can imagine that it would still work.


3: If I use 16-bit PCM encoding, I need at least a short variable to store the digital equivalent of the audio samples. But in this code, if I change the lin variable from a short array to a byte array, there is no apparent change in the output. So how does the read function store the digital samples in the array? Does it automatically allocate 2-byte elements for each sample? If that is the case, is it little-endian or big-endian?

The underlying native code uses the byte-array version; the short-array version is a wrapper around the byte version. So yes, a pair of bytes is used for each sample in that case.
As for endianness: it is little-endian on the vast majority of Android devices out there.