The AudioRecord object is not initializing

In the code below my AudioRecord object is not initializing. I tried moving it into the onCreate method and making it a global field. I logged the state and it returns a value of 1, which I took to mean it is ready to use. The debugger says startRecording is being called on an uninitialized object, and also that it could not get the audio source.

Why am I getting these errors?

    package com.tecmark;

    import java.io.BufferedOutputStream;
    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;

    import android.app.Activity;
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.os.Environment;
    import android.util.Log;
    import android.view.View;
    import android.widget.TextView;

    public class recorder extends Activity {

        private Thread thread;
        private boolean isRecording;
        private AudioRecord recorder;
        private FileOutputStream os;
        private BufferedOutputStream bos;
        private DataOutputStream dos;
        private TextView text;
        private int audioSource = MediaRecorder.AudioSource.MIC;
        private int sampleRate = 22050;
        private int channel = AudioFormat.CHANNEL_CONFIGURATION_MONO;
        private int encoding = AudioFormat.ENCODING_PCM_16BIT;
        private int result = 0;
        private int bufferSize;
        private byte[] buffer;

        /** Called when the activity is first created. */
        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.main);
            Log.v("onCreate", "layout set, about to init audiorec obj");
            text = (TextView) findViewById(R.id.TextView01);
            bufferSize = AudioRecord.getMinBufferSize(sampleRate, channel, encoding);
            buffer = new byte[bufferSize];
            // The AudioRecord is created once here; its state is logged right after.
            recorder = new AudioRecord(audioSource, sampleRate, channel, encoding,
                    AudioRecord.getMinBufferSize(sampleRate, channel, encoding));
            Log.i("recorder obj state", "" + recorder.getRecordingState());
        }

        public void onClickPlay(View v) {
        }

        // Opens test.wav on external storage and streams raw PCM into it
        // until isRecording is cleared by onClickStop().
        public void record() {
            Log.i("inside record method", "******");
            File path = Environment.getExternalStorageDirectory();
            Log.v("file path", "" + path.getAbsolutePath());
            File file = new File(path, "test.wav");
            if (file.exists()) {
                file.delete();
            }
            path.mkdirs();
            Log.v("file path", "" + file.getAbsolutePath());
            try {
                os = new FileOutputStream(file);
                bos = new BufferedOutputStream(os);
                dos = new DataOutputStream(bos);
            } catch (Exception e1) {
                e1.printStackTrace();
            }
            int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channel, encoding);
            byte[] buffer = new byte[bufferSize];
            recorder.startRecording();
            isRecording = true;
            try {
                while (isRecording) {
                    result = recorder.read(buffer, 0, bufferSize);
                    for (int a = 0; a < result; a++) {
                        dos.write(buffer[a]);
                        if (!isRecording) {
                            recorder.stop();
                            break;
                        }
                    }
                }
                dos.flush();
                dos.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        } // end of record method

        public void onClickStop(View v) {
            Log.v("onClickStop", "stop clicked");
            isRecording = false;
        }

        public void onClickReverse(View v) {
            Log.v("onClickReverse", "reverse clicked");
        }

        public void onClickRecord(View v) {
            Log.v("onClickRecord", "record clicked, thread gonna start");
            text.setText("recording");
            thread = new Thread(new Runnable() {
                public void run() {
                    isRecording = true;
                    record();
                }
            });
            thread.start();
            isRecording = false;
        }
    } // end of class
*/ @Oviewride public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); Log.v("onCreate", "layout set, about to init audiorec obj"); text = (TextView)findViewById(R.id.TextView01); bufferSize = AudioRecord.getMinBufferSize(sampleRate,channel,encoding); buffer = new byte[bufferSize]; recorder = new AudioRecord(audioSource, sampleRate,channel,encoding, AudioRecord.getMinBufferSize(sampleRate, channel,encoding)); Log.i("recorder obj state",""+recorder.getRecordingState()); } public void onClickPlay(View v){ } public void record(){ Log.i("inside record method", "******"); File path = Environment.getExternalStorageDirectory(); Log.v("file path", ""+path.getAbsolutePath()); File file = new File(path, "test.wav"); if(file.exists()){ file.delete(); } path.mkdirs(); Log.v("file path", ""+file.getAbsolutePath()); try { os = new FileOutputStream(file); bos = new BufferedOutputStream(os); dos = new DataOutputStream(bos); } catch (Exception e1) { e1.printStackTrace(); } int bufferSize = AudioRecord.getMinBufferSize(sampleRate,channel,encoding); byte[] buffer = new byte[bufferSize]; recorder.stairtRecording(); isRecording = true; try{ while (isRecording){ result = recorder.read(buffer, 0, bufferSize); for(int a=0; a<result;a++){ dos.write(buffer[a]); if(!isRecording){ recorder.stop(); break; } } } dos.flush(); dos.close(); }catch(Exception e){ e.printStackTrace(); } }// end of record method public void onClickStop(View v){ Log.v("onClickStop", "stop clicked"); isRecording=false; } public void onClickReviewse(View v){ Log.v("onClickReviewse", "reviewse clicked"); } public void onClickRecord(View v){ Log.v("onClickRecourd", "record clicked, thread gona stairt"); text.setText("recording"); thread = new Thread(new Runnable() { public void run() { isRecording = true; record(); } }); thread.stairt(); isRecording = false; } }//end of class 

    Logcat

     01-30 15:23:16.724: ERROR/AudioRecord(12817): Could not get audio input for record source 1
     01-30 15:23:16.729: ERROR/AudioRecord-JNI(12817): Error creating AudioRecord instance: initialization check failed.
     01-30 15:23:16.729: ERROR/AudioRecord-Java(12817): [ android.media.AudioRecord ] Error code -20 when initializing native AudioRecord object.
     01-30 15:23:16.729: INFO/recorder obj state(12817): 1
     01-30 15:23:16.729: WARN/dalvikvm(12817): threadid=13: thread exiting with uncaught exception (group=0x4001b180)
     01-30 15:23:16.729: ERROR/AndroidRuntime(12817): Uncaught handler: thread Thread-7 exiting due to uncaught exception
     01-30 15:23:16.739: ERROR/AndroidRuntime(12817): java.lang.IllegalStateException: startRecording() called on an uninitialized AudioRecord.
     01-30 15:23:16.739: ERROR/AndroidRuntime(12817):     at android.media.AudioRecord.startRecording(AudioRecord.java:495)
     01-30 15:23:16.739: ERROR/AndroidRuntime(12817):     at com.tecmairk.recorder.record(recorder.java:114)
     01-30 15:23:16.739: ERROR/AndroidRuntime(12817):     at com.tecmairk.recorder$1.run(recorder.java:175)
     01-30 15:23:16.739: ERROR/AndroidRuntime(12817):     at java.lang.Thread.run(Thread.java:1096)

    11 Solutions collected from the web for "The AudioRecord object is not initializing"

    The trick with using AudioRecord is that each device may support different initialization settings, so you will have to write a method that loops over all the possible combinations of sample rates, encodings, and channel configurations:

     private static int[] mSampleRates = new int[] { 8000, 11025, 22050, 44100 };

     public AudioRecord findAudioRecord() {
         for (int rate : mSampleRates) {
             for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
                 for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
                     try {
                         Log.d(C.TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
                         int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

                         if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                             // check if we can instantiate and have a success
                             AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate, channelConfig, audioFormat, bufferSize);

                             if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
                                 return recorder;
                         }
                     } catch (Exception e) {
                         Log.e(C.TAG, rate + "Exception, keep trying.", e);
                     }
                 }
             }
         }
         return null;
     }

     AudioRecord recorder = findAudioRecord();
     recorder.release();

    I had the same problem; it was solved by putting

     <uses-permission android:name="android.permission.RECORD_AUDIO"></uses-permission> 

    in the manifest.

    According to the javadocs, every device is guaranteed to support this format (for recording):

    44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT.

    Change to CHANNEL_OUT_MONO for playback.
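    As a minimal sketch (the class and method names here are illustrative, not part of the answer above), creating a recorder with that guaranteed capture format and checking its state before use could look like this:

     // Sketch: build an AudioRecord with the format the docs guarantee for capture.
     // GuaranteedFormatExample / buildGuaranteedRecorder are hypothetical names.
     import android.media.AudioFormat;
     import android.media.AudioRecord;
     import android.media.MediaRecorder;

     public class GuaranteedFormatExample {
         private static final int SAMPLE_RATE = 44100;                       // guaranteed for recording
         private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;     // mono input
         private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT; // 16-bit PCM

         public static AudioRecord buildGuaranteedRecorder() {
             int minBuffer = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL, ENCODING);
             AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                     SAMPLE_RATE, CHANNEL, ENCODING, minBuffer);
             // Always verify the state before calling startRecording().
             if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
                 recorder.release();
                 return null;
             }
             return recorder;
         }
     }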

    Now, since Android 6.0 you have to explicitly request each dangerous permission from the user at runtime. Check that the permission has actually been granted.
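    A hedged sketch of that runtime request, assuming the AndroidX core library is on the classpath (the helper class and request-code constant are illustrative, not from the answer):

     // Sketch: request RECORD_AUDIO at runtime (API 23+) before touching AudioRecord.
     import android.Manifest;
     import android.app.Activity;
     import android.content.pm.PackageManager;
     import androidx.core.app.ActivityCompat;
     import androidx.core.content.ContextCompat;

     public class PermissionHelper {
         // Arbitrary request code used only for this illustration.
         private static final int REQUEST_RECORD_AUDIO = 1;

         /** Ask for RECORD_AUDIO at runtime if it has not been granted yet. */
         public static void ensureRecordPermission(Activity activity) {
             if (ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
                     != PackageManager.PERMISSION_GRANTED) {
                 // The result is delivered to activity.onRequestPermissionsResult(...).
                 ActivityCompat.requestPermissions(activity,
                         new String[] { Manifest.permission.RECORD_AUDIO },
                         REQUEST_RECORD_AUDIO);
             }
         }
     }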

    The problem initializing some AudioRecord objects can be fixed by calling audioRecord.release(); before creating the next object… More here: Android AudioRecord – won't initialize the 2nd time
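    A short sketch of that idea, written as a method that could live inside the question's recorder Activity (the method name recreateRecorder is mine; "recorder" is the field from the question's code):

     // Sketch: release the previous AudioRecord before creating a new one, otherwise
     // the native audio input can stay locked and the next instance comes back uninitialized.
     private void recreateRecorder(int audioSource, int sampleRate, int channel, int encoding) {
         if (recorder != null) {
             recorder.release();   // free the native resources held by the old instance
             recorder = null;
         }
         int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channel, encoding);
         recorder = new AudioRecord(audioSource, sampleRate, channel, encoding, bufferSize);
     }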

    Even after doing all of the steps above I was still getting the same problem; what worked for me was realizing that my OS was Marshmallow and I had to request the permissions at runtime.

    I just had the same problem. The solution was to restart the device. While playing with the code I had not released the AudioRecord object, which apparently locked up the audio device. To test whether the audio device was working or not, I downloaded Audalyzer from Google Play.

    If your phone is running Android M or above, you may need to request the audio recording permission at runtime: http://developer.android.com/guide/topics/security/permissions.html

    I noticed that when the SD card of the AVD I am running gets full, the AudioRecord constructor returns null. Have you tried clearing out the SD card?

    I think this has to do with the thread not knowing that you paused the main activity and still trying to record after you have stopped the recorder.

    I solved this by changing my onResume() and onPause() methods to modify the isRecording boolean.

     public void onResume() {
         ...
         isRecording = true;
     }

     public void onPause() {
         ...
         isRecording = false;
     }

    Then, in your thread, wrap both your startRecording() and stop() calls in if statements that check isRecording:

     if (isRecording)
         recorder.startRecording();
     ...
     if (isRecording)
         recorder.stop(); // which you've done

    I rewrote @DustinB's answer for anyone using Xamarin Android AudioRecord with C#.

     int[] sampleRates = new int[] { 44100, 22050, 11025, 8000 };
     Encoding[] encodings = new Encoding[] { Encoding.Pcm8bit, Encoding.Pcm16bit };
     ChannelIn[] channelConfigs = new ChannelIn[] { ChannelIn.Mono, ChannelIn.Stereo };

     // Not all of the formats are supported on each device
     foreach (int sampleRate in sampleRates)
     {
         foreach (Encoding encoding in encodings)
         {
             foreach (ChannelIn channelConfig in channelConfigs)
             {
                 try
                 {
                     Console.WriteLine("Attempting rate " + sampleRate + "Hz, bits: " + encoding + ", channel: " + channelConfig);
                     int bufferSize = AudioRecord.GetMinBufferSize(sampleRate, channelConfig, encoding);

                     if (bufferSize > 0)
                     {
                         // check if we can instantiate and have a success
                         AudioRecord recorder = new AudioRecord(AudioSource.Mic, sampleRate, channelConfig, encoding, bufferSize);

                         if (recorder.State == State.Initialized)
                         {
                             mBufferSize = bufferSize;
                             mSampleRate = sampleRate;
                             mChannelConfig = channelConfig;
                             mEncoding = encoding;
                             recorder.Release();
                             recorder = null;
                             return true;
                         }
                     }
                 }
                 catch (Exception ex)
                 {
                     Console.WriteLine(sampleRate + "Exception, keep trying." + ex.Message);
                 }
             }
         }
     }