I have a problem here. I have two methods: one of them shows the sound waves (you activate it and it draws what you speak as lines), and the other one, when activated, records what the person speaks.
I'm trying to run both at the same time — each one already works on its own. If anyone has an idea how I can do this, thank you very much.
Here is a link to download the project as a rar archive.
And here is the class with the separate methods
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import java.io.File;
import java.io.IOException;
/**
 * Activity that combines two independent microphone features:
 * (1) a live waveform/FFT visualisation drawn onto an ImageView, and
 * (2) simple voice recording/playback to a 3GPP file on external storage.
 *
 * NOTE(review): both features open MediaRecorder.AudioSource.MIC. Most Android
 * devices allow only ONE client on the microphone at a time, so running the
 * visualiser and the MediaRecorder simultaneously will fail unless a single
 * AudioRecord feeds both the FFT and an encoder. At minimum, each feature must
 * fully release() the mic before the other can start — see the fixes below.
 */
public class MainActivity extends AppCompatActivity implements OnClickListener {

    // --- Sound-wave (FFT visualisation) state ---
    int frequency = 8000;
    // Deprecated constant; kept for behavioural compatibility with the rest of
    // the project (CHANNEL_IN_DEFAULT is the modern equivalent).
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_DEFAULT;
    int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
    private RealDoubleFFT transformer;
    int blockSize = 256;
    Button startStopButton;
    // FIX: volatile — written on the UI thread (onClick) and read in the
    // AsyncTask's background loop; without volatile the background thread may
    // never observe "started = false" and the loop could spin forever.
    volatile boolean started = false;
    RecordAudio recordTask;
    ImageView imageView;
    Bitmap bitmap;
    Canvas canvas;
    Paint paint;

    // --- Audio recording/playback state ---
    private MediaPlayer mediaPlayer;
    private MediaRecorder recorder;
    private String OUTPUT_FILE;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        startStopButton = (Button) this.findViewById(R.id.StartStopButton);
        startStopButton.setOnClickListener(this);

        transformer = new RealDoubleFFT(blockSize);
        imageView = (ImageView) this.findViewById(R.id.ImageView01);
        // 256 px wide (one column per FFT bin), 100 px tall drawing surface.
        bitmap = Bitmap.createBitmap(256, 100, Bitmap.Config.ARGB_8888);
        canvas = new Canvas(bitmap);
        paint = new Paint();
        paint.setColor(Color.GREEN);
        imageView.setImageBitmap(bitmap);

        OUTPUT_FILE = Environment.getExternalStorageDirectory() + "/audioRecorder.3gpp";
    }

    /**
     * Background task that reads PCM blocks from the microphone, runs an FFT
     * on each block, and publishes the spectrum to the UI thread for drawing.
     */
    public class RecordAudio extends AsyncTask<Void, double[], Void> {

        @Override
        protected Void doInBackground(Void... arg0) {
            AudioRecord audioRecord = null;
            try {
                int bufferSize = AudioRecord.getMinBufferSize(frequency,
                        channelConfiguration, audioEncoding);
                audioRecord = new AudioRecord(
                        MediaRecorder.AudioSource.MIC, frequency,
                        channelConfiguration, audioEncoding, bufferSize);

                short[] buffer = new short[blockSize];
                double[] toTransform = new double[blockSize];

                audioRecord.startRecording();
                while (started) {
                    int bufferReadResult = audioRecord.read(buffer, 0, blockSize);
                    // Normalise signed 16-bit samples to [-1.0, 1.0).
                    for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
                        toTransform[i] = (double) buffer[i] / 32768.0;
                    }
                    transformer.ft(toTransform);
                    publishProgress(toTransform);
                }
                audioRecord.stop();
            } catch (Throwable t) {
                t.printStackTrace();
                Log.e("AudioRecord", "Recording Failed");
            } finally {
                // FIX: release the native microphone resource. Without this the
                // mic stays held after the loop ends and MediaRecorder (the
                // other feature) cannot open it.
                if (audioRecord != null) {
                    audioRecord.release();
                }
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(double[]... toTransform) {
            // Runs on the UI thread: draw one vertical line per FFT bin.
            canvas.drawColor(Color.BLACK);
            for (int i = 0; i < toTransform[0].length; i++) {
                int x = i;
                int downy = (int) (100 - (toTransform[0][i] * 10));
                int upy = 100;
                canvas.drawLine(x, downy, x, upy, paint);
            }
            imageView.invalidate();
        }
    }

    /** Toggles the waveform visualiser on/off (wired to StartStopButton). */
    public void onClick(View arg0) {
        if (started) {
            started = false;
            startStopButton.setText("Start");
            recordTask.cancel(true);
        } else {
            started = true;
            startStopButton.setText("Stop");
            recordTask = new RecordAudio();
            recordTask.execute();
        }
    }

    // --- Recording/playback button dispatcher (android:onClick="buttonTaped") ---
    public void buttonTaped(View view) {
        switch (view.getId()) {
            case R.id.startBtn:
                try {
                    beginRecording();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case R.id.finishBtn:
                try {
                    stopRecording();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case R.id.playBtn:
                try {
                    playRecording();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case R.id.stopBtn:
                try {
                    stopPlayback();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
        }
    }

    /** Stops playback if a player is active; safe to call when idle. */
    private void stopPlayback() {
        if (mediaPlayer != null) mediaPlayer.stop();
    }

    /** Plays back the last recording from OUTPUT_FILE. */
    private void playRecording() throws IOException {
        ditchMediaPlayer();
        mediaPlayer = new MediaPlayer();
        mediaPlayer.setDataSource(OUTPUT_FILE);
        mediaPlayer.prepare();
        mediaPlayer.start();
    }

    /** Releases any previous MediaPlayer so a fresh one can be created. */
    private void ditchMediaPlayer() {
        if (mediaPlayer != null) {
            try {
                mediaPlayer.release();
            } catch (Exception e) {
                e.printStackTrace();
            }
            // FIX: drop the stale reference so stopPlayback() cannot call
            // stop() on an already-released player (IllegalStateException).
            mediaPlayer = null;
        }
    }

    /** Stops and releases the recorder, freeing the microphone for other use. */
    private void stopRecording() {
        if (recorder != null) {
            recorder.stop();
            // FIX: release the mic so the waveform feature can acquire it.
            recorder.release();
            recorder = null;
        }
    }

    /**
     * Starts a fresh 3GPP/AMR-NB recording to OUTPUT_FILE, replacing any
     * previous file.
     *
     * @throws IOException if MediaRecorder.prepare() fails
     */
    private void beginRecording() throws IOException, InterruptedException {
        ditchMediaRecorder();
        File outFile = new File(OUTPUT_FILE);
        if (outFile.exists()) outFile.delete();

        recorder = new MediaRecorder();
        recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        recorder.setOutputFile(OUTPUT_FILE);
        recorder.prepare();
        recorder.start();
    }

    /** Releases any previous MediaRecorder instance. */
    private void ditchMediaRecorder() {
        if (recorder != null) {
            recorder.release();
            // FIX: drop the stale reference; calling stop() on a released
            // recorder throws IllegalStateException.
            recorder = null;
        }
    }
}
And here is the layout XML:
<?xml version="1.0" encoding="utf-8"?>
<!-- Main layout: waveform ImageView plus one toggle button for the
     visualiser and four buttons for record / stop / play / stop-playback,
     all dispatched to MainActivity.buttonTaped via android:onClick. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <!-- Drawing surface for the FFT visualisation -->
    <ImageView
        android:id="@+id/ImageView01"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />

    <!-- Toggles the waveform visualiser (handled via OnClickListener) -->
    <Button
        android:id="@+id/StartStopButton"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="ondas" />

    <Button
        android:id="@+id/startBtn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_gravity="center_horizontal"
        android:text="gravar"
        android:onClick="buttonTaped" />

    <Button
        android:id="@+id/finishBtn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_gravity="center_horizontal"
        android:text="finaliza a Gravação"
        android:onClick="buttonTaped" />

    <Button
        android:id="@+id/playBtn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_gravity="center_horizontal"
        android:text="escuta"
        android:onClick="buttonTaped" />

    <Button
        android:id="@+id/stopBtn"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_gravity="center_horizontal"
        android:text="para de escutar"
        android:onClick="buttonTaped" />
</LinearLayout>
I also used the Complex1D, RealDoubleFFT, and RealDoubleFFT_Mixed classes. They can be found in the rar archive that is available for download.
There are also buttons to stop recording, play it back, and stop playback, which I was using for testing.
Any help is welcome, thank you in advance for your attention.