I have a problem here. I have two methods: one shows the sound waves (you activate it and it draws what you speak as lines); the other, when activated, records what the person speaks.
I am trying to run both at the same time; each one already works on its own. If anyone has an idea of how I can do this, I would really appreciate it.
Here is the project as a .rar for download: https://mega.nz/#!Q58R2RLL!8dvXYbJ_cQujyO8pv4bm73fZgkKsKDhZpteJc66Asf4
And here is the class with the two separate methods:
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import java.io.File;
import java.io.IOException;
public class MainActivity extends AppCompatActivity implements OnClickListener {
// Relating to the sound waves...
int frequency = 8000;
int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_DEFAULT;
int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private RealDoubleFFT transformer;
int blockSize = 256;
Button startStopButton;
boolean started = false;
RecordAudio recordTask;
ImageView imageView;
Bitmap bitmap;
Canvas canvas;
Paint paint;
// Relating to the audio recording...
private MediaPlayer mediaPlayer;
private MediaRecorder recorder;
private String OUTPUT_FILE;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
startStopButton = (Button) this.findViewById(R.id.StartStopButton);
startStopButton.setOnClickListener(this);
transformer = new RealDoubleFFT(blockSize);
imageView = (ImageView) this.findViewById(R.id.ImageView01);
bitmap = Bitmap.createBitmap(256, 100, Bitmap.Config.ARGB_8888);
canvas = new Canvas(bitmap);
paint = new Paint();
paint.setColor(Color.GREEN);
imageView.setImageBitmap(bitmap);
OUTPUT_FILE= Environment.getExternalStorageDirectory()+"/audioRecorder.3gpp";
}
// Relating to the waves...
public class RecordAudio extends AsyncTask<Void, double[], Void> {
@Override
protected Void doInBackground(Void... arg0) {
try {
// int bufferSize = AudioRecord.getMinBufferSize(frequency,
// AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
int bufferSize = AudioRecord.getMinBufferSize(frequency,
channelConfiguration, audioEncoding);
AudioRecord audioRecord = new AudioRecord(
MediaRecorder.AudioSource.MIC, frequency,
channelConfiguration, audioEncoding, bufferSize);
short[] buffer = new short[blockSize];
double[] toTransform = new double[blockSize];
audioRecord.startRecording();
// 'started' must already be true at this point, otherwise the loop below exits immediately
while (started) {
int bufferReadResult = audioRecord.read(buffer, 0,
blockSize);
for (int i = 0; i < blockSize && i < bufferReadResult; i++) {
toTransform[i] = (double) buffer[i] / 32768.0; // signed 16-bit
}
transformer.ft(toTransform);
publishProgress(toTransform);
}
audioRecord.stop();
} catch (Throwable t) {
t.printStackTrace();
Log.e("AudioRecord", "Recording Failed");
}
return null;
}
@Override
protected void onProgressUpdate(double[]... toTransform) {
canvas.drawColor(Color.BLACK);
for (int i = 0; i < toTransform[0].length; i++) {
int x = i;
int downy = (int) (100 - (toTransform[0][i] * 10));
int upy = 100;
canvas.drawLine(x, downy, x, upy, paint);
}
imageView.invalidate();
}
}
public void onClick(View arg0) {
if (started) {
started = false;
startStopButton.setText("Start");
recordTask.cancel(true);
} else {
started = true;
startStopButton.setText("Stop");
recordTask = new RecordAudio();
recordTask.execute();
}
}
// Relating to the recording...
public void buttonTaped(View view){
switch(view.getId()){
case R.id.startBtn:
try{
beginRecording();
}catch (Exception e){
e.printStackTrace();
}
break;
case R.id.finishBtn:
try{
stopRecording();
}catch (Exception e){
e.printStackTrace();
}
break;
case R.id.playBtn:
try{
playRecording();
}catch (Exception e){
e.printStackTrace();
}
break;
case R.id.stopBtn:
try{
stopPlayback();
}catch (Exception e){
e.printStackTrace();
}
break;
}
}
private void stopPlayback(){
if(mediaPlayer != null) mediaPlayer.stop();
}
private void playRecording() throws IOException {
ditchMediaPlayer();
mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource(OUTPUT_FILE);
mediaPlayer.prepare();
mediaPlayer.start();
}
private void ditchMediaPlayer(){
if(mediaPlayer != null){
try{
mediaPlayer.release();
}catch(Exception e){
e.printStackTrace();
}
}
}
private void stopRecording(){
if(recorder != null) recorder.stop();
}
private void beginRecording() throws IOException, InterruptedException {
ditchMediaRecorder();
File outFile = new File(OUTPUT_FILE);
if(outFile.exists()) outFile.delete();
recorder = new MediaRecorder();
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
recorder.setOutputFile(OUTPUT_FILE);
recorder.prepare();
recorder.start();
}
private void ditchMediaRecorder(){
if(recorder != null) recorder.release();
}
}
And here is the XML layout:
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<ImageView
android:id="@+id/ImageView01"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</ImageView>
<Button android:text="ondas"
android:id="@+id/StartStopButton"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</Button>
<Button
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="gravar"
android:id="@+id/startBtn"
android:layout_gravity="center_horizontal"
android:onClick="buttonTaped" />
<Button
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="finaliza a Gravação"
android:id="@+id/finishBtn"
android:layout_gravity="center_horizontal"
android:onClick="buttonTaped" />
<Button
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="escuta"
android:id="@+id/playBtn"
android:layout_gravity="center_horizontal"
android:onClick="buttonTaped" />
<Button
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="para de escutar"
android:id="@+id/stopBtn"
android:layout_gravity="center_horizontal"
android:onClick="buttonTaped" />
</LinearLayout>
I also used the Complex1d, RealDoubleFFT and RealDoubleFFT_Mixed classes; they are in the downloadable file.
There are also buttons to stop recording, play it back, and stop playback that I was using for testing.
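One idea I have been considering, but have not managed to make work yet (just a sketch, not tested in this project): since both approaches open the microphone, and on most devices only one client can read from AudioSource.MIC at a time, running the AudioRecord visualizer and the MediaRecorder together tends to fail no matter how the threads are arranged. A possible alternative is a single AudioRecord that, inside the same loop, both feeds the FFT and writes the raw samples to a file. The snippet below assumes the existing fields (frequency, channelConfiguration, audioEncoding, blockSize, transformer, started) and a hypothetical PCM_FILE path; it also needs the java.io.BufferedOutputStream and java.io.FileOutputStream imports, and it produces raw 16-bit PCM, not 3GPP:
// Sketch only: a single AudioRecord feeding both the FFT display and a file.
// PCM_FILE is a hypothetical path; the output is raw 16-bit PCM, not 3GPP.
public class RecordAndSave extends AsyncTask<Void, double[], Void> {
    @Override
    protected Void doInBackground(Void... arg0) {
        AudioRecord audioRecord = null;
        BufferedOutputStream out = null;
        try {
            int bufferSize = AudioRecord.getMinBufferSize(frequency,
                    channelConfiguration, audioEncoding);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    frequency, channelConfiguration, audioEncoding, bufferSize);
            out = new BufferedOutputStream(new FileOutputStream(PCM_FILE));
            short[] buffer = new short[blockSize];
            double[] toTransform = new double[blockSize];
            audioRecord.startRecording();
            while (started) {
                int read = audioRecord.read(buffer, 0, blockSize);
                for (int i = 0; i < blockSize && i < read; i++) {
                    out.write(buffer[i] & 0xFF);        // low byte (little-endian PCM)
                    out.write((buffer[i] >> 8) & 0xFF); // high byte
                    toTransform[i] = (double) buffer[i] / 32768.0; // normalize for the FFT
                }
                transformer.ft(toTransform);
                publishProgress(toTransform);
            }
            audioRecord.stop();
        } catch (Throwable t) {
            Log.e("RecordAndSave", "capture failed", t);
        } finally {
            if (audioRecord != null) audioRecord.release();
            if (out != null) {
                try { out.close(); } catch (IOException e) { e.printStackTrace(); }
            }
        }
        return null;
    }
}
The existing onProgressUpdate() would keep drawing the lines; to hear the result, the raw PCM would need AudioTrack or a WAV header instead of the current MediaPlayer/3GPP path, and that is the part I have not solved yet.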
Any help is welcome; thank you in advance for your attention.
Thanks for the tip, Leandro. I haven't tried it that way; I will try it today and report back here whether it worked or not. Big hug!
– CristianCotrena
I tried it here, but I got the same errors as before: each one works when I run it alone inside its thread, but the problem appears as soon as I run the two together. Anyway, thanks for the attention; if you know of any other possible alternative, please comment :)
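One thing I still want to check (just a diagnostic sketch, assuming the failure really is the two of them fighting over the microphone rather than the threads) is whether the AudioRecord actually initializes and whether read() returns error codes while the MediaRecorder is already running. Something like this around the start of doInBackground(), using the same variables as there:
// Diagnostic sketch: did AudioRecord actually get the microphone while
// MediaRecorder is recording? If not, the conflict shows up in logcat
// instead of failing silently with an empty waveform.
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
    Log.e("AudioRecord", "init failed - mic probably held by MediaRecorder");
    return null;
}
audioRecord.startRecording();
int result = audioRecord.read(buffer, 0, blockSize);
if (result == AudioRecord.ERROR_INVALID_OPERATION
        || result == AudioRecord.ERROR_BAD_VALUE) {
    Log.e("AudioRecord", "read failed, code " + result);
}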
– CristianCotrena