在Android中捕获声音进行分析和可视化频率

我是Android新手，我试图编写一个捕获音频然后显示其中所含频率的程序。我找到了一个绘制图形均衡器的例子。在这个例子中，它使用一个类型为 AudioRecord 的对象来捕获音频。用于将音频信号分解为分量频率的技术采用一种称为离散傅立叶变换（DFT）的数学变换，并使用快速傅立叶变换（FFT）来执行 DFT。这个例子使用了一个实现 FFT 的软件包，其链接在这里：www.netlib.org/fftpack/jfftpack.tgz 。问题是，我运行这个例子后，按下开始按钮，图形均衡器并没有出现在显示屏上。

以下是活动类的源代码:

package com.audio.processing; import android.app.Activity; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaRecorder; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.ImageView; import ca.uol.aig.fftpack.RealDoubleFFT; public class AudioProcessing extends Activity implements OnClickListener{ int frequency = 8000; int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; private RealDoubleFFT transformer; int blockSize = 256; Button startStopButton; boolean started = false; RecordAudio recordTask; ImageView imageView; Bitmap bitmap; Canvas canvas; Paint paint; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); startStopButton = (Button) this.findViewById(R.id.StartStopButton); startStopButton.setOnClickListener(this); transformer = new RealDoubleFFT(blockSize); imageView = (ImageView) this.findViewById(R.id.ImageView01); bitmap = Bitmap.createBitmap((int)256,(int)100,Bitmap.Config.ARGB_8888); canvas = new Canvas(bitmap); paint = new Paint(); paint.setColor(Color.GREEN); imageView.setImageBitmap(bitmap); } private class RecordAudio extends AsyncTask<Void, double[], Void> { @Override protected Void doInBackground(Void... 
params) { try { int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding); AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.DEFAULT, frequency, channelConfiguration, audioEncoding, bufferSize); short[] buffer = new short[blockSize]; double[] toTransform = new double[blockSize]; audioRecord.startRecording(); while (started) { int bufferReadResult = audioRecord.read(buffer, 0, blockSize); for (int i = 0; i < blockSize && i < bufferReadResult; i++) { toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit } transformer.ft(toTransform); publishProgress(toTransform); } audioRecord.stop(); } catch (Throwable t) { Log.e("AudioRecord", "Recording Failed"); } return null; } } protected void onProgressUpdate(double[]... toTransform) { canvas.drawColor(Color.BLACK); for (int i = 0; i < toTransform[0].length; i++) { int x = i; int downy = (int) (100 - (toTransform[0][i] * 10)); int upy = 100; canvas.drawLine(x, downy, x, upy, paint); } imageView.invalidate(); } public void onClick(View v) { if (started) { started = false; startStopButton.setText("Start"); recordTask.cancel(true); } else { started = true; startStopButton.setText("Stop"); recordTask = new RecordAudio(); recordTask.execute(); } } } 

这是main.xml:

 <?xml version="1.0" encoding="utf-8"?> <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" android:orientation="vertical" android:layout_width="fill_parent" android:layout_height="fill_parent" > <TextView android:layout_width="fill_parent" android:layout_height="wrap_content" android:text="@string/hello" /> <ImageView android:id="@+id/ImageView01" android:layout_width="wrap_content" android:layout_height="wrap_content"></ImageView><Button android:text="Start" android:id="@+id/StartStopButton" android:layout_width="wrap_content" android:layout_height="wrap_content"></Button> </LinearLayout> 

在 AndroidManifest.xml 中，我已经设置了 RECORD_AUDIO 权限。提前致谢！

Solutions Collecting From Web of "在Android中捕获声音进行分析和可视化频率"

这是可以工作的代码。我自己试过了，它运行正常。

 package com.example.frequencytest; import android.app.Activity; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaRecorder; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.view.Menu; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.ImageView; import ca.uol.aig.fftpack.RealDoubleFFT; public class MainActivity extends Activity implements OnClickListener { int frequency = 8000; int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; private RealDoubleFFT transformer; int blockSize = 256; Button startStopButton; boolean started = false; RecordAudio recordTask; ImageView imageView; Bitmap bitmap; Canvas canvas; Paint paint; //AudioRecord audioRecord; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); startStopButton = (Button) this.findViewById(R.id.start_stop_btn); startStopButton.setOnClickListener(this); transformer = new RealDoubleFFT(blockSize); imageView = (ImageView) this.findViewById(R.id.imageView1); bitmap = Bitmap.createBitmap((int) 256, (int) 100, Bitmap.Config.ARGB_8888); canvas = new Canvas(bitmap); paint = new Paint(); paint.setColor(Color.GREEN); imageView.setImageBitmap(bitmap); } public class RecordAudio extends AsyncTask<Void, double[], Void> { @Override protected Void doInBackground(Void... 
arg0) { try { // int bufferSize = AudioRecord.getMinBufferSize(frequency, // AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); int bufferSize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding); AudioRecord audioRecord = new AudioRecord( MediaRecorder.AudioSource.MIC, frequency, channelConfiguration, audioEncoding, bufferSize); short[] buffer = new short[blockSize]; double[] toTransform = new double[blockSize]; audioRecord.startRecording(); // started = true; hopes this should true before calling // following while loop while (started) { int bufferReadResult = audioRecord.read(buffer, 0, blockSize); for (int i = 0; i < blockSize && i < bufferReadResult; i++) { toTransform[i] = (double) buffer[i] / 32768.0; // signed // 16 } // bit transformer.ft(toTransform); publishProgress(toTransform); } audioRecord.stop(); } catch (Throwable t) { t.printStackTrace(); Log.e("AudioRecord", "Recording Failed"); } return null; } @Override protected void onProgressUpdate(double[]... toTransform) { canvas.drawColor(Color.BLACK); for (int i = 0; i < toTransform[0].length; i++) { int x = i; int downy = (int) (100 - (toTransform[0][i] * 10)); int upy = 100; canvas.drawLine(x, downy, x, upy, paint); } imageView.invalidate(); // TODO Auto-generated method stub // super.onProgressUpdate(values); } } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.activity_main, menu); return true; } public void onClick(View arg0) { // TODO Auto-generated method stub if (started) { started = false; startStopButton.setText("Start"); recordTask.cancel(true); } else { started = true; startStopButton.setText("Stop"); recordTask = new RecordAudio(); recordTask.execute(); } } } 

是的，我也做过这个项目，也遇到了同样的错误，但在加入权限声明之后就正常了。很可能你没有把它添加到 AndroidManifest.xml 中的正确位置：它应该位于 application 标签之外。

  <uses-permission android:name="android.permission.RECORD_AUDIO"> </uses-permission> 

onProgressUpdate 方法应该属于 RecordAudio，而在你的代码中它属于 AudioProcessing。检查一下大括号的位置；按照上述更正之后，程序应该就能正常工作了。