How to live stream from Android

I want to do live streaming the way Periscope does it. I did a quick search online and found a bunch of libraries (ffmpeg wrappers and the like) that rely on native code, but shouldn't encoding be supported directly by the Android SDK through MediaCodec?

https://developer.android.com/reference/android/media/MediaCodec.html

So I'm wondering: why is an external native library needed for encoding at all?
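For reference, the SDK really does expose the device's encoders. Below is a minimal sketch (not from the original question) of configuring an H.264 encoder purely through MediaCodec, assuming API 18+; the class name and parameter values are illustrative only:

import java.io.IOException;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

public class EncoderSketch {
    // Creates and configures an H.264 encoder using only SDK classes.
    public static MediaCodec createAvcEncoder() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // feed frames via an input Surface
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2_500_000);        // 2500 Kbps, as in the question
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);        // keyframe every 2 seconds
        MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // encoder.createInputSurface() would then give a Surface to render camera frames into.
        return encoder;
    }
}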

Also, I tried a few of those libraries, but they seem too slow, especially at high bitrates (I get 16 fps at 1280×720 and 2500 Kbps on a Nexus 5X, and I would like to reach 1080p). So how do popular broadcasting apps do it?
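Whether you hit the fast path often depends on which codec implementation the device selects: vendor-prefixed names (e.g. OMX.qcom.*) are typically hardware encoders, while OMX.google.* names are software fallbacks and usually the slow case described above. A small probe sketch (my own illustration, assuming API 21+; the log tag is a placeholder):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.util.Log;

public class CodecProbe {
    private static final String TAG = "CodecProbe"; // illustrative tag

    // Logs every encoder on the device that can produce H.264.
    public static void logAvcEncoders() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if ("video/avc".equalsIgnoreCase(type)) {
                    Log.i(TAG, "AVC encoder: " + info.getName());
                }
            }
        }
    }
}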

With this much processing involved, it seems impossible to achieve high-quality streaming on an ordinary (not top-of-the-line) phone.

Answers

The MediaCodec API tends to get slow with high-resolution streams. I suggest you use FFmpeg instead; there is a good Java wrapper for it, JavaCV (it works on Android as well). Here is a short example that should get you going:

// Imports assume the pre-bytedeco JavaCV packages (com.googlecode.javacv)
// that this example was originally written against.
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

import java.io.IOException;
import java.nio.ShortBuffer;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

public class MainActivity extends Activity implements OnClickListener {

    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "rtmp://live:live@128.122.151.108:1935/live/test.flv";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_main);
        initLayout();
        initRecorder();
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mWakeLock == null) {
            PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
            mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
            mWakeLock.acquire();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        recording = false;
    }

    private void initLayout() {
        mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);
        recordButton = (Button) findViewById(R.id.recorder_control);
        recordButton.setText("Start");
        recordButton.setOnClickListener(this);

        cameraView = new CameraView(this);
        LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(imageWidth, imageHeight);
        mainLayout.addView(cameraView, layoutParam);
        Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");

        if (yuvIplimage == null) {
            // Recreated after frame size is set in surface change method
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
            Log.v(LOG_TAG, "IplImage.create");
        }

        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("flv");
        Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null && recording) {
            recording = false;
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Quit when back button is pushed
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (recording) {
                stopRecording();
            }
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            Log.w(LOG_TAG, "Start Button Pushed");
            recordButton.setText("Stop");
        } else {
            stopRecording();
            Log.w(LOG_TAG, "Stop Button Pushed");
            recordButton.setText("Start");
        }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    //Log.v(LOG_TAG, "audioRecord bufferReadResult: " + bufferReadResult);

                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

        private boolean previewRunning = false;

        private SurfaceHolder holder;
        private Camera camera;

        private byte[] previewBuffer;

        long videoTimestamp = 0;

        Bitmap bitmap;
        Canvas canvas;

        public CameraView(Context _context) {
            super(_context);
            holder = this.getHolder();
            holder.addCallback(this);
            holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            camera = Camera.open();
            try {
                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);

                Camera.Parameters currentParams = camera.getParameters();
                Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
                Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                        + " imageHeight: " + currentParams.getPreviewSize().height);

                // Use these values
                imageWidth = currentParams.getPreviewSize().width;
                imageHeight = currentParams.getPreviewSize().height;
                frameRate = currentParams.getPreviewFrameRate();

                bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);

                /*
                Log.v(LOG_TAG, "Creating previewBuffer size: " + imageWidth * imageHeight
                        * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8);
                previewBuffer = new byte[imageWidth * imageHeight
                        * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8];
                camera.addCallbackBuffer(previewBuffer);
                camera.setPreviewCallbackWithBuffer(this);
                */

                camera.startPreview();
                previewRunning = true;
            } catch (IOException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v(LOG_TAG, "Surface Changed: width " + width + " height: " + height);

            // We would do this if we want to reset the camera parameters
            /*
            if (!recording) {
                if (previewRunning) {
                    camera.stopPreview();
                }
                try {
                    //Camera.Parameters cameraParameters = camera.getParameters();
                    //p.setPreviewSize(imageWidth, imageHeight);
                    //p.setPreviewFrameRate(frameRate);
                    //camera.setParameters(cameraParameters);

                    camera.setPreviewDisplay(holder);
                    camera.startPreview();
                    previewRunning = true;
                } catch (IOException e) {
                    Log.e(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
            */

            // Get the current parameters
            Camera.Parameters currentParams = camera.getParameters();
            Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
            Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                    + " imageHeight: " + currentParams.getPreviewSize().height);

            // Use these values
            imageWidth = currentParams.getPreviewSize().width;
            imageHeight = currentParams.getPreviewSize().height;
            frameRate = currentParams.getPreviewFrameRate();

            // Create the yuvIplimage if needed
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            try {
                camera.setPreviewCallback(null);
                previewRunning = false;
                camera.release();
            } catch (RuntimeException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (yuvIplimage != null && recording) {
                videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

                // Put the camera preview frame right into the yuvIplimage object
                yuvIplimage.getByteBuffer().put(data);

                // FAQ about IplImage:
                // - For custom raw processing of data, getByteBuffer() returns an NIO direct
                //   buffer wrapped around the memory pointed by imageData, and under Android we can
                //   also use that Buffer with Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
                // - To get a BufferedImage from an IplImage, we may call getBufferedImage().
                // - The createFrom() factory method can construct an IplImage from a BufferedImage.
                // - There are also a few copy*() methods for BufferedImage<->IplImage data transfers.

                // Let's try it..
                // This works but only on transparency
                // Need to find the right Bitmap and IplImage matching types
                /*
                bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
                //bitmap.setPixel(10, 10, Color.MAGENTA);

                canvas = new Canvas(bitmap);
                Paint paint = new Paint();
                paint.setColor(Color.GREEN);
                float leftx = 20;
                float topy = 20;
                float rightx = 50;
                float bottomy = 100;
                RectF rectangle = new RectF(leftx, topy, rightx, bottomy);
                canvas.drawRect(rectangle, paint);

                bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
                */

                //Log.v(LOG_TAG, "Writing Frame");
                try {
                    // Get the correct time
                    recorder.setTimestamp(videoTimestamp);
                    // Record the image into FFmpegFrameRecorder
                    recorder.record(yuvIplimage);
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }
}
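Note that the code above targets the old com.googlecode.javacv packages; with the current bytedeco artifacts (org.bytedeco:javacv-platform) the core recorder setup looks essentially the same. A minimal sketch of just the stream setup under that assumption (the URL and numbers are placeholders); the app also needs the CAMERA, RECORD_AUDIO, and INTERNET permissions in its manifest:

import org.bytedeco.javacv.FFmpegFrameRecorder;

public class RtmpSketch {
    // Opens an RTMP connection and returns a recorder ready for record() calls.
    public static FFmpegFrameRecorder openRtmp(String url, int width, int height)
            throws FFmpegFrameRecorder.Exception {
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(url, width, height, 1); // 1 audio channel
        recorder.setFormat("flv");            // RTMP carries an FLV container
        recorder.setFrameRate(30);
        recorder.setSampleRate(44100);
        recorder.setVideoBitrate(2_500_000);  // illustrative: 2500 Kbps as in the question
        recorder.start();                     // connects to the RTMP server
        return recorder;
    }
}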

You are right: this is processing-intensive work, and you have to consider the Android NDK for it. Learn from and build on yt watchme and the other linked demo; also work through WebRTC.
Another one to look at is rticonnextdds-videodemo-android.