Encoding Camera Video to H.264 with MediaCodec on Android

Honey · Updated 2024-11-10

This post shares a working example of encoding camera video to H.264 on Android with MediaCodec, for your reference. The pipeline is straightforward: the camera preview callback delivers NV21 frames, which are buffered in a queue; a separate encoder thread converts each frame to NV12, feeds it to a MediaCodec "video/avc" encoder, and appends the encoded output to a .h264 file.

1. MainActivity.java

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;

public class MainActivity extends Activity implements SurfaceHolder.Callback, PreviewCallback {

    private SurfaceView surfaceview;
    private SurfaceHolder surfaceHolder;
    private Camera camera;
    private Parameters parameters;
    int width = 1280;
    int height = 720;
    int framerate = 30;
    int biterate = 8500 * 1000;

    private static int yuvqueuesize = 10;

    // Queue buffering raw frames waiting to be encoded (static member!)
    public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);

    private AvcEncoder avcCodec;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        surfaceview = (SurfaceView) findViewById(R.id.surfaceview);
        surfaceHolder = surfaceview.getHolder();
        surfaceHolder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        camera = getBackCamera();
        startcamera(camera);
        // Create the AvcEncoder
        avcCodec = new AvcEncoder(width, height, framerate, biterate);
        // Start the encoding thread
        avcCodec.StartEncoderThread();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (null != camera) {
            camera.setPreviewCallback(null);
            camera.stopPreview();
            camera.release();
            camera = null;
            avcCodec.StopThread();
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
        // Save the current frame into the queue
        putYUVData(data, data.length);
    }

    public void putYUVData(byte[] buffer, int length) {
        if (YUVQueue.size() >= 10) {
            YUVQueue.poll();
        }
        YUVQueue.add(buffer);
    }

    private void startcamera(Camera mCamera) {
        if (mCamera != null) {
            try {
                mCamera.setPreviewCallback(this);
                mCamera.setDisplayOrientation(90);
                if (parameters == null) {
                    parameters = mCamera.getParameters();
                }
                // Get the default camera configuration
                parameters = mCamera.getParameters();
                // Set the preview format
                parameters.setPreviewFormat(ImageFormat.NV21);
                // Set the preview resolution
                parameters.setPreviewSize(width, height);
                // Apply the camera parameters
                mCamera.setParameters(parameters);
                // Pass the fully initialized SurfaceHolder to setPreviewDisplay(SurfaceHolder);
                // without a surface the camera will not start the preview
                mCamera.setPreviewDisplay(surfaceHolder);
                // Call startPreview() to update the preview surface; the preview must be
                // running before frames can be captured
                mCamera.startPreview();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    private Camera getBackCamera() {
        Camera c = null;
        try {
            // Open a Camera instance
            c = Camera.open(0);
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Returns null if opening the camera fails
        return c;
    }
}
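A note on the preview callback: with setPreviewCallback(), the camera allocates a fresh byte[] for every frame, which can cause noticeable GC pressure at 720p/30fps. One option is the buffer-reuse variant of the same Camera API. The snippet below is a minimal sketch of that approach, not part of the original example; it assumes it runs inside startcamera() with the NV21 preview format and the width/height fields configured above.

// Optional: reuse preview buffers instead of letting the camera allocate one per frame.
// NV21 uses 12 bits per pixel, so one frame is width * height * 3 / 2 bytes.
int bufferSize = width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
mCamera.addCallbackBuffer(new byte[bufferSize]);   // hand the camera a reusable buffer
mCamera.setPreviewCallbackWithBuffer(this);        // instead of setPreviewCallback(this)

// In onPreviewFrame(), copy the data before queueing it and return the buffer to the camera:
// byte[] frame = new byte[data.length];
// System.arraycopy(data, 0, frame, 0, data.length);
// putYUVData(frame, frame.length);
// camera.addCallbackBuffer(data);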

2. AvcEncoder.java

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;

public class AvcEncoder {

    private final static String TAG = "MediaCodec";
    private int TIMEOUT_USEC = 12000;
    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;
    public byte[] configbyte;

    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        m_width = width;
        m_height = height;
        m_framerate = framerate;

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Configure the encoder
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Start the encoder
        mediaCodec.start();
        // Create the file that will hold the encoded stream
        createfile();
    }

    private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
    private BufferedOutputStream outputStream;

    private void createfile() {
        File file = new File(path);
        if (file.exists()) {
            file.delete();
        }
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void StopEncoder() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public boolean isRunning = false;

    public void StopThread() {
        isRunning = false;
        try {
            StopEncoder();
            outputStream.flush();
            outputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    int count = 0;

    public void StartEncoderThread() {
        Thread EncoderThread = new Thread(new Runnable() {
            @Override
            public void run() {
                isRunning = true;
                byte[] input = null;
                long pts = 0;
                long generateIndex = 0;
                while (isRunning) {
                    // Check the queue in MainActivity that buffers the frames waiting to be encoded
                    if (MainActivity.YUVQueue.size() > 0) {
                        // Take one frame out of the queue
                        input = MainActivity.YUVQueue.poll();
                        byte[] yuv420sp = new byte[m_width * m_height * 3 / 2];
                        // Convert the frame to the NV12 (YUV420 semi-planar) layout expected by the encoder
                        NV21ToNV12(input, yuv420sp, m_width, m_height);
                        input = yuv420sp;
                    }
                    if (input != null) {
                        try {
                            // Encoder input buffers
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            // Encoder output buffers
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                // Copy the converted NV12 frame into the encoder input buffer
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }

                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                //Log.i("AvcEncoder", "Get H264 Buffer Success! flag = " + bufferInfo.flags + ", pts = " + bufferInfo.presentationTimeUs);
                                // Copy the encoded frame out of the encoder output buffer
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                if ((bufferInfo.flags & BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                    // Cache the codec config data (SPS/PPS) so it can be prepended to every key frame
                                    configbyte = outData;
                                } else if ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) != 0) {
                                    // Prepend SPS/PPS to the key frame, then write it to the file
                                    byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                    outputStream.write(keyframe, 0, keyframe.length);
                                } else {
                                    // Write ordinary frames straight to the file
                                    outputStream.write(outData, 0, outData.length);
                                }
                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }
                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        });
        EncoderThread.start();
    }

    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        // Copy the Y plane unchanged
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        // Swap the interleaved chroma bytes: NV21 stores V,U pairs, NV12 stores U,V pairs
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j] = nv21[framesize + j + 1];     // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / m_framerate;
    }
}
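The code above captures the SPS/PPS headers by caching the output buffer flagged BUFFER_FLAG_CODEC_CONFIG and prepending it to every key frame, which is what makes the raw .h264 file playable from any key frame. As an alternative sketch (not part of the original example), the same configuration data can be read from the csd-0/csd-1 buffers that MediaCodec reports when dequeueOutputBuffer() returns INFO_OUTPUT_FORMAT_CHANGED:

// Alternative way to obtain SPS/PPS, inside the drain loop above (sketch only;
// variable names follow the code above).
int outIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat outFormat = mediaCodec.getOutputFormat();
    ByteBuffer sps = outFormat.getByteBuffer("csd-0");   // SPS NAL unit(s), Annex-B framed
    ByteBuffer pps = outFormat.getByteBuffer("csd-1");   // PPS NAL unit(s)
    int spsLen = sps.remaining();
    int ppsLen = pps.remaining();
    configbyte = new byte[spsLen + ppsLen];
    sps.get(configbyte, 0, spsLen);
    pps.get(configbyte, spsLen, ppsLen);
}

Note that the output is a raw Annex-B stream with no container, so the presentation timestamps written here are not stored in the file. As a worked example of computePresentationTime(): at 30 fps, frame 30 gets 132 + 30 * 1000000 / 30 = 1,000,132 µs, i.e. roughly one second.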

3. activity_main.xml

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent" >

    <SurfaceView
        android:id="@+id/surfaceview"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

</RelativeLayout>

4. Add permissions

<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
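On Android 6.0 (API 23) and later, declaring CAMERA and WRITE_EXTERNAL_STORAGE in the manifest is not enough: both are dangerous permissions and must also be granted at runtime. Below is a minimal sketch of such a check, assuming it is called from MainActivity.onCreate() before the camera is opened (the request code value is arbitrary); it is not part of the original example.

// Minimal runtime-permission check for API 23+ (sketch).
private static final int REQUEST_PERMISSIONS = 1;  // arbitrary request code

private void checkPermissions() {
    if (android.os.Build.VERSION.SDK_INT >= 23) {
        if (checkSelfPermission(android.Manifest.permission.CAMERA) != android.content.pm.PackageManager.PERMISSION_GRANTED
                || checkSelfPermission(android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != android.content.pm.PackageManager.PERMISSION_GRANTED) {
            requestPermissions(new String[]{
                    android.Manifest.permission.CAMERA,
                    android.Manifest.permission.WRITE_EXTERNAL_STORAGE
            }, REQUEST_PERMISSIONS);
        }
    }
}

On Android 10 and later, writing to Environment.getExternalStorageDirectory() is further restricted by scoped storage, so the output path used in AvcEncoder may also need to move to app-specific storage such as getExternalFilesDir().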


