Using MediaCodec on Android to encode camera-captured video to H.264

2022-11-02 14:08 · CrazyDiode · Android

This article walks through a complete example of using MediaCodec on Android to encode video frames captured from the camera into an H.264 stream. The full source is shared below for reference.

1.MainActivity.java

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
 
import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;
 
public class MainActivity extends Activity implements SurfaceHolder.Callback,PreviewCallback{
 
  private SurfaceView surfaceview;
 
  private SurfaceHolder surfaceHolder;
 
  private Camera camera;
 
  private Parameters parameters;
 
  int width = 1280;
 
  int height = 720;
 
  int framerate = 30;
 
  int bitrate = 8500*1000;
 
  private static int yuvqueuesize = 10;
 
  // Buffer queue for camera frames waiting to be encoded; static member!
  public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);
 
  private AvcEncoder avcCodec;
 
 
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    surfaceview = (SurfaceView)findViewById(R.id.surfaceview);
    surfaceHolder = surfaceview.getHolder();
    surfaceHolder.addCallback(this);
  }
 
 
  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    camera = getBackCamera();
    startcamera(camera);
    // Create the AvcEncoder instance
    avcCodec = new AvcEncoder(width, height, framerate, bitrate);
    // Start the encoding thread
    avcCodec.StartEncoderThread();
 
  }
 
  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
 
  }
 
  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    if (null != camera) {
      camera.setPreviewCallback(null);
      camera.stopPreview();
      camera.release();
      camera = null;
      avcCodec.StopThread();
    }
  }
 
 
  @Override
  public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
    // Save the current preview frame into the queue
    putYUVData(data, data.length);
  }
 
  public void putYUVData(byte[] buffer, int length) {
    if (YUVQueue.size() >= 10) {
      YUVQueue.poll();
    }
    YUVQueue.add(buffer);
  }
 
 
  private void startcamera(Camera mCamera){
    if(mCamera != null){
      try {
        mCamera.setPreviewCallback(this);
        mCamera.setDisplayOrientation(90);
        // Get the default camera parameters
        if (parameters == null) {
          parameters = mCamera.getParameters();
        }
        // Set the preview format
        parameters.setPreviewFormat(ImageFormat.NV21);
        // Set the preview resolution
        parameters.setPreviewSize(width, height);
        // Apply the camera parameters
        mCamera.setParameters(parameters);
        // Pass the fully initialized SurfaceHolder to setPreviewDisplay(SurfaceHolder);
        // without a surface the camera will not start the preview
        mCamera.setPreviewDisplay(surfaceHolder);
        // Call startPreview() to update the preview surface; the preview must be running before frames can be captured
        mCamera.startPreview();
 
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }
 
  private Camera getBackCamera() {
    Camera c = null;
    try {
      // Get a Camera instance
      c = Camera.open(0);
    } catch (Exception e) {
      e.printStackTrace();
    }
    // Returns null if the camera could not be opened
    return c;
  }
 
 
}
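
Note that the code above hard-codes a 1280x720 preview size, which not every device supports. Before calling setPreviewSize() it can be worth checking Camera.Parameters.getSupportedPreviewSizes(). A minimal sketch (chooseSupportedSize is an illustrative helper name, not part of the original code):

  // Hypothetical helper: return the requested preview size if the camera supports it,
  // otherwise fall back to the first size the camera reports.
  private Camera.Size chooseSupportedSize(Camera.Parameters params, int wantWidth, int wantHeight) {
    java.util.List<Camera.Size> sizes = params.getSupportedPreviewSizes();
    for (Camera.Size s : sizes) {
      if (s.width == wantWidth && s.height == wantHeight) {
        return s;
      }
    }
    return sizes.get(0);
  }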

2.AvcEncoder.java

 

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
 
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
 
import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;
 
 
public class AvcEncoder
{
  private final static String TAG = "MediaCodec";
 
  private int TIMEOUT_USEC = 12000;
 
  private MediaCodec mediaCodec;
  int m_width;
  int m_height;
  int m_framerate;
 
  public byte[] configbyte;
 
 
  public AvcEncoder(int width, int height, int framerate, int bitrate) {
 
    m_width = width;
    m_height = height;
    m_framerate = framerate;
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    try {
      mediaCodec = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
      e.printStackTrace();
    }
    // Configure the encoder
    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // Start the encoder
    mediaCodec.start();
    // Create the file that the encoded stream is written to
    createfile();
  }
 
  private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
  private BufferedOutputStream outputStream;
 
  private void createfile(){
    File file = new File(path);
    if(file.exists()){
      file.delete();
    }
    try {
      outputStream = new BufferedOutputStream(new FileOutputStream(file));
    } catch (Exception e){
      e.printStackTrace();
    }
  }
 
  private void StopEncoder() {
    try {
      mediaCodec.stop();
      mediaCodec.release();
    } catch (Exception e){
      e.printStackTrace();
    }
  }
 
  public boolean isRuning = false;
 
  public void StopThread(){
    isRuning = false;
    try {
      StopEncoder();
      outputStream.flush();
      outputStream.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
 
  int count = 0;
 
  public void StartEncoderThread(){
    Thread EncoderThread = new Thread(new Runnable() {
 
      @Override
      public void run() {
        isRuning = true;
        byte[] input = null;
        long pts = 0;
        long generateIndex = 0;
 
        while (isRuning) {
          // Access the queue in MainActivity that buffers frames waiting to be encoded
          if (MainActivity.YUVQueue.size() > 0){
            // Take one frame out of the buffer queue
            input = MainActivity.YUVQueue.poll();
            byte[] yuv420sp = new byte[m_width*m_height*3/2];
            // Convert the NV21 preview frame to NV12 (YUV420 semi-planar) before encoding
            NV21ToNV12(input, yuv420sp, m_width, m_height);
            input = yuv420sp;
          }
          if (input != null) {
            try {
              long startMs = System.currentTimeMillis();
              // Encoder input buffers
              ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
              // Encoder output buffers
              ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
              int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
              if (inputBufferIndex >= 0) {
                pts = computePresentationTime(generateIndex);
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                // Put the converted NV12 frame into the encoder input buffer
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                generateIndex += 1;
              }
 
              MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
              int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
              while (outputBufferIndex >= 0) {
                //Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                if ((bufferInfo.flags & BUFFER_FLAG_CODEC_CONFIG) != 0) {
                  // Cache the codec config data (SPS/PPS)
                  configbyte = outData;
                } else if ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) != 0) {
                  byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                  // Prepend the cached SPS/PPS to the key frame, then copy the encoded frame after it
                  System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                  System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
 
                  outputStream.write(keyframe, 0, keyframe.length);
                } else {
                  // Write the encoded frame to the file
                  outputStream.write(outData, 0, outData.length);
                }
 
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
              }
 
            } catch (Throwable t) {
              t.printStackTrace();
            }
            // Reset so the same frame is not encoded again while the queue is empty
            input = null;
          } else {
            try {
              Thread.sleep(500);
            } catch (InterruptedException e) {
              e.printStackTrace();
            }
          }
        }
      }
    });
    EncoderThread.start();
 
  }
 
  private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height){
    if (nv21 == null || nv12 == null) return;
    int framesize = width*height;
    // Copy the Y plane unchanged
    System.arraycopy(nv21, 0, nv12, 0, framesize);
    // Swap the interleaved chroma bytes: NV21 stores V/U pairs, NV12 stores U/V pairs
    for (int j = 0; j < framesize/2; j += 2){
      nv12[framesize + j] = nv21[framesize + j + 1];
      nv12[framesize + j + 1] = nv21[framesize + j];
    }
  }
 
  /**
   * Generates the presentation time for frame N, in microseconds.
   */
  private long computePresentationTime(long frameIndex) {
    return 132 + frameIndex * 1000000 / m_framerate;
  }
}
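
The getInputBuffers()/getOutputBuffers() calls used above are deprecated since API 21; on newer devices the same drain loop can be written with the per-index accessors getInputBuffer(int) and getOutputBuffer(int). A minimal sketch, assuming the same mediaCodec, TIMEOUT_USEC, input and pts variables as in the encoder thread above:

  // Sketch only: per-index buffer accessors (API 21+) instead of the deprecated buffer arrays.
  int inIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
  if (inIndex >= 0) {
    ByteBuffer inBuf = mediaCodec.getInputBuffer(inIndex);
    inBuf.clear();
    inBuf.put(input);
    mediaCodec.queueInputBuffer(inIndex, 0, input.length, pts, 0);
  }
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  int outIndex = mediaCodec.dequeueOutputBuffer(info, TIMEOUT_USEC);
  while (outIndex >= 0) {
    ByteBuffer outBuf = mediaCodec.getOutputBuffer(outIndex);
    byte[] outData = new byte[info.size];
    outBuf.get(outData);
    // ...handle outData exactly as in the loop above (config / key frame / normal frame)...
    mediaCodec.releaseOutputBuffer(outIndex, false);
    outIndex = mediaCodec.dequeueOutputBuffer(info, TIMEOUT_USEC);
  }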

3.activity_main.xml

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
  android:layout_width="match_parent"
  android:layout_height="match_parent" >
 
  <SurfaceView
    android:id="@+id/surfaceview"
    android:layout_width="match_parent"
    android:layout_height="match_parent"/>
 
 
</RelativeLayout>

4.Add the permissions

<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />

That is all for this article. I hope it helps with your studies, and I hope you will continue to support 服务器之家.

原文链接:https://www.cnblogs.com/CoderTian/p/6224605.html
