
Receiving RTP-Transmitted Camera Data on Android


The previous post covered the sending side, so this one looks at the receiving end.
The receiver's job is simple: receive the packets, decode them, and render the preview onto the view's surface (a TextureView in this demo).
Since it's straightforward, I'll just post the code.

It mainly follows this blog post: android硬编码h264数据,并使用rtp推送数据流,实现一个简单的直播-MediaCodec(二)

ClientTextureView

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.TextureView;

import androidx.annotation.RequiresApi; // android.support.annotation.RequiresApi on older projects

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.util.Arrays;

public class ClientTextureView extends TextureView implements TextureView.SurfaceTextureListener {
    private static final String MIME_TYPE = "video/avc"; // H.264
    private static final String TAG = "ClientTextureView";
    private static final int PORT = 5004;                // must match the sender's RTP port
    private DatagramSocket mSocket;
    private MediaCodec mDecode;
    private byte[] mRtpData = new byte[80000];   // raw RTP packet
    private byte[] mH264Data = new byte[80000];  // extracted H.264 payload

    public ClientTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSurfaceTextureListener(this);
        try {
            // Create the socket unbound so setReuseAddress() takes effect before bind();
            // new DatagramSocket(PORT) binds immediately, making setReuseAddress a no-op.
            mSocket = new DatagramSocket(null);
            mSocket.setReuseAddress(true);
            mSocket.setBroadcast(true);
            mSocket.bind(new InetSocketAddress(PORT));
        } catch (SocketException e) {
            e.printStackTrace();
        }
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        Log.i(TAG, "SurfaceTexture width " + width + "; height " + height);
        // Start receiving and decoding as soon as the render surface is ready.
        new PreviewThread(new Surface(surface), width, height);
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (mSocket != null) {
            mSocket.close(); // also unblocks the blocking receive() in PreviewThread
            mSocket = null;
        }
        if (mDecode != null) {
            // Release the decoder along with the socket.
            mDecode.stop();
            mDecode.release();
            mDecode = null;
        }
        return false;
    }
    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) { }

    private class PreviewThread extends Thread {
        DatagramPacket datagramPacket = null;

        @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
        public PreviewThread(Surface surface, int width, int height) {
            Log.i(TAG, "PreviewThread surface width " + width + "; height " + height);
            try {
                mDecode = MediaCodec.createDecoderByType(MIME_TYPE);
                final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
                // Encoder-oriented keys; a decoder generally ignores them.
                format.setInteger(MediaFormat.KEY_BIT_RATE, 40000);
                format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
                format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
                // Hard-coded SPS/PPS (each with a 00 00 00 01 start code), copied from the
                // sender's encoder output; they must match the sender's actual stream.
                byte[] header_sps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
                byte[] header_pps = {0, 0, 0, 1, 104, -54, 67, -56};
                format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
                format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));

                // Render the decoder's output directly to the TextureView's surface.
                mDecode.configure(format, surface, null, 0);
                mDecode.start();
            } catch (IOException e) {
                e.printStackTrace();
            }
            start();
        }

        @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void run() {
            byte[] data = new byte[80000];
            int h264Length = 0;
            while (mSocket != null) {
                try {
                    datagramPacket = new DatagramPacket(data, data.length);
                    mSocket.receive(datagramPacket); // blocks until a packet arrives
                } catch (IOException e) {
                    e.printStackTrace();
                    break; // socket closed (e.g. surface destroyed): stop the loop
                }
                mRtpData = datagramPacket.getData();
                if (mRtpData != null) {
                    // 0x80 (-128 as a signed byte) = RTP version 2, no padding/extension;
                    // 96 = the dynamic payload type used by the sender. Anything else is dropped.
                    if (mRtpData[0] == -128 && mRtpData[1] == 96) {
                        // The sender writes the payload length as a big-endian int in
                        // bytes 12-15, directly after the 12-byte RTP header.
                        int l1 = (mRtpData[12] << 24) & 0xff000000;
                        int l2 = (mRtpData[13] << 16) & 0x00ff0000;
                        int l3 = (mRtpData[14] << 8) & 0x0000ff00;
                        int l4 = mRtpData[15] & 0x000000ff;
                        h264Length = l1 + l2 + l3 + l4;
                        Log.i(TAG, "run: h264Length=" + h264Length);
                        System.arraycopy(mRtpData, 16, mH264Data, 0, h264Length);
                        Log.i(TAG, "run: h264Data header=" + Arrays.toString(Arrays.copyOf(mH264Data, 23)));
                        // Feed only the valid payload bytes, not the whole 80000-byte buffer.
                        offerDecoder(mH264Data, h264Length);
                    }
                }
            }
        }
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    private void offerDecoder(byte[] input, int length) {
        Log.d(TAG, "offerDecoder: ");
        try {
            // Queue one H.264 access unit into the decoder's input.
            ByteBuffer[] inputBuffers = mDecode.getInputBuffers();
            int inputBufferIndex = mDecode.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                try {
                    inputBuffer.put(input, 0, length);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                mDecode.queueInputBuffer(inputBufferIndex, 0, length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            // Drain every decoded frame that is ready.
            int outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                // If a valid surface was specified when configuring the codec,
                // passing true renders this output buffer to the surface.
                mDecode.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = mDecode.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}
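
As an aside, the shift-and-mask arithmetic in run() can be collapsed with a ByteBuffer, since the sender stores the payload length as a big-endian (network byte order) int in bytes 12-15. A minimal sketch; readPayloadLength is a hypothetical helper, not part of the original code:

// Hypothetical helper: reads the 4-byte big-endian payload length that the
// sender writes right after the 12-byte RTP header (bytes 12-15).
private static int readPayloadLength(byte[] rtpPacket) {
    return ByteBuffer.wrap(rtpPacket, 12, 4).getInt(); // ByteBuffer defaults to big-endian
}

With it, the whole l1..l4 block becomes h264Length = readPayloadLength(mRtpData);.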

MainActivity

public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main); // the layout hosts the ClientTextureView
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); // lock to portrait
    }
}
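
The original post doesn't include activity_main.xml, but setContentView() only works if that layout hosts the custom view. A minimal sketch, assuming the package name declared in the manifest below:

<?xml version="1.0" encoding="utf-8"?>
<!-- Fills the screen with the receiving/decoding view -->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <com.byd.rtpclientdemo.ClientTextureView
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</FrameLayout>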

AndroidManifest

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.byd.rtpclientdemo">

    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.INTERNET"/>
    <uses-permission android:name="android.permission.RECORD_VIDEO"/>
    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.hardware.camera.autofocus" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-feature android:name="android.hardware.wifi" android:required="true" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>
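
One caveat: from Android 6.0 (API 23) onward, CAMERA, RECORD_AUDIO, and WRITE_EXTERNAL_STORAGE are dangerous permissions, so declaring them in the manifest is not enough; they must also be granted at runtime. (The receiver itself strictly needs only INTERNET, a normal install-time permission.) A minimal sketch for MainActivity.onCreate(); the request code 1 is arbitrary:

// Needs: import android.Manifest; import android.content.pm.PackageManager; import android.os.Build;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
        && checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
    requestPermissions(new String[]{
            Manifest.permission.CAMERA,
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
    }, 1); // result is delivered to onRequestPermissionsResult()
}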

The code here is nearly identical to the referenced article, so I won't upload it to GitHub; just refer to the post linked above. If you'd like it uploaded, feel free to leave a comment.