Technical Background
While building the GB28181 device-access module for Android, we ran into the following scenario. In use cases such as body-worn cameras or smart construction sites, once the GB28181 device-access module has registered with the national-standard (GB) platform, it normally only maintains the heartbeat (and possibly a real-time location subscription); live video is viewed on demand, and the network conditions are often far from ideal. This gave rise to a requirement: some developers want to record locally at a high resolution (e.g. 1920*1080) while uploading low-resolution data (e.g. 1280*720) when the GB platform requests a live view, much like the main stream and sub stream of an IPC.
Technical Implementation
Given this requirement, our solution is to encode two streams. Because of camera limitations, it is not possible to capture raw data at two resolutions simultaneously, so we capture at the high resolution and derive the low-resolution data by scaling.
Code Example
This article uses Android camera2 capture as an example. Starting with the data source, each frame is handled as follows:
@Override
public void onCameraImageData(Image image) {
    if (null == libPublisher)
        return;

    if (isPushingRtmp || isRTSPPublisherRunning || isGB28181StreamRunning || isRecording) {
        if (0 == publisherHandle)
            return;

        Image.Plane[] planes = image.getPlanes();
        int w = image.getWidth(), h = image.getHeight();
        int y_offset = 0, u_offset = 0, v_offset = 0;

        // Apply the crop rectangle, if any, by adjusting width/height and the per-plane offsets
        Rect crop_rect = image.getCropRect();
        if (crop_rect != null && !crop_rect.isEmpty()) {
            w = crop_rect.width();
            h = crop_rect.height();
            y_offset += crop_rect.top * planes[0].getRowStride() + crop_rect.left * planes[0].getPixelStride();
            u_offset += (crop_rect.top / 2) * planes[1].getRowStride() + (crop_rect.left / 2) * planes[1].getPixelStride();
            v_offset += (crop_rect.top / 2) * planes[2].getRowStride() + (crop_rect.left / 2) * planes[2].getPixelStride();
            // Log.i(TAG, "crop w:" + w + " h:" + h + " y_offset:" + y_offset + " u_offset:" + u_offset + " v_offset:" + v_offset);
        }

        int scale_w = 0, scale_h = 0, scale_filter_mode = 0;
        scale_filter_mode = 3;

        int rotation_degree = cameraImageRotationDegree_;
        if (rotation_degree < 0) {
            Log.i(TAG, "onCameraImageData rotation_degree < 0, may need to set orientation_ to 0, 90, 180 or 270");
            return;
        }

        if (!post_image_lock_.tryLock()) {
            Log.i(TAG, "post_image_lock_.tryLock return false");
            return;
        }

        try {
            if (publisherHandle != 0) {
                if (isPushingRtmp || isRTSPPublisherRunning || isGB28181StreamRunning || isRecording) {
                    libPublisher.PostLayerImageYUV420888ByteBuffer(publisherHandle, 0, 0, 0,
                            planes[0].getBuffer(), y_offset, planes[0].getRowStride(),
                            planes[1].getBuffer(), u_offset, planes[1].getRowStride(),
                            planes[2].getBuffer(), v_offset, planes[2].getRowStride(),
                            planes[1].getPixelStride(), w, h,
                            0, 0, scale_w, scale_h, scale_filter_mode, rotation_degree);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "onCameraImageData Exception:", e);
        } finally {
            post_image_lock_.unlock();
        }
    }
}
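For context, the Image passed to onCameraImageData() typically comes from an ImageReader bound to the camera2 capture session. Below is a minimal sketch, assuming a 1920*1080 YUV_420_888 reader whose frames are forwarded to the callback above; the reader setup itself is not part of the demo code, and cameraBackgroundHandler is an assumed background-thread Handler.

// Sketch only: a YUV_420_888 ImageReader feeding onCameraImageData().
// 1920x1080 matches the high-resolution capture used for local recording.
ImageReader imageReader = ImageReader.newInstance(1920, 1080, ImageFormat.YUV_420_888, 2);
imageReader.setOnImageAvailableListener(reader -> {
    Image image = reader.acquireLatestImage();
    if (image != null) {
        try {
            onCameraImageData(image);   // forward the frame to the handler shown above
        } finally {
            image.close();              // always release the Image back to the reader
        }
    }
}, cameraBackgroundHandler);            // assumed Handler running on a background thread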
Note the PostLayerImageYUV420888ByteBuffer() interface used above; it is designed as follows:
/*
 * SmartPublisherJniV2.java
 * SmartPublisherJniV2
 *
 * Author: https://daniusdk.com
 * Created by DaniuLive on 2015/09/20.
 */

/**
 * Post a YUV420888 layer image. This interface is provided specifically for
 * android.media.Image frames in android.graphics.ImageFormat.YUV_420_888 format.
 *
 * @param index: layer index, must be >= 0
 * @param left: x coordinate of the layer's top-left corner; pass 0 for layer 0
 * @param top: y coordinate of the layer's top-left corner; pass 0 for layer 0
 * @param y_plane: corresponds to android.media.Image.Plane[0].getBuffer()
 * @param y_offset: image offset, mainly intended for clipping; usually 0
 * @param y_row_stride: corresponds to android.media.Image.Plane[0].getRowStride()
 * @param u_plane: android.media.Image.Plane[1].getBuffer()
 * @param u_offset: image offset, mainly intended for clipping; usually 0
 * @param u_row_stride: android.media.Image.Plane[1].getRowStride()
 * @param v_plane: corresponds to android.media.Image.Plane[2].getBuffer()
 * @param v_offset: image offset, mainly intended for clipping; usually 0
 * @param v_row_stride: corresponds to android.media.Image.Plane[2].getRowStride()
 * @param uv_pixel_stride: corresponds to android.media.Image.Plane[1].getPixelStride()
 * @param width: width, must be greater than 1 and even
 * @param height: height, must be greater than 1 and even
 * @param is_vertical_flip: vertical flip, 0 = no flip, 1 = flip
 * @param is_horizontal_flip: horizontal flip, 0 = no flip, 1 = flip
 * @param scale_width: scaled width, must be even; 0 or negative means no scaling
 * @param scale_height: scaled height, must be even; 0 or negative means no scaling
 * @param scale_filter_mode: scaling quality; 0 uses the default (fastest); optional range is [1,3], where a larger value gives better quality but slower scaling
 * @param rotation_degree: clockwise rotation, must be 0, 90, 180 or 270; note that rotation is applied after scaling and vertical/horizontal flipping, so mind the order
 *
 * @return {0} if successful
 */
public native int PostLayerImageYUV420888ByteBuffer(long handle, int index, int left, int top,
        ByteBuffer y_plane, int y_offset, int y_row_stride,
        ByteBuffer u_plane, int u_offset, int u_row_stride,
        ByteBuffer v_plane, int v_offset, int v_row_stride,
        int uv_pixel_stride, int width, int height,
        int is_vertical_flip, int is_horizontal_flip,
        int scale_width, int scale_height, int scale_filter_mode,
        int rotation_degree);
As you can see, the scale_width and scale_height parameters specify the target scaling dimensions, and if the camera's capture orientation is wrong, rotation_degree can be set to rotate the video data.
With this in place, the problem of producing two data sources is solved, as sketched below.
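To make the dual-stream idea concrete, here is a minimal sketch (not the demo's actual code) of feeding one camera2 frame to two publisher instances through the interface shown above. The names postToBothPublishers, recorderPublisherHandle and gbPublisherHandle are hypothetical; they stand for two separately created and configured publisher instances, one keeping the full 1920*1080 frame for local recording and one scaling down to 1280*720 for the GB28181 upload.

// Sketch only: post the same captured frame to a high-res recording instance
// and a low-res GB28181 instance, letting the SDK scale the second one.
void postToBothPublishers(Image.Plane[] planes, int y_offset, int u_offset, int v_offset,
                          int w, int h, int rotation_degree) {
    int scale_filter_mode = 3; // best scaling quality per the interface doc above

    // Local recording instance: keep the captured 1920x1080 frame, no scaling (scale_width/height = 0)
    libPublisher.PostLayerImageYUV420888ByteBuffer(recorderPublisherHandle, 0, 0, 0,
            planes[0].getBuffer(), y_offset, planes[0].getRowStride(),
            planes[1].getBuffer(), u_offset, planes[1].getRowStride(),
            planes[2].getBuffer(), v_offset, planes[2].getRowStride(),
            planes[1].getPixelStride(), w, h,
            0, 0, 0, 0, scale_filter_mode, rotation_degree);

    // GB28181 upload instance: same source frame, scaled down to 1280x720 before encoding
    libPublisher.PostLayerImageYUV420888ByteBuffer(gbPublisherHandle, 0, 0, 0,
            planes[0].getBuffer(), y_offset, planes[0].getRowStride(),
            planes[1].getBuffer(), u_offset, planes[1].getRowStride(),
            planes[2].getBuffer(), v_offset, planes[2].getRowStride(),
            planes[1].getPixelStride(), w, h,
            0, 0, 1280, 720, scale_filter_mode, rotation_degree);
}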
When the GB platform initiates a live video request (Invite), the Android GB28181 device-access module handles it as follows:
@Override
public void ntsOnInvitePlay(String deviceId, SessionDescription session_des) {
    handler_.postDelayed(new Runnable() {
        @Override
        public void run() {
            // Respond with 180 Ringing first
            gb28181_agent_.respondPlayInvite(180, device_id_);

            MediaSessionDescription video_des = null;
            SDPRtpMapAttribute ps_rtpmap_attr = null;

            // GB28181 video is packed as PS
            Vector<MediaSessionDescription> video_des_list = session_des_.getVideoPSDescriptions();
            if (video_des_list != null && !video_des_list.isEmpty()) {
                for (MediaSessionDescription m : video_des_list) {
                    if (m != null && m.isValidAddressType() && m.isHasAddress()) {
                        video_des = m;
                        ps_rtpmap_attr = video_des.getPSRtpMapAttribute();
                        break;
                    }
                }
            }

            if (null == video_des) {
                gb28181_agent_.respondPlayInvite(488, device_id_);
                Log.i(TAG, "ntsOnInvitePlay get video description is null, response 488, device_id:" + device_id_);
                return;
            }

            if (null == ps_rtpmap_attr) {
                gb28181_agent_.respondPlayInvite(488, device_id_);
                Log.i(TAG, "ntsOnInvitePlay get ps rtp map attribute is null, response 488, device_id:" + device_id_);
                return;
            }

            Log.i(TAG, "ntsOnInvitePlay, device_id:" + device_id_ + ", is_tcp:" + video_des.isRTPOverTCP()
                    + " rtp_port:" + video_des.getPort() + " ssrc:" + video_des.getSSRC()
                    + " address_type:" + video_des.getAddressType() + " address:" + video_des.getAddress());

            long rtp_sender_handle = libPublisher.CreateRTPSender(0);
            if (rtp_sender_handle == 0) {
                gb28181_agent_.respondPlayInvite(488, device_id_);
                Log.i(TAG, "ntsOnInvitePlay CreateRTPSender failed, response 488, device_id:" + device_id_);
                return;
            }

            gb28181_rtp_payload_type_ = ps_rtpmap_attr.getPayloadType();
            gb28181_rtp_encoding_name_ = ps_rtpmap_attr.getEncodingName();

            libPublisher.SetRTPSenderTransportProtocol(rtp_sender_handle, video_des.isRTPOverUDP() ? 0 : 1);
            libPublisher.SetRTPSenderIPAddressType(rtp_sender_handle, video_des.isIPv4() ? 0 : 1);
            libPublisher.SetRTPSenderLocalPort(rtp_sender_handle, 0);
            libPublisher.SetRTPSenderSSRC(rtp_sender_handle, video_des.getSSRC());
            libPublisher.SetRTPSenderSocketSendBuffer(rtp_sender_handle, 2 * 1024 * 1024); // 2MB send buffer
            libPublisher.SetRTPSenderClockRate(rtp_sender_handle, ps_rtpmap_attr.getClockRate());
            libPublisher.SetRTPSenderDestination(rtp_sender_handle, video_des.getAddress(), video_des.getPort());

            if (libPublisher.InitRTPSender(rtp_sender_handle) != 0) {
                gb28181_agent_.respondPlayInvite(488, device_id_);
                libPublisher.DestoryRTPSender(rtp_sender_handle);
                return;
            }

            int local_port = libPublisher.GetRTPSenderLocalPort(rtp_sender_handle);
            if (local_port == 0) {
                gb28181_agent_.respondPlayInvite(488, device_id_);
                libPublisher.DestoryRTPSender(rtp_sender_handle);
                return;
            }

            Log.i(TAG, "get local_port:" + local_port);

            String local_ip_addr = IPAddrUtils.getIpAddress(context_);

            // Build the local media description for the 200 OK answer
            MediaSessionDescription local_video_des = new MediaSessionDescription(video_des.getType());
            local_video_des.addFormat(String.valueOf(ps_rtpmap_attr.getPayloadType()));
            local_video_des.addRtpMapAttribute(ps_rtpmap_attr);
            local_video_des.setAddressType(video_des.getAddressType());
            local_video_des.setAddress(local_ip_addr);
            local_video_des.setPort(local_port);
            local_video_des.setTransportProtocol(video_des.getTransportProtocol());
            local_video_des.setSSRC(video_des.getSSRC());

            if (!gb28181_agent_.respondPlayInviteOK(device_id_, local_video_des)) {
                libPublisher.DestoryRTPSender(rtp_sender_handle);
                Log.e(TAG, "ntsOnInvitePlay call respondPlayInviteOK failed.");
                return;
            }

            gb28181_rtp_sender_handle_ = rtp_sender_handle;
        }

        private String device_id_;
        private SessionDescription session_des_;

        public Runnable set(String device_id, SessionDescription session_des) {
            this.device_id_ = device_id;
            this.session_des_ = session_des;
            return this;
        }
    }.set(deviceId, session_des), 0);
}
The ACK is handled as follows:
@Override
public void ntsOnAckPlay(String deviceId) {
    handler_.postDelayed(new Runnable() {
        @Override
        public void run() {
            Log.i(TAG, "ntsOnACKPlay, device_id:" + device_id_);

            // If no other output (RTSP/RTMP/recording) is running, initialize the publisher first
            if (!isRTSPPublisherRunning && !isPushingRtmp && !isRecording) {
                InitAndSetConfig();
            }

            libPublisher.SetGB28181RTPSender(publisherHandle, gb28181_rtp_sender_handle_,
                    gb28181_rtp_payload_type_, gb28181_rtp_encoding_name_);

            //libPublisher.SetGBTCPConnectTimeout(publisherHandle, 10 * 60 * 1000);
            //libPublisher.SetGBInitialTCPReconnectInterval(publisherHandle, 1000);
            //libPublisher.SetGBInitialTCPMaxReconnectAttempts(publisherHandle, 3);

            int startRet = libPublisher.StartGB28181MediaStream(publisherHandle);
            if (startRet != 0) {
                if (!isRTSPPublisherRunning && !isPushingRtmp && !isRecording) {
                    if (publisherHandle != 0) {
                        long handle = publisherHandle;
                        publisherHandle = 0;
                        libPublisher.SmartPublisherClose(handle);
                    }
                }

                destoryRTPSender();

                Log.e(TAG, "Failed to start GB28181 service..");
                return;
            }

            if (!isRTSPPublisherRunning && !isPushingRtmp && !isRecording) {
                CheckInitAudioRecorder();
            }

            startLayerPostThread();

            isGB28181StreamRunning = true;
        }

        private String device_id_;

        public Runnable set(String device_id) {
            this.device_id_ = device_id;
            return this;
        }
    }.set(deviceId), 0);
}
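The destoryRTPSender() call above is an app-level helper that is not reproduced in this post. A minimal sketch, assuming it simply tears down the RTP sender handle created in ntsOnInvitePlay via the SDK's DestoryRTPSender():

// Sketch only: release the RTP sender created when the Invite was accepted
private void destoryRTPSender() {
    if (libPublisher != null && gb28181_rtp_sender_handle_ != 0) {
        libPublisher.DestoryRTPSender(gb28181_rtp_sender_handle_);
        gb28181_rtp_sender_handle_ = 0;
    }
}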
The local recording code is as follows:
class ButtonStartRecorderListener implements View.OnClickListener {
    public void onClick(View v) {
        if (isRecording) {
            stopRecorder();

            if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
                ConfigControlEnable(true);
            }

            btnStartRecorder.setText("实时录像");   // "Start recording"
            btnPauseRecorder.setText("暂停录像");   // "Pause recording"
            btnPauseRecorder.setEnabled(false);

            isPauseRecording = true;
            return;
        }

        Log.i(TAG, "onClick start recorder..");

        if (libPublisher == null)
            return;

        // If no other output (RTMP/RTSP/GB28181) is running, initialize the publisher first
        if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
            InitAndSetConfig();
        }

        ConfigRecorderParam();

        int startRet = libPublisher.SmartPublisherStartRecorder(publisherHandle);
        if (startRet != 0) {
            if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
                if (publisherHandle != 0) {
                    long handle = publisherHandle;
                    publisherHandle = 0;
                    libPublisher.SmartPublisherClose(handle);
                }
            }

            Log.e(TAG, "Failed to start recorder.");
            return;
        }

        if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
            CheckInitAudioRecorder();
            ConfigControlEnable(false);
        }

        startLayerPostThread();

        btnStartRecorder.setText("停止录像");   // "Stop recording"
        isRecording = true;

        btnPauseRecorder.setEnabled(true);
        isPauseRecording = true;
    }
}
Stopping recording:
// Stop recording
private void stopRecorder() {
    if (!isRecording)
        return;

    isRecording = false;

    if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning)
        stopLayerPostThread();

    // Release the audio recorder only if no other output still needs it
    if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
        if (audioRecord_ != null) {
            Log.i(TAG, "stopRecorder, call audioRecord_.StopRecording..");
            audioRecord_.Stop();

            if (audioRecordCallback_ != null) {
                audioRecord_.RemoveCallback(audioRecordCallback_);
                audioRecordCallback_ = null;
            }

            audioRecord_ = null;
        }
    }

    if (null == libPublisher || 0 == publisherHandle)
        return;

    libPublisher.SmartPublisherStopRecorder(publisherHandle);

    if (!isPushingRtmp && !isRTSPPublisherRunning && !isGB28181StreamRunning) {
        releasePublisherHandle();
    }
}
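releasePublisherHandle() is another app-level helper that is not reproduced here. A minimal sketch, assuming it just closes the publisher instance with the SDK's SmartPublisherClose() once no other output is using it:

// Sketch only: close the publisher instance once nothing else is using it
private void releasePublisherHandle() {
    if (libPublisher != null && publisherHandle != 0) {
        long handle = publisherHandle;
        publisherHandle = 0;
        libPublisher.SmartPublisherClose(handle);
    }
}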
We also implemented dedicated pause/resume handling for recording:
class ButtonPauseRecorderListener implements View.OnClickListener {
    public void onClick(View v) {
        if (isRecording) {
            if (isPauseRecording) {
                // Pause recording (second parameter 1 = pause)
                int ret = libPublisher.SmartPublisherPauseRecorder(publisherHandle, 1);
                if (ret == 0) {
                    isPauseRecording = false;
                    btnPauseRecorder.setText("恢复录像");   // "Resume recording"
                } else if (ret == 3) {
                    Log.e(TAG, "Pause recorder failed, please re-try again..");
                } else {
                    Log.e(TAG, "Pause recorder failed..");
                }
            } else {
                // Resume recording (second parameter 0 = resume)
                int ret = libPublisher.SmartPublisherPauseRecorder(publisherHandle, 0);
                if (ret == 0) {
                    isPauseRecording = true;
                    btnPauseRecorder.setText("暂停录像");   // "Pause recording"
                } else if (ret == 3) {
                    Log.e(TAG, "Resume recorder failed, please re-try again..");
                } else {
                    Log.e(TAG, "Resume recorder failed..");
                }
            }
        }
    }
}
Technical Summary
Note that to encode two streams at different resolutions, the low-resolution video uploaded live by the GB28181 device-access side and the high-resolution video recorded locally must correspond to two separate publisher instances, and wherever video and audio data are posted, the data must be posted to both instances so that each can encode its own stream. For two simultaneous video encodes, hardware encoding is recommended; software encoding generally cannot keep up.