Playing RTSP or RTMP Streams on Linux with Unity3D

Background

Although the Windows platform has many advantages, the growth of Linux is unstoppable, especially in traditional industries. The Linux ecosystem, however, still falls a bit short, particularly when it comes to some commonly used components. Based on our existing RTSP/RTMP playback module for Linux, this article describes how to build RTSP and RTMP live playback in Unity on Linux.

Technical Implementation

There is actually not much to explain on the Unity side: just as on Windows, Android and iOS, we call the native playback module, receive the decoded data through a callback, and render it in Unity. The real technical difficulty lies in the native layer, namely pulling the stream, decoding it, and delivering the data via callbacks.

First, a quick look at the result. In this demo, a Windows machine captures a stopwatch window, encodes and pushes it to an RTMP server, and the Unity3D RTMP player on Linux pulls and plays the stream; the end-to-end latency is in the millisecond range.

[Demo: Unity3D RTMP playback on Linux]

On Linux, we have the decoded data called back as YUV, i.e. NT_SP_E_VIDEO_FRAME_FROMAT_I420:

/* Video frame format definitions */
public enum NT_SP_E_VIDEO_FRAME_FORMAT : uint
{
    NT_SP_E_VIDEO_FRAME_FORMAT_RGB32 = 1, // 32-bit RGB: r, g, b take one byte each, the fourth byte is reserved. Memory layout is bb gg rr xx, matching Windows bitmaps; read as a little-endian DWORD the highest byte is xx, then rr, gg, bb
    NT_SP_E_VIDEO_FRAME_FORMAT_ARGB  = 2, // 32-bit ARGB, memory layout bb gg rr aa, matching Windows bitmaps
    NT_SP_E_VIDEO_FRAME_FROMAT_I420  = 3, // YUV420 (I420): the three components are stored in three separate planes
}

Before starting playback, register the callback:

//video frame callback (YUV/RGB)
videoctrl[sel].video_frame_call_back_ = new SP_SDKVideoFrameCallBack(NT_SP_SDKVideoFrameCallBack);
NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(videoctrl[sel].player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FROMAT_I420, window_handle_, videoctrl[sel].video_frame_call_back_);

The video frame structure:

/* Video frame structure. */
[StructLayoutAttribute(LayoutKind.Sequential)]
public struct NT_SP_VideoFrame
{
    public Int32 format_;     // image format, see NT_SP_E_VIDEO_FRAME_FORMAT
    public Int32 width_;      // image width
    public Int32 height_;     // image height

    public Int64 timestamp_;  // timestamp in ms; usually 0 and not used

    // image data; ARGB and RGB32 use only the first plane, I420 uses the first three
    public IntPtr plane0_;
    public IntPtr plane1_;
    public IntPtr plane2_;
    public IntPtr plane3_;

    // bytes per row of each plane; for ARGB and RGB32 this must be width_ * 4 to stay compatible with Windows bitmaps,
    // for I420, stride0_ is the Y stride, stride1_ the U stride and stride2_ the V stride
    public Int32 stride0_;
    public Int32 stride1_;
    public Int32 stride2_;
    public Int32 stride3_;
}
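
To make the I420 layout concrete: for a W x H frame the Y plane holds W x H bytes and the U and V planes each hold ((W+1)/2) x ((H+1)/2) bytes, which is exactly what the callback below allocates. A quick stand-alone illustration (not SDK code):

using System;

// Quick illustration of I420 plane sizes; not part of the SDK.
class I420SizeDemo
{
    static void Main()
    {
        int width = 1920, height = 1080;

        int yStride = width;              // tightly packed destination strides
        int uStride = (width + 1) / 2;
        int vStride = uStride;

        int ySize = yStride * height;
        int uSize = uStride * ((height + 1) / 2);
        int vSize = uSize;

        // For 1920x1080: Y = 2073600, U = V = 518400, total = 3110400 bytes (width * height * 3 / 2)
        Console.WriteLine($"Y={ySize}, U={uSize}, V={vSize}, total={ySize + uSize + vSize}");
    }
}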

The callback implementation:

private void SDKVideoFrameCallBack(UInt32 status, IntPtr frame, int sel)
{
    // the callback hands us a native frame pointer; marshal it into the managed structure
    NT_SP_VideoFrame video_frame = (NT_SP_VideoFrame)Marshal.PtrToStructure(frame, typeof(NT_SP_VideoFrame));

    VideoFrame u3d_frame = new VideoFrame();

    u3d_frame.width_  = video_frame.width_;
    u3d_frame.height_ = video_frame.height_;

    u3d_frame.timestamp_ = (UInt64)video_frame.timestamp_;

    // tightly packed destination strides and plane sizes
    int d_y_stride = video_frame.width_;
    int d_u_stride = (video_frame.width_ + 1) / 2;
    int d_v_stride = d_u_stride;

    int d_y_size = d_y_stride * video_frame.height_;
    int d_u_size = d_u_stride * ((video_frame.height_ + 1) / 2);
    int d_v_size = d_u_size;

    int u_v_height = (u3d_frame.height_ + 1) / 2;

    u3d_frame.y_stride_ = d_y_stride;
    u3d_frame.u_stride_ = d_u_stride;
    u3d_frame.v_stride_ = d_v_stride;

    u3d_frame.y_data_ = new byte[d_y_size];
    u3d_frame.u_data_ = new byte[d_u_size];
    u3d_frame.v_data_ = new byte[d_v_size];

    // copy each plane from the native buffer into the managed arrays
    CopyFramePlane(u3d_frame.y_data_, d_y_stride,
        video_frame.plane0_, video_frame.stride0_, u3d_frame.height_);

    CopyFramePlane(u3d_frame.u_data_, d_u_stride,
        video_frame.plane1_, video_frame.stride1_, u_v_height);

    CopyFramePlane(u3d_frame.v_data_, d_v_stride,
        video_frame.plane2_, video_frame.stride2_, u_v_height);

    // publish the latest frame for the Unity update loop
    lock (videoctrl[sel].frame_lock_)
    {
        videoctrl[sel].cur_video_frame_ = u3d_frame;
    }
}
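
CopyFramePlane is not listed above; its job is to copy a plane row by row from the native buffer, whose stride may include padding, into the tightly packed managed array. A minimal sketch of such a helper (the signature matches the calls above, the body is an assumption):

// Sketch only: copy 'height' rows from a native plane into a packed managed buffer.
// The source stride may be wider than the destination, so each row is copied separately.
private static void CopyFramePlane(byte[] dst, int dst_stride, IntPtr src, int src_stride, int height)
{
    if (dst == null || src == IntPtr.Zero || height <= 0)
        return;

    int row_bytes = Math.Min(dst_stride, src_stride); // bytes actually carrying pixel data per row

    for (int i = 0; i < height; ++i)
    {
        // Marshal.Copy copies from unmanaged memory into the managed array at the given offset
        Marshal.Copy(new IntPtr(src.ToInt64() + (long)i * src_stride), dst, i * dst_stride, row_bytes);
    }
}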

Once the Unity layer has the video frame, it only needs to refresh the textures:

private void UpdateProc(int sel)
{
    VideoFrame video_frame = null;

    // take the most recent frame published by the callback
    lock (videoctrl[sel].frame_lock_)
    {
        video_frame = videoctrl[sel].cur_video_frame_;

        videoctrl[sel].cur_video_frame_ = null;
    }

    if (video_frame == null)
        return;

    if (!videoctrl[sel].is_need_get_frame_)
        return;

    if (videoctrl[sel].player_handle_ == IntPtr.Zero)
        return;

    // reinitialize the textures if the video size or strides changed
    if (!videoctrl[sel].is_need_init_texture_)
    {
        if (video_frame.width_ != videoctrl[sel].video_width_
            || video_frame.height_ != videoctrl[sel].video_height_
            || video_frame.y_stride_ != videoctrl[sel].y_row_bytes_
            || video_frame.u_stride_ != videoctrl[sel].u_row_bytes_
            || video_frame.v_stride_ != videoctrl[sel].v_row_bytes_)
        {
            videoctrl[sel].is_need_init_texture_ = true;
        }
    }

    if (videoctrl[sel].is_need_init_texture_)
    {
        if (InitYUVTexture(video_frame, sel))
        {
            videoctrl[sel].is_need_init_texture_ = false;
        }
    }

    UpdateYUVTexture(video_frame, sel);
}
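
InitYUVTexture is not listed in the post either. The idea is to (re)create one single-channel texture per plane, sized to the plane strides, and hand them to the material that does the YUV-to-RGB conversion. A rough sketch, in which the material_ field and the _YTex/_UTex/_VTex shader property names are assumptions rather than part of the original code:

// Sketch only: (re)create one single-channel texture per YUV plane and hand them to the material.
// Texture sizes follow the plane strides, so LoadRawTextureData() gets exactly the bytes it expects.
private bool InitYUVTexture(VideoFrame video_frame, int sel)
{
    if (video_frame == null || video_frame.width_ <= 0 || video_frame.height_ <= 0)
        return false;

    int u_v_height = (video_frame.height_ + 1) / 2;

    videoctrl[sel].video_width_  = video_frame.width_;
    videoctrl[sel].video_height_ = video_frame.height_;
    videoctrl[sel].y_row_bytes_  = video_frame.y_stride_;
    videoctrl[sel].u_row_bytes_  = video_frame.u_stride_;
    videoctrl[sel].v_row_bytes_  = video_frame.v_stride_;

    // Alpha8 gives one byte per texel, which matches a raw Y/U/V plane
    videoctrl[sel].yTexture_ = new Texture2D(video_frame.y_stride_, video_frame.height_, TextureFormat.Alpha8, false);
    videoctrl[sel].uTexture_ = new Texture2D(video_frame.u_stride_, u_v_height, TextureFormat.Alpha8, false);
    videoctrl[sel].vTexture_ = new Texture2D(video_frame.v_stride_, u_v_height, TextureFormat.Alpha8, false);

    // material_ and the _YTex/_UTex/_VTex property names are hypothetical; they stand for
    // whatever YUV-to-RGB shader is attached to the object that displays the video
    Material material = videoctrl[sel].material_;
    if (material != null)
    {
        material.SetTexture("_YTex", videoctrl[sel].yTexture_);
        material.SetTexture("_UTex", videoctrl[sel].uTexture_);
        material.SetTexture("_VTex", videoctrl[sel].vTexture_);
    }

    return true;
}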

The UpdateYUVTexture implementation:

private void UpdateYUVTexture(VideoFrame video_frame, int sel)
{
    if (video_frame.y_data_ == null || video_frame.u_data_ == null || video_frame.v_data_ == null)
    {
        Debug.Log("video frame with null..");
        return;
    }

    // upload each packed plane to its texture
    if (videoctrl[sel].yTexture_ != null)
    {
        videoctrl[sel].yTexture_.LoadRawTextureData(video_frame.y_data_);
        videoctrl[sel].yTexture_.Apply();
    }

    if (videoctrl[sel].uTexture_ != null)
    {
        videoctrl[sel].uTexture_.LoadRawTextureData(video_frame.u_data_);
        videoctrl[sel].uTexture_.Apply();
    }

    if (videoctrl[sel].vTexture_ != null)
    {
        videoctrl[sel].vTexture_.LoadRawTextureData(video_frame.v_data_);
        videoctrl[sel].vTexture_.Apply();
    }
}
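
UpdateProc itself has to be driven from somewhere on the Unity main thread; a plausible sketch is simply calling it from the MonoBehaviour's Update():

// Sketch only: UpdateProc() is assumed to be driven once per rendered frame,
// e.g. from the MonoBehaviour's Update(); UpdateProc() itself guards against
// missing frames and closed players.
void Update()
{
    for (int sel = 0; sel < videoctrl.Length; ++sel)
    {
        UpdateProc(sel);
    }
}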

The related player wrapper:

/*
 * SmartPlayerLinuxMono.cs
 *
 * WebSite: https://daniusdk.com
 * Github: https://github.com/daniulive/SmarterStreaming
 */

public void Play(int sel)
{
    if (videoctrl[sel].is_running)
    {
        Debug.Log("Already playing..");
        return;
    }

    lock (videoctrl[sel].frame_lock_)
    {
        videoctrl[sel].cur_video_frame_ = null;
    }

    OpenPlayer(sel);

    if (videoctrl[sel].player_handle_ == IntPtr.Zero)
        return;

    // set the playback URL
    NTSmartPlayerSDK.NT_SP_SetURL(videoctrl[sel].player_handle_, videoctrl[sel].videoUrl);

    /* ++ pre-playback parameters can be configured here ++ */

    int play_buffer_time_ = 0;
    NTSmartPlayerSDK.NT_SP_SetBuffer(videoctrl[sel].player_handle_, play_buffer_time_);    // set buffer time

    int is_using_tcp = 0;    // RTSP TCP mode
    NTSmartPlayerSDK.NT_SP_SetRTSPTcpMode(videoctrl[sel].player_handle_, is_using_tcp);

    int timeout = 10;
    NTSmartPlayerSDK.NT_SP_SetRtspTimeout(videoctrl[sel].player_handle_, timeout);

    int is_auto_switch_tcp_udp = 1;
    NTSmartPlayerSDK.NT_SP_SetRtspAutoSwitchTcpUdp(videoctrl[sel].player_handle_, is_auto_switch_tcp_udp);

    Boolean is_mute_ = false;
    NTSmartPlayerSDK.NT_SP_SetMute(videoctrl[sel].player_handle_, is_mute_ ? 1 : 0);    // whether to start playback muted

    int is_fast_startup = 1;
    NTSmartPlayerSDK.NT_SP_SetFastStartup(videoctrl[sel].player_handle_, is_fast_startup);    // enable fast startup

    Boolean is_low_latency_ = false;
    NTSmartPlayerSDK.NT_SP_SetLowLatencyMode(videoctrl[sel].player_handle_, is_low_latency_ ? 1 : 0);    // enable low-latency mode or not

    // set the rotation angle (only 0, 90, 180 and 270 degrees are valid; other values are ignored)
    int rotate_degrees = 0;
    NTSmartPlayerSDK.NT_SP_SetRotation(videoctrl[sel].player_handle_, rotate_degrees);

    int volume = 100;
    NTSmartPlayerSDK.NT_SP_SetAudioVolume(videoctrl[sel].player_handle_, volume);    // playback volume, range [0, 100]: 0 is mute, 100 is maximum (default 100)

    // report download speed
    int is_report = 0;
    int report_interval = 1;
    NTSmartPlayerSDK.NT_SP_SetReportDownloadSpeed(videoctrl[sel].player_handle_, is_report, report_interval);
    /* -- pre-playback parameters can be configured here -- */

    // video frame callback (YUV/RGB)
    videoctrl[sel].video_frame_call_back_ = new SP_SDKVideoFrameCallBack(NT_SP_SDKVideoFrameCallBack);
    NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(videoctrl[sel].player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FROMAT_I420, window_handle_, videoctrl[sel].video_frame_call_back_);

    UInt32 flag = NTSmartPlayerSDK.NT_SP_StartPlay(videoctrl[sel].player_handle_);

    if (flag == DANIULIVE_RETURN_OK)
    {
        videoctrl[sel].is_need_get_frame_ = true;
        Debug.Log("Playback started successfully");
    }
    else
    {
        videoctrl[sel].is_need_get_frame_ = false;
        Debug.LogError("Failed to start playback");
    }

    videoctrl[sel].is_running = true;
}

The OpenPlayer implementation called above:

OpenPlayer mainly calls the underlying NT_SP_Open() interface to obtain a player instance handle, then sets the event callback and so on.

private void OpenPlayer(int sel)
{
    window_handle_ = IntPtr.Zero;

    // open a native player instance only if we do not already have one
    if (videoctrl[sel].player_handle_ == IntPtr.Zero)
    {
        videoctrl[sel].player_handle_ = new IntPtr();
        UInt32 ret_open = NTSmartPlayerSDK.NT_SP_Open(out videoctrl[sel].player_handle_, window_handle_, 0, IntPtr.Zero);
        if (ret_open != 0)
        {
            videoctrl[sel].player_handle_ = IntPtr.Zero;
            Debug.LogError("NT_SP_Open failed..");
            return;
        }
    }

    videoctrl[sel].event_call_back_ = new SP_SDKEventCallBack(NT_SP_SDKEventCallBack);
    NTSmartPlayerSDK.NT_SP_SetEventCallBack(videoctrl[sel].player_handle_, window_handle_, videoctrl[sel].event_call_back_);

    videoctrl[sel].sdk_video_frame_call_back_ = new VideoControl.SetVideoFrameCallBack(SDKVideoFrameCallBack);
    videoctrl[sel].sdk_event_call_back_ = new VideoControl.SetEventCallBack(SDKEventCallBack);
}

Stopping playback:

private void ClosePlayer(int sel)
{
    videoctrl[sel].is_need_get_frame_ = false;
    videoctrl[sel].is_need_init_texture_ = false;

    if (videoctrl[sel].player_handle_ == IntPtr.Zero)
    {
        return;
    }

    UInt32 flag = NTSmartPlayerSDK.NT_SP_StopPlay(videoctrl[sel].player_handle_);
    if (flag == DANIULIVE_RETURN_OK)
    {
        Debug.Log("Stopped successfully");
    }
    else
    {
        Debug.LogError("Failed to stop");
    }

    videoctrl[sel].player_handle_ = IntPtr.Zero;

    videoctrl[sel].is_running = false;
}

Summary

Live playback in Unity is fairly common on Windows and Android but rarely seen on Linux. On the one hand, demand on Linux is smaller; on the other hand, there are few reference samples for the platform. In practice, once the core functionality has been implemented on Windows or Android, porting it to Linux is quite straightforward.

In Unity, the flow boils down to pulling the stream, decoding it, delivering the data through callbacks and rendering in the upper layer, so it is not all that complicated. The details to watch out for are the DllImport declarations, the conversion of the original C++ structs and enums to C#, and Unity3D's compatibility with different Linux versions. For developers familiar with C#, none of this poses much technical difficulty.
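
As an illustration of the DllImport point: on Linux the P/Invoke declarations bind to the native shared library rather than a Windows .dll. The sketch below is inferred from the calls used in this article; the library name and the exact signatures are assumptions, and the authoritative declarations live in the SDK's own C# wrapper (NTSmartPlayerSDK.cs):

using System;
using System.Runtime.InteropServices;

// Sketch only: how the P/Invoke layer might be declared on Linux.
// Library name and signatures are assumptions inferred from the calls above.
public static class NTSmartPlayerSDKSketch
{
    // "SmartPlayerSDK" is assumed to resolve to libSmartPlayerSDK.so on the library search path.
    private const string NativeLib = "SmartPlayerSDK";

    [DllImport(NativeLib)]
    public static extern UInt32 NT_SP_Open(out IntPtr handle, IntPtr hwnd, UInt32 flag, IntPtr pReserve);

    [DllImport(NativeLib)]
    public static extern UInt32 NT_SP_SetURL(IntPtr handle, [MarshalAs(UnmanagedType.LPStr)] string url);

    [DllImport(NativeLib)]
    public static extern UInt32 NT_SP_StartPlay(IntPtr handle);

    [DllImport(NativeLib)]
    public static extern UInt32 NT_SP_StopPlay(IntPtr handle);
}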

