In mobile audio/video development we constantly face an architectural choice: do we chase the lowest possible latency (drone video links, real-time command and control), or rich in-stream processing (watermarks, AI analysis, picture-in-picture)? The former usually calls for pass-through relay ("Relay"), which avoids the cost of decoding and re-encoding; the latter requires transcoding ("Transcoding"), which needs access to the YUV/RGB frames. Walking through the core code of SmartPlayer.java, this article shows how the flexibility of the 大牛直播 SDK (SmartPlayer + SmartPublisher) lets these two very different scenarios coexist in a single architecture, building an all-in-one endpoint that supports RTSP/RTMP playback, stream pushing, recording, and a lightweight RTSP service.

Core architecture: wrapper pattern and event-driven design

To keep business logic decoupled from the underlying SDK, and to stay safe in a multi-threaded environment, a wrapper layer sits between SmartPlayer and the native JNI interface:

- LibPlayerWrapper: a thread-safe wrapper around playback control, parameter configuration, and data callbacks.
- LibPublisherWrapper: a wrapper around publishing, recording, the RTSP service, and layer handling.
- EventListener: forwards low-level state callbacks (connected, disconnected, snapshot result, and so on) to the UI layer, separating logic from views.

In the startPlayLogic method of SmartPlayer.java, the data path is chosen according to the business flag isRelayMode; this is the "routing" center of the whole system. If no re-encoding is needed, pressing "Start Play" simply starts preview playback:

```java
private boolean startPlayLogic() {
    if (isPlaying) return false;
    if (!mPlayerWrapper.open()) return false;

    // Common parameters
    mPlayerWrapper.setUrl(mPlaybackUrl);
    mPlayerWrapper.setSurface(mSurfaceView);
    mPlayerWrapper.setRenderScaleMode(1);
    mPlayerWrapper.setFastStartup(true);
    mPlayerWrapper.setAudioOutputType(1);
    mPlayerWrapper.setMute(isMute);
    mPlayerWrapper.setRotation(mRotateDegrees);
    mPlayerWrapper.setRTSPConfig(10, 1);

    if (!isRelayMode) {
        Log.i(TAG, "二次编码模式: 设置 ExternalRender");
        mPlayerWrapper.setExternalRender(new I420ExternalRender(mPublisherArray));
    }

    // Hardware decoder configuration
    mPlayerWrapper.setHWDecoder(isHardwareDecoder, isHardwareDecoder);

    if (!mPlayerWrapper.startPlay()) {
        Log.e(TAG, "StartPlay failed");
        mPlayerWrapper.close();
        return false;
    }

    isPlaying = true;
    return true;
}
```

If pass-through forwarding is required, startPullLogic()/stopPullLogic() are used instead:

```java
private boolean startPullLogic() {
    if (isPulling) return false;
    if (!mPlayerWrapper.open()) return false;

    mPlayerWrapper.setUrl(mPlaybackUrl);
    mPlayerWrapper.setRTSPConfig(10, 1);

    // Pull mode always installs the data callbacks
    mPlayerWrapper.setAudioDataCallback(new PlayerAudioDataCallback(mStreamPublisher));
    mPlayerWrapper.setPullStreamAudioTranscodeAAC(true);
    mPlayerWrapper.setVideoDataCallback(new PlayerVideoDataCallback(mStreamPublisher));

    if (!mPlayerWrapper.startPullStream()) {
        if (!isPlaying) mPlayerWrapper.close();
        return false;
    }

    isPulling = true;
    return true;
}

private void stopPullLogic() {
    if (!isPulling) return;
    isPulling = false;
    mPlayerWrapper.stopPullStream();
    if (!isPlaying) mPlayerWrapper.close();
}

private void stopPlayLogic() {
    if (!isPlaying) return;
    isPlaying = false;
    mPlayerWrapper.stopPlay();
    if (!isPulling) {
        mPlayerWrapper.close();
    }
}
```

Scenario 1: high-performance pass-through (Relay Mode)

The essence of relay mode is "take it as it is": instead of decoding video frames, we hook the already-encoded packets at the bottom of the player and feed them straight into the publisher.

1.1 Capturing encoded data

We implement NTVideoDataCallback and NTAudioDataCallback. In SmartPlayer.java, PlayerVideoDataCallback hands the data directly to LibPublisherWrapper:

```java
/* Quoted from SmartPlayer.java inner classes */
class PlayerVideoDataCallback implements NTVideoDataCallback {
    private WeakReference<LibPublisherWrapper> publisher_;
    private int video_buffer_size = 0;
    private ByteBuffer video_buffer_ = null;

    // ... constructor ...

    @Override
    public ByteBuffer getVideoByteBuffer(int size) {
        // Manage the buffer dynamically; reuse memory to reduce GC pressure
        if (size < 1) return null;
        if (size <= video_buffer_size && video_buffer_ != null) {
            return video_buffer_;
        }
        video_buffer_size = size + 1024;
        video_buffer_size = (video_buffer_size + 0xf) & (~0xf); // 16-byte alignment
        video_buffer_ = ByteBuffer.allocateDirect(video_buffer_size);
        return video_buffer_;
    }

    @Override
    public void onVideoDataCallback(int ret, int video_codec_id, int sample_size, int is_key_frame,
                                    long timestamp, int width, int height, long presentation_timestamp) {
        if (video_buffer_ == null) return;

        LibPublisherWrapper publisher = publisher_.get();
        if (null == publisher || !publisher.is_publishing()) return;

        video_buffer_.rewind();
        // [Key point] post the encoded data as-is, without decoding
        publisher.PostVideoEncodedData(video_codec_id, video_buffer_, sample_size, is_key_frame,
                timestamp, presentation_timestamp);
    }
}
```
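The audio path in pull mode follows the same pattern through NTAudioDataCallback, but the demo's PlayerAudioDataCallback is not reproduced in this article. The fragment below is therefore only a minimal sketch of what such a callback typically looks like, mirroring the video callback above; the method names used here (getAudioByteBuffer, onAudioDataCallback) and the publisher call hinted at in the comment (PostAudioEncodedData) are assumptions for illustration, not verified SDK APIs.

```java
/* Illustrative sketch only: mirrors the buffer-reuse pattern of PlayerVideoDataCallback.
   The callback method names and the publisher call are assumptions, not verified SDK APIs. */
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;

class PlayerAudioDataCallbackSketch {
    private final WeakReference<LibPublisherWrapper> publisher_;
    private ByteBuffer audio_buffer_;
    private int audio_buffer_size_;

    PlayerAudioDataCallbackSketch(LibPublisherWrapper publisher) {
        publisher_ = new WeakReference<>(publisher);
    }

    // Hand a reusable direct buffer to the native layer, as getVideoByteBuffer does for video.
    public ByteBuffer getAudioByteBuffer(int size) {
        if (size < 1) return null;
        if (size <= audio_buffer_size_ && audio_buffer_ != null) return audio_buffer_;
        audio_buffer_size_ = (size + 512 + 0xf) & (~0xf); // grow a little, keep 16-byte alignment
        audio_buffer_ = ByteBuffer.allocateDirect(audio_buffer_size_);
        return audio_buffer_;
    }

    // When an encoded audio packet (e.g. AAC) arrives, forward it to the publisher unchanged.
    public void onAudioDataCallback(int codec_id, int sample_size, long timestamp) {
        LibPublisherWrapper publisher = publisher_.get();
        if (audio_buffer_ == null || publisher == null || !publisher.is_publishing()) return;
        audio_buffer_.rewind();
        // Hypothetical publisher call, by analogy with PostVideoEncodedData on the video path:
        // publisher.PostAudioEncodedData(codec_id, audio_buffer_, sample_size, timestamp);
    }
}
```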
1.2 Publisher configuration (pitfalls to avoid)

In relay mode the publisher must not be configured with encoder parameters (bitrate, GOP, FPS): it never encodes, because the data arriving from the callbacks is already compressed.

```java
private void initAndSetConfig() {
    if (libPublisher == null || !mStreamPublisher.empty()) return;

    long handle = libPublisher.SmartPublisherOpen(mContext, mAudioOpt, mVideoOpt, mVideoWidth, mVideoHeight);
    if (handle == 0) return;

    int fps = 25;
    int gop = fps * 2;

    initializePublisher(libPublisher, handle, mVideoWidth, mVideoHeight, fps, gop, isRelayMode);
    mStreamPublisher.set(libPublisher, handle);
}

private void initializePublisher(SmartPublisherJniV2 lib, long handle, int width, int height,
                                 int fps, int gop, boolean isRelay) {
    // [Key change] in relay mode, skip encoder configuration entirely: the data is already encoded
    if (!isRelay) {
        if (mVideoEncodeType == 1) {
            // Hardware H.264
            int kbps = LibPublisherWrapper.estimate_video_hardware_kbps(width, height, fps, true);
            // After the hardware encoder is selected successfully, set the detailed parameters
            if (lib.SetSmartPublisherVideoHWEncoder(handle, kbps) == 0) {
                lib.SetNativeMediaNDK(handle, 0);             // default 0
                lib.SetVideoHWEncoderBitrateMode(handle, 1);  // 1: VBR, 0: CQ
                lib.SetVideoHWEncoderQuality(handle, 39);     // quality parameter
                lib.SetAVCHWEncoderProfile(handle, 0x08);     // High Profile
                lib.SetAVCHWEncoderLevel(handle, 0x1000);     // Level 4.1
            }
        } else if (mVideoEncodeType == 2) {
            // Hardware H.265
            int kbps = LibPublisherWrapper.estimate_video_hardware_kbps(width, height, fps, false);
            lib.SetSmartPublisherVideoHevcHWEncoder(handle, kbps);
            lib.SetVideoHWEncoderBitrateMode(handle, 1);
            lib.SetVideoHWEncoderQuality(handle, 39);
        } else {
            // Software H.264
            int quality = LibPublisherWrapper.estimate_video_software_quality(width, height, true);
            int maxKbps = LibPublisherWrapper.estimate_video_vbr_max_kbps(width, height, fps);
            lib.SmartPublisherSetSwVBRMode(handle, 1, quality, maxKbps);
        }
        lib.SmartPublisherSetGopInterval(handle, gop);
        lib.SmartPublisherSetFPS(handle, fps);
        lib.SmartPublisherSetAudioCodecType(handle, 1); // AAC
    }
    // Key point: install the updated EventHandlePublisherV2
    lib.SetSmartPublisherEventCallbackV2(handle, new EventHandlePublisherV2(mUiHandler));
}
```

In addition, relay mode must never start local audio capture (the microphone) or the layer thread; doing so would cause data conflicts or waste resources.

```java
private boolean startPushRtmpLogic() {
    initAndSetConfig();
    if (!mStreamPublisher.SetURL(mRelayStreamUrl)) return false;

    if (!mStreamPublisher.StartPublisher()) {
        mStreamPublisher.try_release();
        return false;
    }

    if (!isRelayMode) {
        startAudioRecorder();
        startLayerPostThread();
    }
    return true;
}

private void stopPushLogic() {
    mStreamPublisher.StopPublisher();
    mStreamPublisher.try_release();

    if (!mStreamPublisher.is_publishing()) {
        stopAudioRecorder();
        stopLayerPostThread();
    }
}
```

Scenario 2: re-encoding with dynamic watermarks (Transcoding Mode)

As soon as the video needs a watermark, a scrolling caption, or picture-in-picture, we must get hold of the YUV data. The SDK exposes the NTExternalRender interface, which combined with LayerPostThread gives multi-layer composition.

2.1 Receiving YUV data and posting it

The I420ExternalRender class implements the SDK's render callback:

```java
/* Quoted from SmartPlayer.java inner classes */
private static class I420ExternalRender implements NTExternalRender {
    // ... field declarations ...

    @Override
    public int getNTFrameFormat() {
        return NT_FRAME_FORMAT_I420; // request I420 as the callback format
    }

    @Override
    public void onNTFrameSizeChanged(int width, int height) {
        // Allocate ByteBuffers for the Y, U and V planes
        width_ = width;
        height_ = height;
        y_row_bytes_ = width;
        u_row_bytes_ = (width + 1) / 2;
        v_row_bytes_ = (width + 1) / 2;
        y_buffer_ = ByteBuffer.allocateDirect(y_row_bytes_ * height_);
        u_buffer_ = ByteBuffer.allocateDirect(u_row_bytes_ * ((height_ + 1) / 2));
        v_buffer_ = ByteBuffer.allocateDirect(v_row_bytes_ * ((height_ + 1) / 2));
    }

    @Override
    public void onNTRenderFrame(int width, int height, long timestamp) {
        // ... buffer rewind ...
        if (publisher_list_ != null) {
            for (WeakReference<LibPublisherWrapper> ref : publisher_list_) {
                LibPublisherWrapper p = ref.get();
                if (p != null && !p.empty()) {
                    // [Core] post the decoded YUV frame to the publisher's video layer (Layer 0)
                    p.PostLayerImageI420ByteBuffer(0, 0, 0,
                            y_buffer_, 0, y_row_bytes_,
                            u_buffer_, 0, u_row_bytes_,
                            v_buffer_, 0, v_row_bytes_,
                            width_, height_, 0, 0, 0, 0, 0, 0);
                }
            }
        }
    }
}
```

2.2 Dynamic watermarks (multi-layer composition)

LayerPostThread is a standalone thread that periodically renders a timestamp bitmap and a logo bitmap and posts them to the publisher's upper layers (Layer 1, Layer 2, ...). The SDK then blends Layer 0 (video) with Layer X (watermarks) in hardware or software.

```java
/* Quoted from LayerPostThread.java */
private void on_update_layers(List<LibPublisherWrapper> publisher_list, boolean is_run_on_thread, int w, int h) {
    // ... some logic omitted ...

    // 1. Post the timestamp watermark
    if (is_text_) {
        // Render the timestamp into a Bitmap
        Bitmap text_bitmap = makeTextBitmap(makeTimestampString(), getFontSize(video_w),
                Color.argb(255, 0, 0, 0), true, Color.argb(255, 255, 255, 255), true);
        // Post it to the configured layer index (timestamp_index_)
        for (LibPublisherWrapper i : publisher_list)
            i.PostLayerBitmap(timestamp_index_, 0, cur_h, text_bitmap, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
        text_bitmap.recycle();
    }

    // 2. Post the picture watermark (logo)
    if (is_picture_) {
        // ... fetch/generate the logo Bitmap ...
        for (LibPublisherWrapper i : publisher_list)
            i.PostLayerImageRGBA8888ByteBuffer(picture_index_, 0, cur_h, buffer, 0, bitmap.getRowBytes(),
                    w, h, 0, 0, scale_w, scale_h, scale_filter_mode, 0);
    }
}
```

Note: in transcoding mode, startPushRtmpLogic must call startAudioRecorder() to capture microphone audio, because here picture and sound are re-encoded and muxed together.
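The article does not quote startAudioRecorder() itself. The sketch below only illustrates the standard Android AudioRecord capture loop that such a method typically wraps (RECORD_AUDIO permission required); the thread handling and the hand-off to the publisher (the feedPcmToPublisher placeholder) are assumptions for illustration, not the demo's actual code.

```java
// Minimal sketch of microphone capture with Android's AudioRecord.
// The hand-off of PCM data to the SDK is intentionally left as a placeholder.
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

class MicCaptureSketch implements Runnable {
    private static final int SAMPLE_RATE = 44100;
    private volatile boolean running = true;

    @Override
    public void run() {
        int minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
        byte[] pcm = new byte[minBuf];
        recorder.startRecording();
        while (running) {
            int read = recorder.read(pcm, 0, pcm.length);
            if (read > 0) {
                // Placeholder: hand the PCM chunk to the publisher for AAC encoding.
                // feedPcmToPublisher(pcm, read, SAMPLE_RATE, /*channels=*/1);
            }
        }
        recorder.stop();
        recorder.release();
    }

    void stopCapture() { running = false; }
}
```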
Scenario 3: forwarding to RTMP

The RTSP or RTMP stream pulled on the front end can be forwarded, via the SDK's RTMP push module, to a self-hosted RTMP server or a CDN. The relevant logic:

```java
private void handlePushRtmp() {
    if (mStreamPublisher.is_rtmp_publishing()) {
        stopPushLogic();
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mBtnPushRtmp.setText("推送RTMP");
            }
        });
    } else {
        if (startPushRtmpLogic()) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mBtnPushRtmp.setText("停止推流");
                }
            });
        }
    }
}
```

Scenario 4: lightweight RTSP service

Besides pushing to an RTMP server, the SDK can turn the Android device itself into an RTSP server that other devices on the local network pull from directly.

4.1 Starting the RTSP server

This logic lives in handleRtspService:

```java
private void handleRtspService() {
    if (isRTSPServiceRunning) {
        if (libPublisher != null && mRtspServerHandle != 0) {
            libPublisher.StopRtspServer(mRtspServerHandle);
            libPublisher.CloseRtspServer(mRtspServerHandle);
            mRtspServerHandle = 0;
        }
        isRTSPServiceRunning = false;
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mBtnRtspService.setText("启动RTSP服务");
                mBtnRtspPublish.setEnabled(false);
            }
        });
    } else {
        mRtspServerHandle = libPublisher.OpenRtspServer(0);
        if (mRtspServerHandle == 0) return;

        libPublisher.SetRtspServerPort(mRtspServerHandle, 28554);

        if (libPublisher.StartRtspServer(mRtspServerHandle, 0) == 0) {
            isRTSPServiceRunning = true;
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mBtnRtspService.setText("停止RTSP服务");
                    mBtnRtspPublish.setEnabled(true);
                }
            });
        } else {
            libPublisher.CloseRtspServer(mRtspServerHandle);
            mRtspServerHandle = 0;
        }
    }
}
```

4.2 Publishing a stream to the RTSP server

Once the server is running, the current publisher (relay or transcoding, it does not matter) is attached to it:

```java
private void handleRtspPublish() {
    if (mStreamPublisher.is_rtsp_publishing()) {
        mStreamPublisher.StopRtspStream();
        mStreamPublisher.try_release();
        if (!mStreamPublisher.is_publishing()) {
            stopAudioRecorder();
            stopLayerPostThread();
        }
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mBtnRtspPublish.setText("发布RTSP流");
                mBtnRtspService.setEnabled(true);
                mBtnRtspSession.setEnabled(false);
            }
        });
    } else {
        initAndSetConfig();

        mStreamPublisher.SetRtspStreamName("stream1");
        mStreamPublisher.ClearRtspStreamServer();
        mStreamPublisher.AddRtspStreamServer(mRtspServerHandle);

        if (mStreamPublisher.StartRtspStream()) {
            // [Key point] relay mode does not start local capture
            if (!isRelayMode) {
                startAudioRecorder();
                startLayerPostThread();
            }
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mBtnRtspPublish.setText("停止RTSP流");
                    mBtnRtspService.setEnabled(false);
                    mBtnRtspSession.setEnabled(true);
                }
            });
        }
    }
}
```
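With the port (28554) and stream name ("stream1") configured above, clients on the same LAN would typically pull a URL of the form rtsp://<device-ip>:28554/stream1; the exact URL layout is the SDK's convention, so verify it against your SDK version. As a usage illustration, the fragment below replays the stream on a second Android device by reusing the LibPlayerWrapper calls already shown in startPlayLogic; the IP address is an example, and a LibPlayerWrapper plus a SurfaceView are assumed to be available as in the demo.

```java
// Illustrative pull-side usage on another device, reusing the wrapper calls shown earlier.
// 192.168.1.50 is an example address; replace it with the publishing device's LAN IP.
private boolean playLanRtspStream(LibPlayerWrapper player, SurfaceView surfaceView) {
    if (!player.open()) return false;
    player.setUrl("rtsp://192.168.1.50:28554/stream1");
    player.setSurface(surfaceView);
    player.setFastStartup(true);   // favor a quick first frame
    player.setRTSPConfig(10, 1);   // same RTSP options used in startPlayLogic
    if (!player.startPlay()) {
        player.close();
        return false;
    }
    return true;
}
```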
Scenario 5: local recording

Recording is decoupled from publishing: we can record without pushing, or record while pushing. The underlying layer supports automatic slicing into segments.

```java
private void handleRecord() {
    if (mStreamPublisher.is_recording()) {
        mStreamPublisher.StopRecorder();
        mStreamPublisher.try_release();
        if (!mStreamPublisher.is_publishing()) {
            stopAudioRecorder();
            stopLayerPostThread();
        }
        isPauseRecording = true;
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mBtnRecord.setText("录像");
                mBtnPauseRecord.setText("暂停");
                mBtnPauseRecord.setEnabled(false);
            }
        });
    } else {
        initAndSetConfig();
        configRecorderParam();

        if (mStreamPublisher.StartRecorder()) {
            // [Key point] relay mode does not start local capture
            if (!isRelayMode) {
                startAudioRecorder();
                startLayerPostThread();
            }
            isPauseRecording = true;
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mBtnRecord.setText("停止录像");
                    mBtnPauseRecord.setEnabled(true);
                }
            });
        }
    }
}
```
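configRecorderParam() is called before StartRecorder(), but its body is not quoted in this article. At a minimum it has to point the recorder at a writable directory; the sketch below only shows that directory selection using standard Android APIs, and leaves the actual SDK setter as a placeholder comment since its name is not shown here.

```java
// Sketch: pick a writable directory for MP4 segments using standard Android APIs.
// The SDK call that consumes this path is left as a placeholder.
import android.content.Context;
import java.io.File;

final class RecordDirHelper {
    static String prepareRecordDir(Context context) {
        // App-specific external storage: no runtime storage permission needed on modern Android.
        File base = context.getExternalFilesDir(null);
        if (base == null) base = context.getFilesDir(); // fall back to internal storage
        File dir = new File(base, "record");
        if (!dir.exists()) dir.mkdirs();
        // Placeholder: pass dir.getAbsolutePath() to the SDK's recorder-directory setter
        // inside configRecorderParam() (setter name not quoted in this article).
        return dir.getAbsolutePath();
    }
}
```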
Wrap-up

This walk through SmartRelayDemo shows a mature mobile audio/video solution: it does not merely solve "playback" or "publishing" in isolation, but, through a flexible architecture, covers everything from low-latency transport to edge processing.

```
                 Android A/V gateway (SmartPlayer)
                               |
             +-----------------+------------------+
             |            Inputs                  |
   Network stream (RTSP/RTMP)         Microphone (AudioRecord)
             |                        (transcoding mode only)
             v                                    |
   Player wrapper (LibPlayerWrapper)              |
             |                                    |
       isRelayMode ?                              |
       /            \                             |
  YES (relay)     NO (transcode)                  |
       |                |                         |
  encoded data     decode -> YUV callback         |
  (H.264/AAC       (I420ExternalRender)           |
   packets)             |                         |
       |           layer composition              |
       |           (LayerPostThread:              |
       |            timestamp / logo / AI boxes)  |
       | zero-copy      | YUV + watermark         | mixed audio
       v                v                         v
             Publisher wrapper (LibPublisherWrapper)
                 |              |               |
                 v              v               v
          RTMP push     Lightweight RTSP   Local MP4 recording
         (CDN/server)    server (LAN)       (sliced segments)
```

To sum up the strengths of this design:

- Architectural flexibility: thanks to the wrapper layer and the callback mechanism, developers can switch between relay mode and transcoding mode almost instantly. The same stack meets both the harsh 100-200 ms latency requirements of drone video links and the OSD-watermark/AI-analysis needs of the security industry.
- Performance optimization: in relay mode, VideoDataCallback plus PostVideoEncodedData gives an end-to-end zero-decoding forwarding path, keeping CPU usage to a minimum, extending battery life, and avoiding the heat-induced throttling that plagues mobile devices.
- Full-stack protocol support: one code base covers RTSP/RTMP playback, RTMP publishing, a lightweight RTSP server, and local MP4 recording. With "pull, push, record, serve" in a single node, the Android device is no longer just a consumer of video but a core node of an edge video network.

A design that can push forward (re-editing, AI processing) and fall back (fast pass-through, low power) leaves developers well placed to handle heterogeneous networks and changing business scenarios, and to build genuinely highly available, industrial-grade audio/video applications.

Official CSDN blog: 音视频牛哥-CSDN博客