
Test and Verification: Muxing an H.264 Real-Time Stream into a TS File for Storage, a Complete Implementation

2016-12-27 16:17
Abstract: Before doing this test I read quite a few technical articles online, but because of my limited technical background they left me confused. Fortunately, someone online provided an example of muxing an H.264 file plus an AAC file into a TS file; this test is a rewrite based on that example. The stream is received with testRTSPClient.

First, the test uses live555's testRTSPClient to receive the real-time network stream. In the DummySink::afterGettingFrame callback, the subsession's medium name is checked to tell audio from video:

void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                                  struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
    //----------------------------------------------------------------------------------
    if (0 == strcmp(fSubsession.mediumName(), "video"))
    {
        if (!bInitHead)
        {
            // Before the first frame, pull the SPS/PPS out of the SDP
            // (sprop-parameter-sets) and write each of them once,
            // prefixed with the 0x00000001 start code.
            unsigned int num = 0;
            SPropRecord* sps = parseSPropParameterSets(fSubsession.fmtp_spropparametersets(), num);
            unsigned char start_code[4] = { 0x00, 0x00, 0x00, 0x01 };

            memcpy(m_recvBuf, start_code, 4);
            memcpy(&m_recvBuf[4], sps[0].sPropBytes, sps[0].sPropLength);   // SPS
            WriteBuf2TsFile(25, 1, m_recvBuf, sps[0].sPropLength + 4, 0);
            fwrite(m_recvBuf, 1, sps[0].sPropLength + 4, pVideo_H264_File);

            memset(m_recvBuf, 0, DUMMY_SINK_RECEIVE_BUFFER_SIZE + 4);
            memcpy(m_recvBuf, start_code, 4);
            memcpy(&m_recvBuf[4], sps[1].sPropBytes, sps[1].sPropLength);   // PPS
            WriteBuf2TsFile(25, 1, m_recvBuf, sps[1].sPropLength + 4, 0);
            fwrite(m_recvBuf, 1, sps[1].sPropLength + 4, pVideo_H264_File);
            delete[] sps;

            bInitHead = true;
        }

        // NALUs delivered by live555 come without the start code, so prepend
        // 0x00000001 before handing the frame to the TS muxer.
        memset(m_recvBuf, 0, DUMMY_SINK_RECEIVE_BUFFER_SIZE + 4);
        m_recvBuf[0] = 0x00;
        m_recvBuf[1] = 0x00;
        m_recvBuf[2] = 0x00;
        m_recvBuf[3] = 0x01;
        memcpy(&m_recvBuf[4], fReceiveBuffer, frameSize);
        WriteBuf2TsFile(25, 1, m_recvBuf, frameSize + 4, 0);
        fwrite(m_recvBuf, 1, frameSize + 4, pVideo_H264_File);
    }
    else if (0 == strcmp(fSubsession.mediumName(), "audio"))
    {
        // Encode the raw PCM frame to AAC with faac, then hand the AAC frame
        // to the TS muxer.
        BYTE* pbAACBuffer = new BYTE[nMaxOutputBytes];
        frameSize = frameSize / (nPCMBitSize / 8);   // bytes -> samples
        int nRet = faacEncEncode(aac_Handle, (int*)fReceiveBuffer, frameSize,
                                 pbAACBuffer, nMaxOutputBytes);
        if (nRet > 0)
        {
            WriteBuf2TsFile(fSubsession.scale(), 0, pbAACBuffer, nRet,
                            fSubsession.getNormalPlayTime(presentationTime));
            fwrite(pbAACBuffer, 1, nRet, pAudio_Aac_File);   // write only the encoded bytes
        }
        delete[] pbAACBuffer;
        pbAACBuffer = NULL;
    }
    //----------------------------------------------------------------------------------

    // Then continue, to request the next frame of data:
    continuePlaying();
}

Two things need attention in the video path. First, the SPS/PPS parameter sets must be written once before the first frame. Second, if the incoming raw H.264 data does not carry the 0x00000001 start code, you have to prepend it yourself; if the stream already contains it, do not add it again. On the audio side, the data is transcoded to AAC here using the open-source faac library.
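The encoder handle aac_Handle, together with nMaxOutputBytes and nPCMBitSize used in the code above, must be created before the first audio frame arrives. The demo's actual initialization code is not shown in this article; the following is only a minimal sketch of how it could look with faac, assuming 8 kHz mono 16-bit PCM input and ADTS output (the function name OpenAacEncoder is mine):

#include <faac.h>

faacEncHandle aac_Handle      = NULL;
unsigned long nInputSamples   = 0;   /* samples faac expects per faacEncEncode call */
unsigned long nMaxOutputBytes = 0;   /* upper bound for one encoded AAC frame       */
const int     nPCMBitSize     = 16;  /* bits per input PCM sample                   */

void OpenAacEncoder(void)
{
    /* 8000 Hz, 1 channel: matches the hard-coded test values in this article */
    aac_Handle = faacEncOpen(8000, 1, &nInputSamples, &nMaxOutputBytes);

    faacEncConfigurationPtr cfg = faacEncGetCurrentConfiguration(aac_Handle);
    cfg->inputFormat   = FAAC_INPUT_16BIT;  /* raw 16-bit PCM in                  */
    cfg->outputFormat  = 1;                 /* 1 = ADTS header on every AAC frame */
    cfg->aacObjectType = LOW;               /* AAC-LC                             */
    faacEncSetConfiguration(aac_Handle, cfg);
}

Call it once before entering the live555 event loop; the nInputSamples value returned by faacEncOpen tells you how many PCM samples faac expects per encode call.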

Below is the relevant part of WriteBuf2TsFile:

/* Write one frame of the real-time stream into the TS file.
 * iStreamType: 0 = audio (AAC), 1 = video (H.264); lTimeStamp is unused in this test. */
int WriteBuf2TsFile(unsigned int framerate, int iStreamType, unsigned char *pData, int iDataSize, unsigned long lTimeStamp)
{
    unsigned int audiosamplerate = 8000;   // audio sample rate (hard-coded for this test)
    unsigned int videoframetype = 0;       // video frame type (I/P/B)
    Ts_Adaptation_field ts_adaptation_field_Head;
    Ts_Adaptation_field ts_adaptation_field_Tail;

    if (0 == iStreamType)
    {
        // Audio: wrap the AAC frame into a PES packet, then split it into TS packets.
        Take_Out_Pes(&m_audio_tspes, Timestamp_audio, 0x01, NULL, pData, iDataSize);
        if (m_audio_tspes.Pes_Packet_Length_Beyond != 0)
        {
            printf("PES_AUDIO : SIZE = %d\n", m_audio_tspes.Pes_Packet_Length_Beyond);
            // Fill in the adaptation-field flags. Note: audio packets carry no PCR,
            // so the "tail" variant is used for the head as well.
            WriteAdaptive_flags_Tail(&ts_adaptation_field_Head);
            WriteAdaptive_flags_Tail(&ts_adaptation_field_Tail);
            PES2TS(&m_audio_tspes, TS_AAC_PID, &ts_adaptation_field_Head, &ts_adaptation_field_Tail, Timestamp_video, Timestamp_audio);
            // Advance by the duration of one AAC frame (1024 samples) in 90 kHz ticks.
            Timestamp_audio += 1024 * 1000 * 90 / audiosamplerate;
        }
    }
    else if (1 == iStreamType)
    {
        // Video: wrap the H.264 NALU into a PES packet, then split it into TS packets.
        Take_Out_Pes(&m_video_tspes, Timestamp_video, 0x00, &videoframetype, pData, iDataSize);
        if (m_video_tspes.Pes_Packet_Length_Beyond != 0)
        {
            printf("PES_VIDEO : SIZE = %d\n", m_video_tspes.Pes_Packet_Length_Beyond);
            if (videoframetype == FRAME_I || videoframetype == FRAME_P || videoframetype == FRAME_B)
            {
                // Fill in the adaptation-field flags: the head carries the PCR.
                WriteAdaptive_flags_Head(&ts_adaptation_field_Head, Timestamp_video);
                WriteAdaptive_flags_Tail(&ts_adaptation_field_Tail);
                PES2TS(&m_video_tspes, TS_H264_PID, &ts_adaptation_field_Head, &ts_adaptation_field_Tail, Timestamp_video, Timestamp_audio);
                // Advance by the duration of one video frame in 90 kHz ticks.
                Timestamp_video += 1000 * 90 / framerate;
            }
            else
            {
                // Other NAL types carry no PCR and do not advance the timestamp,
                // so the "tail" variant is used for the head as well.
                WriteAdaptive_flags_Tail(&ts_adaptation_field_Head);
                WriteAdaptive_flags_Tail(&ts_adaptation_field_Tail);
                PES2TS(&m_video_tspes, TS_H264_PID, &ts_adaptation_field_Head, &ts_adaptation_field_Tail, Timestamp_video, Timestamp_audio);
            }
        }
    }
    return 1;
}

Note that the frame rate (25) and the audio sample rate (8000) are hard-coded, since this is only a test. Pay particular attention to the timestamps: if they are wrong, only a single video frame will play (this problem held me up for a while, probably due to lack of experience).
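For reference, the two timestamp increments in WriteBuf2TsFile come straight from the 90 kHz clock that TS uses for PTS/PCR values. The small sketch below (the helper names are mine, not from the demo) reproduces the arithmetic:

#include <stdio.h>

/* One video frame lasts 1/framerate seconds => 90000 / framerate ticks. */
static unsigned int VideoPtsStep(unsigned int framerate)
{
    return 90000 / framerate;            /* 25 fps  -> 3600 ticks per frame  */
}

/* One AAC frame carries 1024 PCM samples => 1024 / samplerate seconds. */
static unsigned int AudioPtsStep(unsigned int samplerate)
{
    return 1024u * 90000u / samplerate;  /* 8000 Hz -> 11520 ticks per frame */
}

int main(void)
{
    printf("video PTS step: %u\n", VideoPtsStep(25));    /* 3600,  same as 1000 * 90 / 25         */
    printf("audio PTS step: %u\n", AudioPtsStep(8000));  /* 11520, same as 1024 * 1000 * 90 / 8000 */
    return 0;
}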

The relevant links are below:

File-based H.264 + AAC to TS muxing example: http://download.csdn.net/detail/zhuweigangzwg/5605869

Real-time stream muxing demo: https://github.com/Jsnails/MUX_TS
Because of space constraints, the live555 and faac open-source libraries have to be downloaded and built by yourself; the uploaded files themselves have not been altered in any way. Once those two libraries are built and the project is configured, it compiles and debugs without problems.