2. Recording
A recorded file consists of three parts: a file header, the data stream, and a file trailer. The code is given first, followed by the explanation.
VC++:
CHKVisionDlg::OnStart()
for(int i = 0; i < GetTotalDSPs(); i++){
    m_bDspPreset[i] = TRUE;
    if(m_bDspPreset[i]){
        // open one .264 file per channel for the raw stream
        char fileName[256];
        sprintf(fileName, "d:\\stream%d_%d.264", i, gFileNum++ / GetTotalDSPs());
        gFileHandle[i] = _open(fileName, _O_CREAT | _O_BINARY | _O_WRONLY | _O_TRUNC, _S_IREAD | _S_IWRITE);
        if(gFileHandle[i] == -1){
            TRACE("channel %d file open error\n", i);
            return;
        }
        // reset the per-channel statistics
        gChannelFrames[i] = 0;
        gChannelTotalLength[i] = 0;
        gChannelFramesLost[i] = 0;
        gChannelOverflow[i] = 0;
        gCurrentFileLen[i] = 0;
        // write the stream header saved by the direct-read callback (PktSysHeader)
        _write(gFileHandle[i], FileHeader[i], FileHeaderLen);
        // cannot be started again until stopped first
        //m_bDspPreset[i] = FALSE;
        gCaptureStartedNum++;
        // let the threads have a chance to run
        //Sleep(500);
    }else
        gFileHandle[i] = -1;
}
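OnStart() only opens the output files and writes the cached stream header; the packets themselves arrive through the direct-read callback shown next, which the SDK invokes for every packet it produces. For completeness, here is a minimal sketch of how such a callback is typically registered during initialization, assuming a RegisterStreamDirectReadCallback-style call as used in the HikVision board SDK samples; the exact function name, signature, and return convention should be verified against your SDK header:
// Sketch only: register the direct-read callback once during initialization.
// RegisterStreamDirectReadCallback and its 0-on-success convention are assumptions
// to be checked against the SDK header.
BOOL CHKVisionDlg::InitStreamCallback()   // hypothetical helper, not in the original code
{
    // pass the dialog pointer as the context argument of the callback
    if (RegisterStreamDirectReadCallback(StreamDirectReadCallback, this) != 0) {
        TRACE("RegisterStreamDirectReadCallback failed\n");
        return FALSE;
    }
    return TRUE;
}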
StreamDirectReadCallback
int __cdecl StreamDirectReadCallback(ULONG channelNum, void *DataBuf, DWORD Length, int frameType, void *context)
{
    //CHKVisionDlg * lpDlg = (CHKVisionDlg*)context;
    //return lpDlg->ProcCallBack(channelNum, DataBuf, Length, frameType);
    int i, status = 0;
    CString ctip;
    int nframetype = 0;
    // if capturing still images we would need to clean the queue here
    // if (!bCapture)
    //     return 0;
    // no errors
    if(frameType > 0) {
        if(frameType == PktSysHeader){
            // store the file header so OnStart() can write it at the top of each new file
            memcpy(FileHeader[channelNum], DataBuf, Length);
            FileHeaderLen = Length;
            TRACE("channel %d get the file header !\n", channelNum);
        }
        if(frameType == PktIFrames || frameType == PktSubIFrames){
            status = 1;      // key frame
        }
        else{
            status = 0;
        }
        if(frameType == PktMotionDetection){
            // m_VideoWin.DrawVect(channelNum, (char *)DataBuf, Length);
            return 0;        // motion-detection data is not written to the file
        }
        if(frameType == PktOrigImage){
            return 0;        // original (uncompressed) image data is ignored
        }
    }
    if(Length == 0){
        TRACE("no data ?\n");
        return 0;
    }
    // if(frameType == PktIFrames){
    //     int iii = 1;
    // }
    ULONG currentTime = timeGetTime();
    gChannelTotalLength[channelNum] += Length;
    gCurrentFileLen[channelNum] += Length;
    // update the bit-rate display (bits per second) roughly once a second
    if(currentTime > StartTime + 1000){
        CString str, str2;
        str.Format("%d", (gChannelTotalLength[dcurrentwin] * 8 / (currentTime - StartTime)));
        for(i = 0; i < g_nChannelTotal; i++)
            gChannelTotalLength[i] = 0;
        StartTime = currentTime;
        CHKVisionDlg *pMain = (CHKVisionDlg *)AfxGetMainWnd();
        pMain->GetDlgItem(IDC_BPS)->SetWindowText((LPCTSTR)str);
    }
    // if (m_sframe && channelNum == 0)
    // {
    //     if((frameType == PktSFrames && nframetype == 4) || (frameType == PktSysHeader))
    //     {
    //         MP4_ServerWriteData(channelNum, (unsigned char *)DataBuf, Length, frameType, status);
    //     }
    // }
    // MP4_ServerWriteData(channelNum, (unsigned char *)DataBuf, Length, frameType, status);
    if(frameType == PktAudioFrames)
    {
        // audio packets go into both the QCIF and the main (CIF) files
        _write(gFileHandleQcif[channelNum], DataBuf, Length);
        MP4_ServerWriteDataEx(channelNum, (unsigned char *)DataBuf, Length, frameType, status, 1);
        _write(gFileHandle[channelNum], DataBuf, Length);
        MP4_ServerWriteDataEx(channelNum, (unsigned char *)DataBuf, Length, frameType, status, 0);
    }else if (frameType == PktSubIFrames || frameType == PktSubPFrames || frameType == PktSubBBPFrames || frameType == PktSubSysHeader)
    {
        // sub-channel (QCIF) stream
        _write(gFileHandleQcif[channelNum], DataBuf, Length);
        MP4_ServerWriteDataEx(channelNum, (unsigned char *)DataBuf, Length, frameType, status, 1);
    }else
    {
        // main-channel (CIF) stream
        //_write(gFileHandle[channelNum], DataBuf, Length);
        MP4_ServerWriteDataEx(channelNum, (unsigned char *)DataBuf, Length, frameType, status, 0);
    }
    return 0;
}
CHKVisionDlg::OnStop()
for(int i = 0; i < GetTotalDSPs(); i++){
    if(m_bDspPreset[i]){
        ASSERT(gFileHandle[i] != -1);
        // StopVideoCapture(ChannelHandle[i]);
        //lseek(gFileHandle[i], 0, SEEK_SET);
        //FRAMES_STATISTICS fs;
        //GetFramesStatistics(ChannelHandle[i], &fs);
        //ULONG frames = fs.AudioFrames + fs.VideoFrames;
        //TRACE("channel %i has %x frames written\n", i, frames);
        // write the end code as the file trailer, then close the file
#define END_CODE 0x00000002
        ULONG endCode = END_CODE;
        _write(gFileHandle[i], &endCode, sizeof(ULONG));
        _close(gFileHandle[i]);
        ///add v34: also close the QCIF file when this channel encodes CIF and QCIF simultaneously
        if (bEncodeCifAndQcif[i])
            _close(gFileHandleQcif[i]);
        gCaptureStartedNum--;
    }
}
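Putting the three parts together: OnStart() writes the cached file header, the callback appends the data stream, and OnStop() terminates the file with the END_CODE trailer. The small sketch below reads the last four bytes of a recorded file back to confirm that trailer; CheckEndCode is a hypothetical helper used only for illustration and is not part of the SDK:
// Sketch only: verify that a recorded .264 file ends with the END_CODE trailer
// written by OnStop(). CheckEndCode is illustrative, not an SDK function.
#include <stdio.h>
#include <io.h>
#include <fcntl.h>

bool CheckEndCode(const char *fileName)
{
    int fh = _open(fileName, _O_RDONLY | _O_BINARY);
    if (fh == -1)
        return false;
    unsigned long endCode = 0;
    // the trailer is the last 4 bytes of the file
    _lseek(fh, -(long)sizeof(endCode), SEEK_END);
    _read(fh, &endCode, sizeof(endCode));
    _close(fh);
    return endCode == 0x00000002;   // END_CODE written by OnStop()
}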