提交 494dae45，作者：autulin

1.恢复使用原始raw视频队列,将生成帧的方法移动到发送帧的线程方法中(复用AVFrame防止内存泄漏)

2.修复tcp的bug
上级 c18e4ece
......@@ -104,10 +104,10 @@ public class DemoActivity extends AppCompatActivity implements
// 设置输出
// String fileName = String.valueOf(System.currentTimeMillis());
String fileName = "tttttt";
mediaOutput = mMediaRecorder.setFileOutPut(fileName); //输出到文件,这里demo是/sdcard/DCIM/pstest/tttttt.ps
// int ssrc = 1;
// mediaOutput = mMediaRecorder.setUdpOutPut("10.112.181.160", 8888, ssrc);
// String fileName = "tttttt";
// mediaOutput = mMediaRecorder.setFileOutPut(fileName); //输出到文件,这里demo是/sdcard/pstest/tttttt.ps
int ssrc = 1;
mediaOutput = mMediaRecorder.setTcpOutPut("10.112.154.194", 8888, ssrc);
mMediaRecorder.setSurfaceHolder(mSurfaceView.getHolder());
mMediaRecorder.prepare();
......
......@@ -15,6 +15,7 @@ int GB28181_sender::initSender() {
break;
case 1: // tcp
LOGE("ip:%s, port:%d, out_type:%d", args->ip_addr, args->port, args->outType);
initSocket(args->ip_addr, args->port);
break;
case 2: // file
//打开ps文件
......
......@@ -126,39 +126,15 @@ int GB28181Muxer::initMuxer() {
*/
int GB28181Muxer::sendVideoFrame(uint8_t *buf) {
int64_t st = getCurrentTime();
AVFrame *pNewFrame = genFrame(buf);
int64_t st1 = getCurrentTime();
vFrame_queue.push(pNewFrame);
uint8_t *new_buf = (uint8_t *) malloc(in_y_size * 3 / 2);
memcpy(new_buf, buf, in_y_size * 3 / 2);
video_queue.push(new_buf);
int64_t et = getCurrentTime();
LOGI("[muxer][send in]gen AVFrame time:%lld, send AVFrame to queue time:%lld", st1 - st, et - st1);
LOGI("[muxer][send in]send raw Frame to queue time:%lld", et - st);
videoFrameCnt++;
return 0;
}
/**
 * Wrap a raw frame into an FFmpeg AVFrame.
 *
 * NOTE(review): this function was removed by this commit — it allocates a
 * fresh AVFrame plus two buffers per call, and `new_buf` below is never
 * freed in this function (leaked once per frame unless the consumer frees
 * it — TODO confirm against custom_filter / the encode thread). The commit
 * replaces it with a single reusable AVFrame on the encoder thread.
 *
 * @param rawData raw frame data (in_y_size * 3 / 2 bytes are copied)
 * @return newly allocated AVFrame; ownership passes to the caller
 */
AVFrame *GB28181Muxer::genFrame(uint8_t *rawData) {
    // Deep copy of the caller's raw buffer; never freed here (see NOTE above).
    uint8_t *new_buf = (uint8_t *) malloc(in_y_size * 3 / 2);
    memcpy(new_buf, rawData, in_y_size * 3 / 2);
    AVFrame *pNewFrame = av_frame_alloc();
    // av_malloc'd picture buffer becomes the frame's data planes via
    // avpicture_fill (deprecated API — presumably predates av_image_fill_arrays).
    uint8_t *buf = (uint8_t *) av_malloc(picture_size);
    avpicture_fill((AVPicture *) pNewFrame, buf, pCodecCtx->pix_fmt, pCodecCtx->width,
                   pCodecCtx->height);
    custom_filter(this, new_buf, pNewFrame);
    // First frame anchors the clock; later pts are elapsed time * 90
    // (90 kHz timebase, assuming getCurrentTime() is in ms — TODO confirm).
    if (startTime == 0) {
        startTime = getCurrentTime();
        pNewFrame->pts = 0;
    } else {
        pNewFrame->pts = (getCurrentTime() - startTime) * 90;
    }
    LOGI("[muxer][gen frame]new Frame pts:%lld(%d)",
            pNewFrame->pts, videoFrameCnt);
    return pNewFrame;
}
/**
* 编码并发送一音频帧到编码队列
......@@ -181,24 +157,40 @@ int GB28181Muxer::sendAudioFrame(uint8_t *buf) {
/**
* 编码的线程方法
* 不断的从AVFrame队列里面取帧出来,送到FFmpeg中编码
* 不断的从视频原始帧队列里面取帧出来,送到FFmpeg中编码
* @param obj
* @return
*/
void *GB28181Muxer::startEncode(void *obj) {
LOGE("[muxer][encode]start encode thread");
GB28181Muxer *gb28181Muxer = (GB28181Muxer *) obj;
//初始化一个AVFrame,这个AVFrame是可以复用多次的
AVFrame *pNewFrame = av_frame_alloc();
uint8_t *buf = (uint8_t *) av_malloc(gb28181Muxer->picture_size);
avpicture_fill((AVPicture *) pNewFrame, buf, gb28181Muxer->pCodecCtx->pix_fmt, gb28181Muxer->pCodecCtx->width,
gb28181Muxer->pCodecCtx->height);
while (!gb28181Muxer->is_end) {
int64_t st = getCurrentTime();
AVFrame * pFrame = *gb28181Muxer->vFrame_queue.wait_and_pop();
uint8_t * new_buf = *gb28181Muxer->video_queue.wait_and_pop();
gb28181Muxer->custom_filter(gb28181Muxer, new_buf, pNewFrame);
delete new_buf;
if (gb28181Muxer->startTime == 0) {
gb28181Muxer->startTime = getCurrentTime();
pNewFrame->pts = 0;
} else {
pNewFrame->pts = (getCurrentTime() - gb28181Muxer->startTime) * 90;
}
int64_t et1 = getCurrentTime();
int ret = avcodec_send_frame(gb28181Muxer->pCodecCtx, pFrame);
int ret = avcodec_send_frame(gb28181Muxer->pCodecCtx, pNewFrame);
while (ret == AVERROR(EAGAIN)) {
usleep(1000);
ret = avcodec_send_frame(gb28181Muxer->pCodecCtx, pFrame);
ret = avcodec_send_frame(gb28181Muxer->pCodecCtx, pNewFrame);
}
int64_t et2 = getCurrentTime();
LOGI("fetch raw frame from queue time:%lld (frame quque left:%d),in FFmpeg time:%lld.", et1 - st, gb28181Muxer->vFrame_queue.size(), et2 - et1);
LOGI("fetch raw frame from queue time:%lld (video frame queue left:%d),in FFmpeg time:%lld.", et1 - st, gb28181Muxer->video_queue.size(), et2 - et1);
if (ret < 0) {
LOGE("send FFmpeg error:%d.", ret);
}
......@@ -227,7 +219,7 @@ void *GB28181Muxer::startMux(void *obj) {
} else{
gb28181Muxer->nowPkt = &gb28181Muxer->pkt;
LOGI("got first encoded pkt!(pts:%lld, queue size: %d) \n",
gb28181Muxer->nowPkt->pts, gb28181Muxer->vFrame_queue.size());
gb28181Muxer->nowPkt->pts, gb28181Muxer->video_queue.size());
gb28181Muxer->lastPts = gb28181Muxer->nowPkt->pts;
gb28181Muxer->muxCnt++;
}
......@@ -245,7 +237,7 @@ void *GB28181Muxer::startMux(void *obj) {
int64_t et = getCurrentTime();
if (ret >= 0){
LOGI("mux one pkt over!(video queue size: %d, audio queue size: %d), time use: %lld",
gb28181Muxer->vFrame_queue.size(), gb28181Muxer->audio_queue.size(), et - st);
gb28181Muxer->video_queue.size(), gb28181Muxer->audio_queue.size(), et - st);
}
}
......@@ -376,9 +368,9 @@ int GB28181Muxer::endMux() {
gb28181Sender->sendCloseSignal();
LOGE("audio queue left num: %d, video queue left num: %d", audio_queue.size(),
vFrame_queue.size());
video_queue.size());
audio_queue.clear();
vFrame_queue.clear();
video_queue.clear();
//Clean
if (video_st) {
......
......@@ -44,8 +44,8 @@ private:
GB28181_sender *gb28181Sender;
volatile int is_end = START_STATE;
volatile int is_release = RELEASE_FALSE;
threadsafe_queue<AVFrame *> vFrame_queue;
// threadsafe_queue<uint8_t *> video_queue;
// threadsafe_queue<AVFrame *> vFrame_queue;
threadsafe_queue<uint8_t *> video_queue;
threadsafe_queue<uint8_t *> audio_queue;
AVFormatContext *pFormatCtx;
AVOutputFormat *fmt;
......
Markdown 格式
0%
您添加了 0 人到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论