
dm358: decoding no longer runs after being paused for half an hour

In my design Qt controls the decode program: buttons in the Qt UI pause and resume decoding. During testing I found that if I pause for 10-30 minutes and then press resume, decoding continues normally, but if the pause lasts 30-60 minutes, pressing resume no longer restarts it. After adding some checks I found that when resume is pressed after more than 30 minutes, the video thread runs briefly but the display thread never runs again. The source for video.c and display.c is below; any advice would be greatly appreciated!
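For reference, pause/resume in the DVSDK decode demo goes through the DMAI Pause module (ti/sdo/dmai/Pause.h): each worker thread calls Pause_test() at the top of its loop and blocks there while the pause is on. The actual Qt/ctrl wiring is not posted, so the sketch below is only an assumption of how the buttons end up toggling that shared handle; the handler names are illustrative.

#include <ti/sdo/dmai/Pause.h>

/* Pause object shared with the video/display threads (envp->hPauseProcess). */
static Pause_Handle hPauseProcess;

/* Hypothetical handlers the Qt pause/resume buttons would end up calling. */
void onPauseClicked(void)
{
    Pause_on(hPauseProcess);   /* threads block at their next Pause_test() */
}

void onResumeClicked(void)
{
    Pause_off(hPauseProcess);  /* releases every thread waiting in Pause_test() */
}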

/*
* video.c
*
* This source file has the implementations for the video thread
* functions implemented for 'DVSDK decode demo' on DM365 platform
*
* Copyright (C) 2010 Texas Instruments Incorporated - http://www.ti.com/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Texas Instruments Incorporated nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/un.h>
#include <fcntl.h>
#include <errno.h>
#include <pthread.h>
#include <termios.h>
#include <netinet/in.h> // sockaddr_in, "man 7 ip", htons
#include <poll.h> // poll, pollfd
#include <arpa/inet.h> // inet_addr, inet_aton
#include <netdb.h> // gethostbyname
#include <error.h> // perror
#include <signal.h>
#include <time.h>
#include <sys/time.h>
#include <sched.h>
#include <limits.h> // PIPE_BUF

#include <xdc/std.h>

#include <ti/sdo/ce/Engine.h>
#include <ti/sdo/ce/osal/Memory.h>

#include <ti/sdo/dmai/Fifo.h>
#include <ti/sdo/dmai/Pause.h>
#include <ti/sdo/dmai/BufTab.h>
#include <ti/sdo/dmai/Loader.h>
#include <ti/sdo/dmai/VideoStd.h>
#include <ti/sdo/dmai/ce/Vdec2.h>
#include <ti/sdo/dmai/BufferGfx.h>
#include <ti/sdo/dmai/Rendezvous.h>

#include "video.h"
#include "../demo.h"
#include "../ui.h"
#include "../write_log.h"

/* Buffering for the display driver */
#define NUM_DISPLAY_BUFS 4

/* The masks to use for knowing when a buffer is free */
#define CODEC_FREE 0x1
#define DISPLAY_FREE 0x2

#ifndef YUV_420SP
#define YUV_420SP 256
#endif

#define bool unsigned char
#define true 1
#define false 0

pthread_mutex_t mutexPrint;
pthread_cond_t condPrint;

#define YUVBUFLEN_T (VIDEO_WIDTH_T*VIDEO_HEIGHT_T*3/2)
#define RGBBUFLEN_T (VIDEO_WIDTH_T*VIDEO_HEIGHT_T*3)
#define YUVBUFLEN (VIDEO_WIDTH*VIDEO_HEIGHT*3/2)
#define RGBBUFLEN (VIDEO_WIDTH*VIDEO_HEIGHT*3)

unsigned char hDstBuf_buf01[YUVBUFLEN_T], hDstBuf_buf02[YUVBUFLEN_T];
unsigned char yuvbuffer01[YUVBUFLEN_T], yuvbuffer02[YUVBUFLEN_T];
unsigned char rgbBuf01[RGBBUFLEN], rgbBuf02[RGBBUFLEN];
unsigned char rgbBufCrop01[RGBBUFLEN], rgbBufCrop02[RGBBUFLEN];

const char *fifo_name = "/tmp/my_fifo";
int pipe_fd = -1;
int res = 0;
const int open_mode = O_WRONLY;
int bytes_sent = 0;
char buffer[PIPE_BUF + 1];

/******************************************************************************
* resizeBufTab
******************************************************************************/
static Int resizeBufTab(Vdec2_Handle hVd2, Int displayBufs)
{
BufTab_Handle hBufTab = Vdec2_getBufTab(hVd2);
Int numBufs, numCodecBuffers, numExpBufs;
Buffer_Handle hBuf;
Int32 frameSize;

/* How many buffers can the codec keep at one time? */
numCodecBuffers = Vdec2_getMinOutBufs(hVd2);

if (numCodecBuffers < 0) {
ERR("Failed to get buffer requirements\n");
write_log(1,"Failed to get buffer requirements video.c 85 \r\n");
return FAILURE;
}

/*
* Total number of frames needed are the number of buffers the codec
* can keep at any time, plus the number of frames in the display pipe.
*/
numBufs = numCodecBuffers + displayBufs;

/* Get the size of output buffers needed from codec */
frameSize = Vdec2_getOutBufSize(hVd2);

/*
* Get the first buffer of the BufTab to determine buffer characteristics.
* All buffers in a BufTab share the same characteristics.
*/
hBuf = BufTab_getBuf(hBufTab, 0);

/* Do we need to resize the BufTab? */
if (numBufs > BufTab_getNumBufs(hBufTab) ||
frameSize < Buffer_getSize(hBuf)) {

/* Should we break the current buffers in to many smaller buffers? */
if (frameSize < Buffer_getSize(hBuf)) {

/*
* Chunk the larger buffers of the BufTab in to smaller buffers
* to accomodate the codec requirements.
*/
numExpBufs = BufTab_chunk(hBufTab, numBufs, frameSize);

if (numExpBufs < 0) {
ERR("Failed to chunk %d bufs size %ld to %d bufs size %ld\n",
BufTab_getNumBufs(hBufTab), Buffer_getSize(hBuf),
numBufs, frameSize);
write_log(1,"numExpBufs < 0 Failed to chunk bufs size to bufs size video.c 121 \r\n");
return FAILURE;
}

/*
* Did the current BufTab fit the chunked buffers,
* or do we need to expand the BufTab (numExpBufs > 0)?
*/
if (BufTab_expand(hBufTab, numExpBufs) < 0) {
ERR("Failed to expand BufTab with %d buffers after chunk\n",
numExpBufs);
write_log(1,"Failed to expand BufTab with buffers after chunk video.c 132 \r\n");
return FAILURE;
}
}
else {
/* Just expand the BufTab with more buffers */
if (BufTab_expand(hBufTab, (numBufs - BufTab_getNumBufs(hBufTab))) < 0) {
ERR("Failed to expand BufTab with %d buffers\n",
(numBufs - BufTab_getNumBufs(hBufTab)));
write_log(1,"Failed to expand BufTab with buffers video.c 141 \r\n");
return FAILURE;
}
}
}

return numBufs;
}

/******************************************************************************
* handleCodecBufs
******************************************************************************/
static Int handleCodecBufs(Vdec2_Handle hVd2, Fifo_Handle hFifo)
{
Buffer_Handle hOutBuf, hFreeBuf;
Int numDisplayBufs = 0;
UInt16 useMask;

/* Get a buffer for display from the codec */
hOutBuf = Vdec2_getDisplayBuf(hVd2);
while (hOutBuf) {
/* Mark the buffer as being used by display thread */
useMask = Buffer_getUseMask(hOutBuf);
Buffer_setUseMask(hOutBuf, useMask | DISPLAY_FREE);

/* Send buffer to display thread */
if (Fifo_put(hFifo, hOutBuf) < 0) {
ERR("Failed to send buffer to display thread\n");
write_log(1,"Failed to send buffer to display thread video.c 169 \r\n");
return FAILURE;
}

numDisplayBufs++;
/* Get another buffer for display from the codec */
hOutBuf = Vdec2_getDisplayBuf(hVd2);
}

/* Get a buffer to free from the codec */
hFreeBuf = Vdec2_getFreeBuf(hVd2);
while (hFreeBuf) {
/* The codec is no longer using the buffer */
Buffer_freeUseMask(hFreeBuf, CODEC_FREE);
hFreeBuf = Vdec2_getFreeBuf(hVd2);
}

return numDisplayBufs;
}

/******************************************************************************
* blackFill
******************************************************************************/
static Void blackFill(Buffer_Handle hBuf)
{
#if 1
Int8 *yPtr = Buffer_getUserPtr(hBuf);
Int32 ySize = Buffer_getSize(hBuf) * 2 / 3;
Int8 *cbcrPtr = yPtr + ySize;
Int bpp = ColorSpace_getBpp(ColorSpace_YUV420PSEMI);
Int y;
BufferGfx_Dimensions dim;

BufferGfx_getDimensions(hBuf, &dim);

yPtr += dim.y * dim.lineLength + dim.x * bpp / 8;
for (y = 0; y < dim.height; y++) {
memset(yPtr, 0x0, dim.width * bpp / 8);
yPtr += dim.lineLength;
}

cbcrPtr += dim.y * dim.lineLength / 2 + dim.x * bpp / 8;
for (y = 0; y < dim.height / 2; y++) {
memset(cbcrPtr, 0x80, dim.width * bpp / 8);
cbcrPtr += dim.lineLength;
}
#endif
}

/******************************************************************************
* flushDisplayPipe
******************************************************************************/
static Int flushDisplayPipe(Fifo_Handle hFifo, BufTab_Handle hBufTab,
BufferGfx_Dimensions * dim)
{
#if 1
/* Flush the Display pipe with blank buffers */
Buffer_Handle hBuf;
Int i;

for (i = 0; i < NUM_DISPLAY_BUFS; i++) {
hBuf = BufTab_getFreeBuf(hBufTab);
if (hBuf == NULL) {
ERR("No free buffer found for flushing display pipe.\n");
write_log(1,"No free buffer found for flushing display pipe video.c 231 \r\n");
return FAILURE;
}

BufferGfx_setDimensions(hBuf, dim);

blackFill(hBuf);

/* Send buffer to display thread */
if (Fifo_put(hFifo, hBuf) < 0) {
ERR("Failed to send buffer to display thread\n");
write_log(1,"Failed to send buffer to display thread video.c 242 \r\n");
return FAILURE;
}
}

return SUCCESS;
#endif
}

/******************************************************************************
* findClipDimensions
******************************************************************************/
Int findClipDimensions(Loader_Handle hLoader, Char * videoFile, BufTab_Handle hBufTab, Vdec2_Handle hVd2, BufferGfx_Dimensions * dim)
{
Buffer_Handle hInBuf, hDstBuf, hOutBuf, hFreeBuf;
Int numBufs = NUM_DISPLAY_BUFS;
Int frameNbr = 0;
Int bufIdx;
Int ret = Dmai_EOK; /* initialize so the first Dmai_EFIRSTFIELD check is well defined */

/* Prime the file loader and start reading from beginning of file */
if (Loader_prime(hLoader, &hInBuf) < 0) {
ERR("Failed to prime loader for file %s\n", videoFile);
write_log(1,"Failed to prime loader for file video.c 265 \r\n");
return FAILURE;
}

/*
* We run thru the clip until we get the first display frame, and use it to
* determine the true dimensions of the clip. Then we can re-create the
* codec using the true dimensions of the clip as maxHeight and maxWidth.
* In this manner, the codec's output buffer array can be created at once
* with the right dimension without need for chunking the buffers (which
* does not resize the buffers that are already in use by the codec). This
* is important so that all buffers in the array are of the same size, with
* the chroma data located at the same offset, thereby allowing the
* display driver to display these buffers directly without having to do
* a frame copy. (The driver requires all buffers to use the same chroma
* offset.)
*/
for (bufIdx=0; bufIdx < numBufs; ) {
if (ret != Dmai_EFIRSTFIELD) {
/* Get a free buffer from the BufTab */
hDstBuf = BufTab_getFreeBuf(hBufTab);

if (hDstBuf == NULL) {
ERR("Failed to get free buffer from display pipe BufTab\n");
BufTab_print(hBufTab);
write_log(1,"Failed to get free buffer from display pipe BufTab video.c 290\r\n");
return FAILURE;
}
bufIdx++;
}

/* Make sure the whole buffer is used for output */
BufferGfx_resetDimensions(hDstBuf);

/* Make sure we have some data in input buffer, otherwise DMAI will generate assertion */
if (Buffer_getNumBytesUsed(hInBuf) == 0) {
ERR("Failed to read data from loader, may be we reached EOF ???\n");
write_log(1,"Failed to read data from loader, may be we reached EOF video.c 302 \r\n");
return FAILURE;
}

/* Decode the video buffer */
ret = Vdec2_process(hVd2, hInBuf, hDstBuf);

if (ret < 0) {
ERR("Failed to decode video buffer\n");
write_log(1,"Failed to decode video buffer video.c 311 \r\n");
return FAILURE;
}

/* If no encoded data was used we cannot find the next frame */
if (ret == Dmai_EBITERROR && Buffer_getNumBytesUsed(hInBuf) == 0) {
ERR("Fatal bit error\n");
write_log(1,"Fatal bit error video.c 318 \r\n");
return FAILURE;
}

/* Get a buffer for display from the codec */
hOutBuf = Vdec2_getDisplayBuf(hVd2);
if (hOutBuf) {
/* Get the clip's actual width and height */
BufferGfx_getDimensions(hOutBuf, dim);
break;
}

/* Get a buffer to free from the codec */
hFreeBuf = Vdec2_getFreeBuf(hVd2);
while (hFreeBuf) {
/* The codec is no longer using the buffer */
BufTab_freeBuf(hFreeBuf);
hFreeBuf = Vdec2_getFreeBuf(hVd2);
}

if (frameNbr == 0) {
/*
* Resize the BufTab after the first frame has been processed.
* This because the codec may not know it's buffer requirements
* before the first frame has been decoded.
*/
numBufs = resizeBufTab(hVd2, NUM_DISPLAY_BUFS);

if (numBufs < 0) {
return FAILURE;
}
}

/* Load a new encoded frame from the file system */
if (Loader_getFrame(hLoader, hInBuf) < 0) {
ERR("Failed to get frame of encoded data during clip size estimation\n");
write_log(1,"Failed to get frame of encoded data during clip size estimation video.c 354 \r\n");
return FAILURE;
}

/* End of clip? */
if (Buffer_getUserPtr(hInBuf) == NULL) {
printf("Clip ended, exiting demo..\n");
write_log(1,"Clip ended, exiting demo.. video.c 361 \r\n");
return FAILURE;
}

frameNbr++;
}

if (hOutBuf == NULL) {
ERR("Failed to determine video clip's dimensions. Stream may be "
"unsupported by the video decoder.\n");
write_log(1,"Failed to determine video clip's dimensions. Stream may be "
"unsupported by the video decoder video.c 372 \r\n");
return FAILURE;
}

/* Record clip's width and height for OSD display */
gblSetImageWidth(dim->width);
gblSetImageHeight(dim->height);

return SUCCESS;
}
typedef unsigned char uchar;
void yuv_crop(uchar *pSrcBuf,int src_width,int src_height,int offset_x,int offset_y, uchar *pDstBuf,int dst_width,int dst_height){

uchar *sy = pSrcBuf;
uchar *suv = pSrcBuf + src_width * src_height;
uchar *dy = pDstBuf;
uchar *duv = pDstBuf + dst_width * dst_height;
int i,j;

sy += src_width*offset_y + offset_x;
for(i=0; i<dst_height; i++){
memcpy(dy, sy, dst_width);
sy += src_width;
dy += dst_width;
}

suv += src_width*(offset_y>>1) + (offset_x>>1);
for(i=0; i<(dst_height>>1); i++){
memcpy(duv, suv, dst_width);
duv += dst_width;
suv += src_width;
}
}

/******************************************************************************
* videoThrFxn
******************************************************************************/
Void *videoThrFxn(Void *arg)
{
VideoEnv *envp = (VideoEnv *) arg;
Void *status = THREAD_SUCCESS;
VIDDEC2_Params defaultParams = Vdec2_Params_DEFAULT;
VIDDEC2_DynamicParams defaultDynParams = Vdec2_DynamicParams_DEFAULT;
BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;
Loader_Attrs lAttrs = Loader_Attrs_DEFAULT;
Vdec2_Handle hVd2 = NULL;
Loader_Handle hLoader = NULL;
BufTab_Handle hBufTab = NULL;
BufTab_Handle hBufTabFlush = NULL;
Engine_Handle hEngine = NULL;
Buffer_Handle hDstBuf, hInBuf, hDispBuf, hBuf;
Int bufIdx, bufsSent, numDisplayBufs, numBufs;
Int totalNumBufs;
Int fifoRet, ret, frameNbr;
VIDDEC2_Params *params;
VIDDEC2_DynamicParams *dynParams;
Int32 bufSize;
ColorSpace_Type colorSpace = ColorSpace_YUV420PSEMI;
Int numFlushBufsSent = 0;
Int idx;
BufferGfx_Dimensions dim;

bool secondFrame = false;
char *p = NULL;
int frame_num=0;
int getNum=0;

/* Open the codec engine */
hEngine = Engine_open(envp->engineName, NULL, NULL);

if (hEngine == NULL) {
ERR("Failed to open codec engine %s\n", envp->engineName);
write_log(1,"Failed to open codec engine video.c 416 \r\n");
cleanup(THREAD_FAILURE);
}

/* Use supplied params if any, otherwise use defaults */
params = envp->params ? envp->params : &defaultParams;
dynParams = envp->dynParams ? envp->dynParams : &defaultDynParams;

params->maxWidth = VideoStd_1080I_WIDTH;
params->maxHeight = VideoStd_1080I_HEIGHT;

if (envp->videoStd == VideoStd_D1_PAL) {
params->maxFrameRate = 25000;
} else {
params->maxFrameRate = 30000;
}
if (colorSpace == ColorSpace_YUV420PSEMI) {
params->forceChromaFormat = XDM_YUV_420SP;
printf("%s %s %d XDM_YUV_420SP;\n", __FILE__ , __FUNCTION__, __LINE__);
} else {
params->forceChromaFormat = XDM_YUV_422ILE;
printf("%s %s %d XDM_YUV_422ILE;\n", __FILE__ , __FUNCTION__, __LINE__);
}

/* Create the video decoder */
hVd2 = Vdec2_create(hEngine, envp->videoDecoder, params, dynParams);

if (hVd2 == NULL) {
ERR("Failed to create video decoder: %s\n", envp->videoDecoder);
write_log(1,"Failed to create video decoder video.c 445 \r\n");
cleanup(THREAD_FAILURE);
}

/* Which output buffer size does the codec require? */
bufSize = Vdec2_getOutBufSize(hVd2);

/* Both the codec and the display thread can own a buffer */
gfxAttrs.bAttrs.useMask = CODEC_FREE;

/* Color space */
gfxAttrs.colorSpace = colorSpace;

/* Set the original dimensions of the Buffers to the max */
gfxAttrs.dim.width = params->maxWidth;
gfxAttrs.dim.height = params->maxHeight;
gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(gfxAttrs.dim.width,
colorSpace);

/* Create a table of buffers for decoded data */
hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
BufferGfx_getBufferAttrs(&gfxAttrs));

if (hBufTab == NULL) {
ERR("Failed to create BufTab for display pipe\n");
write_log(1,"Failed to create BufTab for display pipe video.c 476 \r\n");
cleanup(THREAD_FAILURE);
}

/* The codec is going to use this BufTab for output buffers */
Vdec2_setBufTab(hVd2, hBufTab);

/* Ask the codec how much input data it needs */
lAttrs.readSize = Vdec2_getInBufSize(hVd2);

/* Let the loader thread read 300000 bytes extra */
lAttrs.readAhead = 300000;

/* Make the total ring buffer larger */
lAttrs.readBufSize = (lAttrs.readSize + lAttrs.readAhead) * 2;

/* Use asynchronous mode since we have a separate loader thread */
lAttrs.async = TRUE;

/* Create the file loader for reading encoded data */
hLoader = Loader_create(envp->videoFile, &lAttrs);

if (hLoader == NULL) {
ERR("Failed to create loader for file %s\n", envp->videoFile);
write_log(1,"Failed to create loader for file video.c 494 \r\n");
cleanup(THREAD_FAILURE);
}

/* The environment copy will be shared with the loader thread */
envp->hLoader = hLoader;

/* Signal that the Loader is created */
Rendezvous_meet(envp->hRendezvousLoader);

/* Make sure the display thread is stopped when it's unlocked */
Pause_on(envp->hPausePrime);

/* Signal that initialization is done and wait for other threads */
Rendezvous_meet(envp->hRendezvousInit);

/* Find the clip's dimensions */
if (findClipDimensions(hLoader, envp->videoFile, hBufTab, hVd2, &dim) == FAILURE) {
cleanup(THREAD_FAILURE);
};

/* Adjust width and height to match the clip's dimensions */
params->maxWidth = dim.width;
params->maxHeight = dim.height;

/* Recreate the codec instance with actual clip's width and height */
if (hVd2) {
Vdec2_delete(hVd2);
}

/* Create the video decoder */
hVd2 = Vdec2_create(hEngine, envp->videoDecoder, params, dynParams);

if (hVd2 == NULL) {
ERR("Failed to create video decoder: %s\n", envp->videoDecoder);
write_log(1,"Failed to create video decoder video.c 529 \r\n");
cleanup(THREAD_FAILURE);
}

/* Which output buffer size does the codec require? */
bufSize = Vdec2_getOutBufSize(hVd2);

/* Recreate the BufTab */
if (hBufTab) {
BufTab_delete(hBufTab);
}

/* Set the original dimensions of the Buffers to the max */
gfxAttrs.dim.width = params->maxWidth;
gfxAttrs.dim.height = params->maxHeight;
gfxAttrs.dim.lineLength = BufferGfx_calcLineLength(gfxAttrs.dim.width,
colorSpace);

/* Create a table of buffers for decoded data */
hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
BufferGfx_getBufferAttrs(&gfxAttrs));

if (hBufTab == NULL) {
ERR("Failed to create BufTab for display pipe\n");
write_log(1,"Failed to create BufTab for display pipe video.c 553 \r\n");
cleanup(THREAD_FAILURE);
}

/* The codec is going to use the new BufTab for output buffers */
Vdec2_setBufTab(hVd2, hBufTab);

frameNbr = 0;
numDisplayBufs = 0;

prime: /* Initialize the state of the decode */
ret = Dmai_EOK;

if (frameNbr == 0) {
numBufs = NUM_DISPLAY_BUFS;
}
else {
/* when looping, the display thread was previously primed */
numBufs = totalNumBufs - NUM_DISPLAY_BUFS;
}

/* Prime the file loader and start reading from beginning of file */
if (Loader_prime(hLoader, &hInBuf) < 0) {
ERR("Failed to prime loader for file %s\n", envp->videoFile);
write_log(1,"Failed to prime loader for file video.c 580 \r\n");
cleanup(THREAD_FAILURE);
}

frame_num += 1;
/* Prime the display thread with video buffers */
for (bufIdx=0; bufIdx < numBufs; ) {
if (ret != Dmai_EFIRSTFIELD) {
/* Get a free buffer from the BufTab */
hDstBuf = BufTab_getFreeBuf(hBufTab);

if (hDstBuf == NULL) {
ERR("Failed to get free buffer from display pipe BufTab\n");
write_log(1,"Failed to get free buffer from display pipe BufTab video.c 592 \r\n");
BufTab_print(hBufTab);
cleanup(THREAD_FAILURE);
}
bufIdx++;
}

/* Make sure the whole buffer is used for output */
BufferGfx_resetDimensions(hDstBuf);

/* Decode the video buffer */
ret = Vdec2_process(hVd2, hInBuf, hDstBuf);

if (ret < 0) {
ERR("Failed to decode video buffer\n");
write_log(1,"Failed to decode video buffer video.c 687 \r\n");
print("Failed to decode video buffer video.c 687 \r\n");//
cleanup(THREAD_FAILURE);
}
//test by zhao 0
else{
print("succeed to decode video buffer video.c 687 \r\n");
}
//test by zhao 1

/* If no encoded data was used we cannot find the next frame */
if (ret == Dmai_EBITERROR && Buffer_getNumBytesUsed(hInBuf) == 0) {
ERR("Fatal bit error\n");
write_log(1,"Fatal bit error video.c 614 \r\n");
cleanup(THREAD_FAILURE);
}

/* Increment statistics for the user interface */
gblIncVideoBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

/* Send frames to display thread */
bufsSent = handleCodecBufs(hVd2, envp->hDisplayInFifo);

if (bufsSent < 0) {
print("send the frame to display is failed\n");//test by zhao
cleanup(THREAD_FAILURE);
}
//test by zhao 0
else{
print("send the frame to display is succeed\n");//test by zhao
}
//test by zhao 1
/* Keep track of the number of buffers sent to the display thread */
numDisplayBufs += bufsSent;

if (frameNbr == 0) {
/*
* Resize the BufTab after the first frame has been processed.
* This because the codec may not know it's buffer requirements
* before the first frame has been decoded.
*/
numBufs = resizeBufTab(hVd2, NUM_DISPLAY_BUFS);

if (numBufs < 0) {
cleanup(THREAD_FAILURE);
}

/*
* Set the total number of buffers used between video and display
* threads.
*/
totalNumBufs = numBufs;
}

/* Load a new encoded frame from the file system */
if (Loader_getFrame(hLoader, hInBuf) < 0) {
ERR("Failed to get frame of encoded data during priming\n");
print("failed to load the new encoded data during prinmin\n");//test by zhao
write_log(1,"Failed to get frame of encoded data during priming video.c 653 \r\n");
cleanup(THREAD_FAILURE);
}
//test by zhao is start
else{
print("video thread copy data is succeed\n")
}
//test by zhao is over
/* End of clip? */
if (Buffer_getUserPtr(hInBuf) == NULL) {
printf("Clip ended, exiting demo..\n");
write_log(1,"Clip ended, exiting demo.. video.c 660 \r\n");
cleanup(THREAD_SUCCESS);
}

frameNbr++;
}

/* Release the display thread, it is now fully primed */
Pause_off(envp->hPausePrime);

/* get the video file size */
struct stat buf;
stat(envp->videoFile, &buf);
long long totalFrameNum;
int persent = 0;
//1024 Kb per Mb, 100Kb per second,30 frame per second,
totalFrameNum = ((long long)buf.st_size)*30/1024/100/3;
int lineNum;
unsigned char *buffer_yuv;

/* Main loop */
while (!gblGetQuit()) {
if (ret != Dmai_EFIRSTFIELD) {
/* Get a displayed frame from the display thread */
fifoRet = Fifo_get(envp->hDisplayOutFifo, &hDispBuf);

if (fifoRet != Dmai_EOK) {
cleanup(THREAD_FAILURE);
}

/* Did the display thread flush the fifo? */
if (fifoRet == Dmai_EFLUSH) {
cleanup(THREAD_SUCCESS);
}

/* The display thread is no longer using the buffer */
Buffer_freeUseMask(hDispBuf, DISPLAY_FREE);

/* Keep track of the number of buffers sent to the display thread */
numDisplayBufs--;

/* Get a free buffer from the BufTab to give to the codec */
hDstBuf = BufTab_getFreeBuf(hBufTab);

if (hDstBuf == NULL) {
ERR("Failed to get free buffer from BufTab\n");
write_log(1,"Failed to get free buffer from BufTab video.c 696 \r\n");
print("failed to get free buffer from bufta");//test by zhao
//BufTab_print(hBufTab);
cleanup(THREAD_FAILURE);
}
//test by zhao 0
else{
print("succeed to get free buffer from bufta");//test by zhao
}
//test by zhao 1
}

/* Make sure the whole buffer is used for output */
BufferGfx_resetDimensions(hDstBuf);

if(ff_flag ==1){
printf("%s %s %d ff_flag = 1;\n", __FILE__ , __FUNCTION__, __LINE__);
ff_flag = 0;
}

frame_num++;// add by larry
pipe_fd = open(fifo_name, open_mode); /* NB: opening a FIFO with O_WRONLY blocks until a reader has it open */
if (pipe_fd != -1) {
persent = (frame_num*100)/totalFrameNum;
int ret = write(pipe_fd, &persent, sizeof(persent));
if (ret == -1) {
fprintf(stderr, "Write error on pipe\n");
exit(EXIT_FAILURE);
}
else {
//printf("after write fifo, res = %d", ret); fflush(stdout);
}
close(pipe_fd);
}
else {
printf("open fifo error... \n");
exit(EXIT_FAILURE);
}
//printf("write fifo Process %d finished\n", getpid());

/* Decode the video buffer */
ret = Vdec2_process(hVd2, hInBuf, hDstBuf);

// print buffer info
//Buffer_print(hDstBuf);

#if 0
if(print_flag == 1)
{
//yuv_crop(Buffer_getUserPtr(hDstBuf), 672, 576, 30, 48, yuvbuffer01, 594, 460);
init_dither_tab();
if(secondFrame == false){
secondFrame = true;
pthread_mutex_lock(&mutexPrint);
memset(rgbBuf01, 0, RGBBUFLEN);
//ConvertYUVToRGB(yuvbuffer01, rgbBuf01, 594, 460);
memcpy(Buffer_getUserPtr(hDstBuf), yuvbuffer01, Buffer_getNumBytesUsed(hDstBuf));
}
else{
secondFrame = false;
memset(rgbBuf02, 0, RGBBUFLEN);
//ConvertYUVToRGB(yuvbuffer02, rgbBuf02, 594, 460);
pthread_mutex_unlock(&mutexPrint);
pthread_cond_signal(&condPrint);
print_flag = 0;
}
}
else
{
}
#endif

#if 0
if(print_flag == 1)
{
yuv_crop(Buffer_getUserPtr(hDstBuf), 672, 576, 30, 48, yuvbuffer01, 594, 460);
init_dither_tab();
if(secondFrame == false){
secondFrame = true;
pthread_mutex_lock(&mutexPrint);
memset(rgbBuf01, 0, RGBBUFLEN);
ConvertYUVToRGB(yuvbuffer01, rgbBuf01, 594, 460);
}
else{
secondFrame = false;
memset(rgbBuf02, 0, RGBBUFLEN);
ConvertYUVToRGB(yuvbuffer02, rgbBuf02, 594, 460);
pthread_mutex_unlock(&mutexPrint);
pthread_cond_signal(&condPrint);
print_flag = 0;
}
}
else
{
}
#endif

#if 1

if(print_flag == 1)
{
//init_dither_tab();
if(secondFrame == false){
secondFrame = true;
pthread_mutex_lock(&mutexPrint);
yuv_crop(Buffer_getUserPtr(hDstBuf), VIDEO_WIDTH_T, VIDEO_HEIGHT_T, 30, 48, yuvbuffer01, VIDEO_WIDTH, VIDEO_HEIGHT);
//memcpy(yuvbuffer01, Buffer_getUserPtr(hDstBuf), YUVBUFLEN_T);
}
else{
secondFrame = false;
yuv_crop(Buffer_getUserPtr(hDstBuf), VIDEO_WIDTH_T, VIDEO_HEIGHT_T, 30, 48, yuvbuffer02, VIDEO_WIDTH, VIDEO_HEIGHT);
//memcpy(yuvbuffer02, Buffer_getUserPtr(hDstBuf), YUVBUFLEN_T);
pthread_mutex_unlock(&mutexPrint);
pthread_cond_signal(&condPrint);
print_flag = 0;
}
}
#endif
if (ret < 0) {
ERR("Failed to decode video buffer\n");
write_log(1,"Failed to decode video buffer video.c 710 \r\n");
cleanup(THREAD_FAILURE);
}

/* If no encoded data was used we cannot find the next frame */
if (ret == Dmai_EBITERROR && Buffer_getNumBytesUsed(hInBuf) == 0) {
ERR("Fatal bit error\n");
write_log(1,"Fatal bit error video.c 717 \r\n");
cleanup(THREAD_FAILURE);
}

/* Increment statistics for the user interface */
gblIncVideoBytesProcessed(Buffer_getNumBytesUsed(hInBuf));

/* Send frames to display thread */
bufsSent = handleCodecBufs(hVd2, envp->hDisplayInFifo);

if (bufsSent < 0) {
printf("send the frame to display buffer is fail\n");//test by zhao
cleanup(THREAD_FAILURE);
}

/* Keep track of the number of buffers sent to the display thread */
numDisplayBufs += bufsSent;

/* Load a new encoded frame from the file system */
if (Loader_getFrame(hLoader, hInBuf) < 0) {
ERR("Failed to get frame of encoded data from file system\n");
write_log(1,"Failed to get frame of encoded data from file system video.c 738 \r\n");
print("Failed to get frame of encoded data from file system\n");//test by zhao
cleanup(THREAD_FAILURE);
}
//test by zhao 0
else{
print("Failed to get frame of encoded data from file system\n");//test by zhao
}
//test by zhao 1
frameNbr++;

/* End of clip? */
if (Buffer_getUserPtr(hInBuf) == NULL) {
// add by larry, send 100% to QT
pipe_fd = open(fifo_name, open_mode);
if (pipe_fd != -1) {
static int persent = 100;
int ret;

ret = write(pipe_fd, &persent, sizeof(persent));
if (ret == -1) {
fprintf(stderr, "Write error on pipe\n");
exit(EXIT_FAILURE);
}
else {
//printf("after write fifo, res = %d", ret); fflush(stdout);
}
close(pipe_fd);
}
else {
printf("open fifo error... \n");
exit(EXIT_FAILURE);
}

/* Flush the codec for display frames */
Vdec2_flush(hVd2);

bufsSent = 0;
do {
/*
* Temporarily create a dummy buffer for the process call.
* After a flush the codec ignores the input buffer, but since
* Codec Engine still address translates the buffer, it needs
* to exist.
*/
hInBuf = Buffer_create(1, BufferGfx_getBufferAttrs(&gfxAttrs));

if (hInBuf == NULL) {
ERR("Failed to allocate dummy buffer\n");
write_log(1,"Failed to allocate dummy buffer video.c 761 \r\n");
cleanup(THREAD_FAILURE);
}

Buffer_setNumBytesUsed(hInBuf, 1);

ret = Vdec2_process(hVd2, hInBuf, hDstBuf);

if (ret < 0) {
ERR("Failed to decode video buffer\n");
write_log(1,"Failed to decode video buffer video.c 771 \r\n");
cleanup(THREAD_FAILURE);
}

Buffer_delete(hInBuf);
/* Keep track of the # of buffers sent to the display thread */
numDisplayBufs += bufsSent;

/* Send frames to display thread */
bufsSent = handleCodecBufs(hVd2, envp->hDisplayInFifo);

} while(bufsSent > 0);

/* Flush the display pipe if not looping */
if (!envp->loop) {
/* Create a table of buffers for decoded data */
gfxAttrs.dim = dim;

hBufTabFlush = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
BufferGfx_getBufferAttrs(&gfxAttrs));

if (hBufTabFlush == NULL) {
ERR("Failed to create BufTab for flushing\n");
write_log(1,"Failed to create BufTab for flushing video.c 795 \r\n");
cleanup(THREAD_FAILURE);
}

if (flushDisplayPipe(envp->hDisplayInFifo, hBufTabFlush, &dim) == FAILURE) {
cleanup(THREAD_FAILURE);
}

numFlushBufsSent = NUM_DISPLAY_BUFS;
}

/* Drain the display thread making sure all frames are displayed */
while (numDisplayBufs > (NUM_DISPLAY_BUFS - numFlushBufsSent)) {
/* Get a displayed frame from the display thread */
fifoRet = Fifo_get(envp->hDisplayOutFifo, &hDispBuf);

if (fifoRet != Dmai_EOK) {
cleanup(THREAD_FAILURE);
}

/* Did the display thread flush the fifo? */
if (fifoRet == Dmai_EFLUSH) {
cleanup(THREAD_SUCCESS);
}

/* The display thread is no longer using the buffer */
Buffer_freeUseMask(hDispBuf, DISPLAY_FREE);

/* Keep track of number of buffers sent to the display thread */
numDisplayBufs--;
}

/* Wait for audio to complete if applicable */
Rendezvous_meet(envp->hRendezvousLoop);

/* Loop the clip or quit? */
if (envp->loop) {
/* Make sure the decoder has no state by recreating it */
Vdec2_delete(hVd2);

/* Make sure any buffers kept by the codec are freed */
for (idx = 0; idx < BufTab_getNumBufs(hBufTab); idx++) {
hBuf = BufTab_getBuf(hBufTab, idx);
Buffer_freeUseMask(hBuf, CODEC_FREE);
}

hVd2 = Vdec2_create(hEngine, envp->videoDecoder,
params, dynParams);
if (hVd2 == NULL) {
ERR("Failed to create video decoder: %s\n",envp->videoDecoder);
write_log(1,"Failed to create video decoder video.c 847 \r\n");
cleanup(THREAD_FAILURE);
}

/* The codec is going to use this BufTab for output buffers */
Vdec2_setBufTab(hVd2, hBufTab);

/* Halt the display thread for priming */
Pause_on(envp->hPausePrime);

goto prime;
}
else {
printf("Clip ended, exiting demo..\n");
write_log(1,"Clip ended, exiting demo.. video.c 861 \r\n"); gblSetQuit();
}
} /* End of clip? */
} /* Main loop */

cleanup:
/* Make sure the other threads aren't waiting for us */
Rendezvous_force(envp->hRendezvousInit);
Rendezvous_force(envp->hRendezvousLoop);
Rendezvous_force(envp->hRendezvousLoader);
Pause_off(envp->hPauseProcess);
Pause_off(envp->hPausePrime);
Fifo_flush(envp->hDisplayInFifo);
if (hLoader) Loader_flush(hLoader);

/* Meet up with other threads before cleaning up */
Rendezvous_meet(envp->hRendezvousCleanup);

/* Clean up the thread before exiting */
if (hLoader) {
Loader_delete(hLoader);
}

if (hVd2) {
Vdec2_delete(hVd2);
}

if (hEngine) {
Engine_close(hEngine);
}

if (hBufTab) {
BufTab_delete(hBufTab);
}

if (hBufTabFlush) {
BufTab_delete(hBufTabFlush);
}

return status;
}

void rgb_crop(uchar *pSrcBuf,int src_width,int src_height,int offset_x,int offset_y, uchar *pDstBuf,int dst_width,int dst_height){
uchar *s = pSrcBuf;
uchar *d = pDstBuf;
int i;

s += src_width*offset_y*3 + offset_x*3;
for(i=0; i<dst_height; i++){
memcpy(d, s, dst_width*3); /* copy one cropped row from the source into the destination */
s += src_width*3;
d += dst_width*3;
}
}

/******************************************************************************
* printThrFxn
******************************************************************************/
#if 1

void cleanup_rc(void *arg)
{
printf("pthread_mutex_unlock(&mutexPrint).\n");
pthread_mutex_unlock(&mutexPrint);
}

Void *printThrFxn(Void *arg)
{
pthread_cleanup_push(cleanup_rc, NULL); // thread cleanup handler
init_dither_tab(); // 0ms

while(1){
pthread_testcancel();
pthread_mutex_lock(&mutexPrint);
pthread_cond_wait(&condPrint, &mutexPrint);
ConvertYUVToRGB(yuvbuffer01, rgbBuf01,VIDEO_WIDTH, VIDEO_HEIGHT);
RGBsaveBMP(rgbBuf01,VIDEO_WIDTH, VIDEO_HEIGHT);
sleep(1);
ConvertYUVToRGB(yuvbuffer02, rgbBuf02,VIDEO_WIDTH, VIDEO_HEIGHT);
RGBsaveBMP(rgbBuf02,VIDEO_WIDTH, VIDEO_HEIGHT);
pthread_mutex_unlock(&mutexPrint);
}
pthread_cleanup_pop(0);
}
#endif
#if 0
Void *printThrFxn(Void *arg)
{

while(1){
pthread_mutex_lock(&mutexPrint);
pthread_testcancel();
pthread_cond_wait(&condPrint, &mutexPrint);
pthread_testcancel();
FILE *fp;
int ret = -1;
fp = fopen("/opt/bin/yuv", "rw+");
if(fp == NULL)
perror("open file error");
ret = fwrite(yuvbuffer01, 1, 580608, fp);
printf("ret = %d.", ret);
fclose(fp);
//RGBsaveBMP(rgbBuf01,VIDEO_WIDTH_T, VIDEO_HEIGHT_T);
sleep(1);
//RGBsaveBMP(rgbBuf02,VIDEO_WIDTH_T, VIDEO_HEIGHT_T);
printf("RGBsaveBMP end ***\n");
pthread_mutex_unlock(&mutexPrint);

}}

#endif

/*
* display.c
*
* This source file has the implementations for the 'display' function
* for the DVSDK decode demos on DM365 platform.
*
* Copyright (C) 2010 Texas Instruments Incorporated - http://www.ti.com/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Texas Instruments Incorporated nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#include <stdio.h>
#include <string.h>
#include <unistd.h>

#include <xdc/std.h>

#include <ti/sdo/dmai/Time.h>
#include <ti/sdo/dmai/Framecopy.h>
#include <ti/sdo/dmai/Fifo.h>
#include <ti/sdo/dmai/Pause.h>
#include <ti/sdo/dmai/Display.h>
#include <ti/sdo/dmai/BufferGfx.h>
#include <ti/sdo/dmai/Rendezvous.h>

#include "display.h"
#include "osd.h"
#include "lcd.h"
#include "../demo.h"
#include "../write_log.h"

/* Display loop delay in us */
//#define DISPLAYLOOPLATENCY 33332
#define DISPLAYLOOPLATENCY 41332

/* Buffering for the display driver */
#define NUM_DISPLAY_BUFS 4

/******************************************************************************
* displayThrFxn
******************************************************************************/
Void *displayThrFxn(Void *arg)
{
DisplayEnv *envp = (DisplayEnv *) arg;
Display_Attrs dAttrs = Display_Attrs_DM365_VID_DEFAULT;
Display_Handle hDisplay = NULL;
Framecopy_Handle hFc = NULL;
Void *status = THREAD_SUCCESS;
Uns frameCnt = 0;
BufferGfx_Dimensions srcDim;
Buffer_Handle hSrcBuf, hDstBuf;
Int fifoRet;
ColorSpace_Type colorSpace = ColorSpace_YUV420PSEMI;
BufferGfx_Attrs gfxAttrs = BufferGfx_Attrs_DEFAULT;
BufTab_Handle hBufTab = NULL;
Int32 bufSize;
Time_Attrs tAttrs = Time_Attrs_DEFAULT;
Time_Handle hTime = NULL;
Int32 time, waitTime;
Int bufCnt = 1;
Int num ;
hTime = Time_create(&tAttrs);

if (hTime == NULL) {
ERR("Failed to create Time object\n");
write_log(1," display.c 96");
cleanup(THREAD_FAILURE);
}

if(Time_reset(hTime) != Dmai_EOK) {
ERR("Failed to reset timer\n");
write_log(1,"Failed to reset timer display.c 102 \r\n");
cleanup(THREAD_FAILURE);
}

/* Signal that initialization is done and wait for other threads */
Rendezvous_meet(envp->hRendezvousInit);
num = 0;

while (!gblGetQuit()) {
/* Pause processing? */
Pause_test(envp->hPauseProcess);

/* Pause for priming? */
Pause_test(envp->hPausePrime);

/* Get decoded video frame */
fifoRet = Fifo_get(envp->hInFifo, &hSrcBuf);

if (fifoRet < 0) {
ERR("Failed to get buffer from video thread\n");
write_log(1,"Failed to get buffer from video thread display.c 121 \r\n");
cleanup(THREAD_FAILURE);
}

/* Did the video thread flush the fifo? */
if (fifoRet == Dmai_EFLUSH) {
cleanup(THREAD_SUCCESS);
}
BufferGfx_getDimensions(hSrcBuf, &srcDim);

/* Prime the display driver with the first NUM_DISPLAY_BUFS buffers */
if (bufCnt <= NUM_DISPLAY_BUFS) {
if (bufCnt == 1) {
// Create the Display at the first frame
gfxAttrs.dim.width = srcDim.width;
gfxAttrs.dim.height = srcDim.height;
gfxAttrs.dim.lineLength = srcDim.lineLength;
gfxAttrs.dim.x = srcDim.x;
gfxAttrs.dim.y = srcDim.y;
if (colorSpace == ColorSpace_YUV420PSEMI) {
bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 3 / 2;
} else {
bufSize = gfxAttrs.dim.lineLength * gfxAttrs.dim.height * 2;
}

/* Create a table of buffers to use with the device drivers */
gfxAttrs.colorSpace = colorSpace;
hBufTab = BufTab_create(NUM_DISPLAY_BUFS, bufSize,
BufferGfx_getBufferAttrs(&gfxAttrs));
if (hBufTab == NULL) {
ERR("Failed to create buftab\n");
write_log(1,"Failed to create buftab display.c 153 \r\n");
cleanup(THREAD_FAILURE);
}
/* Create the display device instance */
dAttrs.delayStreamon = TRUE;
dAttrs.numBufs = NUM_DISPLAY_BUFS;
dAttrs.videoStd = envp->videoStd;
/*
* Round down the width to a multiple of 32 as required by
* display driver. Otherwise, the driver would internally round
* up the width, resulting in the codec padding showing up
* on the display when the image width is not a multiple of 32.
*/
dAttrs.width = ((gfxAttrs.dim.width & 0x1f) ?
(gfxAttrs.dim.width & ~(0x1f)) : gfxAttrs.dim.width);
dAttrs.height = gfxAttrs.dim.height;
dAttrs.videoOutput = envp->displayOutput;
dAttrs.colorSpace = colorSpace;
hDisplay = Display_create(hBufTab, &dAttrs);

if (hDisplay == NULL) {
ERR("Failed to create display device\n");
write_log(1,"Failed to create display device display.c 176 \r\n");
cleanup(THREAD_FAILURE);
}
}

bufCnt++;
}
else {
/* Get a buffer from the display device driver */
if (Display_get(hDisplay, &hDstBuf) < 0) {
ERR("Failed to get display buffer\n");
write_log(1,"Failed to get display buffer display.c 187 \r\n");
printf("failed to get the display buffer");//test by zhao
cleanup(THREAD_FAILURE);
}
//test by zhao 0
else{
printf("succeed to get the display buffer");//test by zhao
}
//test by zhao 1

/* Send buffer back to the video thread */
if (Fifo_put(envp->hOutFifo, hDstBuf) < 0) {
ERR("Failed to send buffer to video thread\n");
write_log(1,"Failed to send buffer to video thread display.c 194 \r\n");
cleanup(THREAD_FAILURE);
}
}

if (envp->videoStd == VideoStd_720P_60) {
if (Time_delta(hTime, (UInt32*)&time) < 0) {
ERR("Failed to get timer delta\n");
write_log(1,"Failed to get timer delta display.c 202 \r\n");
cleanup(THREAD_FAILURE);
}
waitTime = DISPLAYLOOPLATENCY - time;
if(waitTime > 0) {
usleep(waitTime);
}
if(Time_reset(hTime) != Dmai_EOK) {
ERR("Failed to reset timer\n");
write_log(1,"Failed to reset timer display.c 211 \r\n");
cleanup(THREAD_FAILURE);
}
}

/* Increment statistics for the user interface */
gblIncFrames();

if(num == 0)
{
system("echo LCD > /sys/class/davinci_display/ch0/output");
system("echo 800x480 > /sys/class/davinci_display/ch0/mode");
setOsdEncodeTransparency(0x0);
num = 1;

}

/* Give a filled buffer back to the display device driver */
if (Display_put(hDisplay, hSrcBuf) < 0) {
ERR("Failed to put display buffer\n");
write_log(1,"Failed to put display buffer display.c 230 \r\n");
cleanup(THREAD_FAILURE);
}

frameCnt++;
}

cleanup:
/* Make sure the other threads aren't waiting for us */
Rendezvous_force(envp->hRendezvousInit);
Pause_off(envp->hPauseProcess);
Pause_off(envp->hPausePrime);
Fifo_flush(envp->hOutFifo);

/* Meet up with other threads before cleaning up */
Rendezvous_meet(envp->hRendezvousCleanup);

/* Clean up the thread before exiting */
if (hFc) {
Framecopy_delete(hFc);
}

if (hDisplay) {
Display_delete(hDisplay);
}

/* Clean up the thread before exiting */
if (hBufTab) {
BufTab_delete(hBufTab);
}

if(hTime) {
Time_delete(hTime);
}

return status;
}

kooking:

1. If you do not pause from the UI at all, does the same problem occur?

2. Could you add some logging to see exactly where it ends up hanging? (See the logging sketch below.)
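A minimal sketch of that kind of logging, reusing the write_log() helper from the posted sources (../write_log.h): timestamp each blocking call in displayThrFxn (Pause_test, Fifo_get, Display_get) so the log shows which call never returns after a long pause. The helper name log_stamp and the log texts are illustrative only.

#include <stdio.h>
#include <time.h>

#include "../write_log.h"   /* same logging helper the posted sources use */

/* Prefix a log line with a monotonic timestamp so gaps become visible. */
static void log_stamp(const char *where)
{
    struct timespec ts;
    char line[128];

    clock_gettime(CLOCK_MONOTONIC, &ts);
    snprintf(line, sizeof(line), "[%ld.%03ld] %s\r\n",
             (long)ts.tv_sec, ts.tv_nsec / 1000000L, where);
    write_log(1, line);
}

/* Usage inside the display loop, for example:
 *     log_stamp("display: before Pause_test");
 *     Pause_test(envp->hPauseProcess);
 *     log_stamp("display: before Fifo_get");
 *     fifoRet = Fifo_get(envp->hInFifo, &hSrcBuf);
 *     log_stamp("display: got a frame");
 */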

baozeng zhao:

Reply to kooking:

If I don't pause at all, this never happens; it only shows up when the pause lasts more than 30 minutes.

baozeng zhao:

Reply to kooking:

I added some logging and found that the display thread simply stops running after 30 minutes. I suspect there is some limit in the decode path, but I just can't find it.

feller shi:

Reply to baozeng zhao:

Hi, did you ever solve this problem?

Also, have you tried playing a stream that contains bit errors?

clearLove:

Could you tell me how your DISPLAYLOOPLATENCY is calculated? My camera works best at 25 FPS, so I need to change the check to envp->videoStd == VideoStd_720P_50. I now run ./decode -v test.264 -y4, where -y4 is an option I added myself that maps to VideoStd_720P_50, but the printed frame rate is only 18 FPS. I suspect the DISPLAYLOOPLATENCY calculation is the problem; could we discuss it? Thanks a lot!

envp->videoStd == VideoStd_720P_50
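From the posted display.c (my reading, not an official formula): DISPLAYLOOPLATENCY is simply the target frame period in microseconds, and the loop sleeps for whatever is left of that period after the frame was handled (waitTime = DISPLAYLOOPLATENCY - time). A quick check of the numbers, with the 25 fps figure as an assumption for VideoStd_720P_50:

#include <stdio.h>

int main(void)
{
    /* frame period in microseconds = 1,000,000 / fps */
    printf("30 fps -> %.0f us per frame\n", 1000000.0 / 30.0); /* ~33333, close to the commented-out 33332 */
    printf("25 fps -> %.0f us per frame\n", 1000000.0 / 25.0); /* ~40000 would be the starting point at 25 fps */
    return 0;
}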

Ternence_Hsu:

Reply to clearLove:

(quoting clearLove's question above)

clearLove:

Reply to Ternence_Hsu:

Hello, I'm debugging on a DM368; the SDK version is ti-dvsdk_dm368-evm_4_02_00_06.

clearLove:

Reply to Ternence_Hsu:

Could we exchange QQ numbers to discuss this? My QQ is 709165253. Thanks!
