This thread has been locked.
If you have a related question, please click the "Ask a related question" button in the top right corner. The newly created question will be automatically linked to this question.
1. 使用Venc1_create 创建的encode,输出H.264 长宽比例为4:3 ,现在我想编码720P 长宽比例为16:9的,请问能否修改encode 输出的长宽比例?
2. H.264编码中 I帧的间隔可以修改吗? 是修改VIDENC1_DynamicParams 结构中的intraFrameInterval = 30吗?
3. 关于OSD,提供的palette[4][4],中每个数值具体什么用的?最后一列是透明度吧? 为什么要使用4组?
谢谢!
1. 怎么修改参数? 我用的是dvsdk2_00_00_07
2. 可以强制DSP输出一帧 I 帧吗? 如果可以,需要怎么设置
3. 见附件
谢谢
/*
 * capture.c
 *
 * ============================================================================
 * Copyright (c) Texas Instruments Inc 2009
 *
 * Use of this software is controlled by the terms and conditions found in the
 * license agreement under which this software has been supplied or provided.
 * ============================================================================
 */

#include <xdc/std.h>
#include <string.h>

#include "../demo.h"

#include <ti/sdo/dmai/Ccv.h>
#include <ti/sdo/dmai/Fifo.h>
#include <ti/sdo/dmai/Blend.h>
#include <ti/sdo/dmai/Pause.h>
#include <ti/sdo/dmai/BufTab.h>
#include <ti/sdo/dmai/Capture.h>
#include <ti/sdo/dmai/Display.h>
#include <ti/sdo/dmai/BufferGfx.h>
#include <ti/sdo/dmai/Rendezvous.h>

#ifdef USE_SECOND_STREAM
#include <ti/sdo/dmai/Resize.h>
#endif

#include "capture.h"

/* Bitmap (OSD) position on the video frame, in pixels */
#define BITMAP_X 80
#define BITMAP_Y 50 /* was 150 */

/* Buffering for the display driver */
#define NUM_DISPLAY_BUFS 4

/* Buffering for the capture driver */
#define NUM_CAPTURE_BUFS 4

/*
 * Total number of device driver buffers. Display buffers are intentionally
 * not added: the display (when enabled) shares the capture BufTab.
 */
#define NUM_DRIVER_BUFS NUM_CAPTURE_BUFS /* + NUM_DISPLAY_BUFS */

/*
 * The palette used while blending the bitmap. Each of the 4 rows is one
 * palette entry; the last column is the transparency (alpha), changed from
 * 0xff to 0x28 for a more transparent OSD.
 * NOTE(review): values such as 0x80/0xf0/0xff overflow a signed 8-bit type;
 * presumably Int8 is used here as raw byte storage — confirm against the
 * Blend module's palette definition.
 */
Int8 palette[4][4] = { { 0x80, 0x80, 0x01, 0x28 },
                       { 0x80, 0x80, 0xf0, 0x28 },
                       { 0x80, 0x80, 0xf8, 0x28 },
                       { 0x80, 0x80, 0xff, 0x28 } };

#ifdef USE_BYPASS_DISPLAY
/* Video standard strings to display on UI, indexed by VideoStd_Type */
static Char *videoStdStrings[VideoStd_COUNT] = {
    "AUTO", "CIF", "SIF NTSC", "SIF PAL", "D1 NTSC", "D1 PAL", "480P",
    "576P", "720P 60Hz", "720P 50Hz", "1080I 30Hz", "1080I 25Hz",
    "1080P 30Hz", "1080P 25Hz", "1080P 24Hz"
};
#endif

/******************************************************************************
 * captureThrFxn
 *
 * Capture thread body. Captures 720P 50Hz frames (every second frame is
 * dropped since the encoder does not support 50/60Hz), optionally blends an
 * OSD bitmap onto the frame, color converts 422Psemi -> 420Psemi, and sends
 * the converted frame to the video thread through a fifo. When
 * USE_SECOND_STREAM is defined a resized second stream is produced as well;
 * when USE_BYPASS_DISPLAY is defined the captured frame is also previewed.
 *
 * arg:     pointer to a CaptureEnv (fifos, UI handle, rendezvous objects).
 * Returns: THREAD_SUCCESS or THREAD_FAILURE (via the cleanup() macro).
 ******************************************************************************/
Void *captureThrFxn(Void *arg)
{
    CaptureEnv            *envp          = (CaptureEnv *) arg;
    Void                  *status        = THREAD_SUCCESS;
    Capture_Attrs          cAttrs        = Capture_Attrs_DM6467_DEFAULT;
#ifdef USE_BYPASS_DISPLAY /* 2011.9.22 by lvrain */
    Display_Attrs          dAttrs        = Display_Attrs_DM6467_VID_DEFAULT;
    Display_Handle         hDisplay      = NULL;
    Buffer_Handle          hDisBuf;      /* FIX: was used below but never declared */
#endif
    Blend_Attrs            blAttrs       = Blend_Attrs_DEFAULT;
    Blend_Config_Params    bConfigParams = Blend_Config_Params_DEFAULT;
    BufferGfx_Attrs        gfxAttrs      = BufferGfx_Attrs_DEFAULT;
    Ccv_Attrs              ccvAttrs      = Ccv_Attrs_DEFAULT;
#if defined(USE_SECOND_STREAM) /* 2011.09.14 second stream by lvrain */
    Resize_Attrs           rszAttrs      = Resize_Attrs_DEFAULT;
    Resize_Handle          hRsz          = NULL;
#endif
    Capture_Handle         hCapture      = NULL;
    Ccv_Handle             hCcv          = NULL;
    Blend_Handle           hBlend        = NULL;
    BufTab_Handle          hBufTab       = NULL;
    Buffer_Handle          hDstBuf, hCapBuf, hBmpBuf, hSecDstBuf;
    BufferGfx_Dimensions   srcDim, dstDim;
#ifdef USE_IR_FUNCTION
    UInt8                  trans, oldTrans;
#endif
    VideoStd_Type          videoStd;
    Int32                  bufSize;
    Int                    fifoRet;
    Int                    bufIdx;

    TRACEFLOW("the captureThrFxn is open \n");

    /* Set the color space for the capture buffer */
    gfxAttrs.colorSpace = ColorSpace_YUV422PSEMI;

    /*
     * Calculate the dimensions of a video standard given a color space.
     * VideoStd_720P_50 replaces VideoStd_D1_PAL (2011.09.26, 720p input test).
     */
    if (BufferGfx_calcDimensions(VideoStd_720P_50,
                                 gfxAttrs.colorSpace, &gfxAttrs.dim) < 0) {
        ERR("Failed to calculate Buffer dimensions\n");
        cleanup(THREAD_FAILURE);
    }

    /* Calculate buffer size needed of a video standard given a color space */
    bufSize = BufferGfx_calcSize(VideoStd_720P_50, gfxAttrs.colorSpace);

    if (bufSize < 0) {
        ERR("Failed to calculate size for capture driver buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /*
     * Create a table of buffers to use with the device drivers.
     * NUM_DRIVER_BUFS was reduced to NUM_CAPTURE_BUFS (2011.9.22 by lvrain).
     */
    hBufTab = BufTab_create(NUM_DRIVER_BUFS, bufSize,
                            BufferGfx_getBufferAttrs(&gfxAttrs));

    if (hBufTab == NULL) {
        ERR("Failed to allocate contiguous buffers\n");
        cleanup(THREAD_FAILURE);
    }

    /* Create capture device driver instance */
    cAttrs.numBufs = NUM_CAPTURE_BUFS;
    /* Component input replaces composite (2011.09.26, 720p input test) */
    cAttrs.videoInput = Capture_Input_COMPONENT;

    hCapture = Capture_create(hBufTab, &cAttrs);

    if (hCapture == NULL) {
        ERR("Failed to create capture device, "
            "720P component input connected?\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get the video standard from the capture device */
    videoStd = Capture_getVideoStd(hCapture);

    /* We only support 720P capture (check disabled for testing) */
    /*
    if (videoStd != VideoStd_720P_50 && videoStd != VideoStd_720P_60) {
        ERR("Need 720P composite input to this demo\n");
        cleanup(THREAD_FAILURE);
    }
    */

#ifdef USE_BYPASS_DISPLAY /* 2011.9.22 by lvrain */
    /*
     * Create display device driver instance.
     * NOTE(review): the display shares hBufTab with the capture driver —
     * confirm that Display_create on DM6467 tolerates a shared BufTab.
     */
    dAttrs.videoStd = videoStd;
    dAttrs.numBufs  = NUM_DISPLAY_BUFS;
    hDisplay = Display_create(hBufTab, &dAttrs);

    if (hDisplay == NULL) {
        ERR("Failed to create display device\n");
        cleanup(THREAD_FAILURE);
    }

    UI_updateValue(envp->hUI, UI_Value_DisplayType, videoStdStrings[videoStd]);
#endif

#ifdef USE_TRACE /* 2011.8.30 add by lvrain for test */
    TRACE("videoStd = %d\n", videoStd);
#endif

    /* Create color conversion job for 422Psemi to 420Psemi conversion */
    ccvAttrs.accel = TRUE;
    hCcv = Ccv_create(&ccvAttrs);

    if (hCcv == NULL) {
        ERR("Failed to create color conversion job\n");
        cleanup(THREAD_FAILURE);
    }

    /* Get a buffer from the video thread */
    fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    /* Did the video thread flush the fifo? */
    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }

#if defined(USE_SECOND_STREAM) /* 2011.09.14 by lvrain */
    /* Get the second stream buffer from the video thread */
    fifoRet = Fifo_get(envp->hSecInFifo, &hSecDstBuf);

    if (fifoRet < 0) {
        ERR("Failed to get second buffer from video thread\n");
        cleanup(THREAD_FAILURE);
    }

    /* Did the video thread flush the fifo? */
    if (fifoRet == Dmai_EFLUSH) {
        cleanup(THREAD_SUCCESS);
    }
#endif

    BufferGfx_getDimensions(hDstBuf, &dstDim);

#ifdef USE_TRACE /* 2011.9.15 add by lvrain for test */
    TRACE("dstDim.width = %ld", dstDim.width);
    TRACE("dstDim.height = %ld\n", dstDim.height);
#endif

    /*
     * Only process the part of the image which is going to be encoded:
     * center-crop each capture buffer to the encode dimensions. The x
     * offset is rounded down to a multiple of 8 (& ~0x7) for alignment.
     */
    for (bufIdx = 0; bufIdx < NUM_DRIVER_BUFS; bufIdx++) {
        hCapBuf = BufTab_getBuf(hBufTab, bufIdx);
        BufferGfx_getDimensions(hCapBuf, &srcDim);

#ifdef USE_TRACE /* 2011.9.15 add by lvrain for test */
        TRACE("srcDim.width = %ld", srcDim.width);
        TRACE("srcDim.height = %ld\n", srcDim.height);
#endif

        srcDim.x      = (srcDim.width - dstDim.width) / 2 & ~0x7;
        srcDim.y      = (srcDim.height - dstDim.height) / 2;
        srcDim.width  = dstDim.width;
        srcDim.height = dstDim.height;
        BufferGfx_setDimensions(hCapBuf, &srcDim);

#ifdef USE_TRACE /* 2011.9.15 add by lvrain for test */
        TRACE("bufidx = %d srcDim.x = %ld", bufIdx, srcDim.x);
        TRACE("srcDim.y = %ld\n", srcDim.y);
#endif
    }

    gblSetImageWidth(dstDim.width);
    gblSetImageHeight(dstDim.height);

    /* Configure color conversion job */
    if (Ccv_config(hCcv, BufTab_getBuf(hBufTab, 0), hDstBuf) < 0) {
        ERR("Failed to configure color conversion job\n");
        cleanup(THREAD_FAILURE);
    }

    if (envp->osd) {
        /* If an OSD is required, create a blending job */
        hBlend = Blend_create(&blAttrs);

        if (hBlend == NULL) {
            ERR("Failed to create blending job\n");
            cleanup(THREAD_FAILURE);
        }

        bConfigParams.bmpXpos = BITMAP_X;
        bConfigParams.bmpYpos = BITMAP_Y;

#ifdef USE_IR_FUNCTION
        /* Get the current transparency */
        oldTrans = UI_getTransparency(envp->hUI);
#endif

        /* Initialize the palette */
        memcpy(bConfigParams.palette, palette, sizeof(palette));

        /* Get a bitmap Buffer from the UI */
        hBmpBuf = UI_lockScreen(envp->hUI);

        /* Configure the blending job */
        if (Blend_config(hBlend, NULL, hBmpBuf, BufTab_getBuf(hBufTab, 0),
                         BufTab_getBuf(hBufTab, 0), &bConfigParams) < 0) {
            ERR("Failed to configure blending job\n");
            cleanup(THREAD_FAILURE);
        }

        UI_unlockScreen(envp->hUI);
    }

#ifdef USE_SECOND_STREAM /* 2011.09.14 resize job for second stream, lvrain */
    /* Create the resize job */
    hRsz = Resize_create(&rszAttrs);

    if (hRsz == NULL) {
        ERR("Failed to create resize job\n");
        /* FIX: was a bare goto that left status = THREAD_SUCCESS */
        cleanup(THREAD_FAILURE);
    }

    /* Configure the resize job */
    if (Resize_config(hRsz, hDstBuf, hSecDstBuf) < 0) {
        ERR("Failed to configure resize job\n");
        /* FIX: was a bare goto that left status = THREAD_SUCCESS */
        cleanup(THREAD_FAILURE);
    }
#endif

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    while (!gblGetQuit()) {
        TRACEFLOW("the captureThrFxn is while \n");

#if 1   /* Drop every other frame to halve the rate */
        /* Capture an extra frame since encoder doesn't support 50/60Hz */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
        if (Capture_put(hCapture, hCapBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }
#endif

        /* Get a buffer from the capture driver to encode */
        if (Capture_get(hCapture, &hCapBuf) < 0) {
            ERR("Failed to get capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        if (envp->osd) {
#ifdef USE_IR_FUNCTION
            /* Get the current transparency */
            trans = UI_getTransparency(envp->hUI);

            if (trans != oldTrans) {
                int i;

                /* Change the transparency in the palette */
                for (i = 0; i < 4; i++) {
                    bConfigParams.palette[i][3] = trans;
                }

                /*
                 * Reconfigure the blending job if transparency has changed.
                 * NOTE(review): hBmpBuf is reused here without relocking the
                 * screen — confirm the handle stays valid between frames.
                 */
                if (Blend_config(hBlend, NULL, hBmpBuf, hCapBuf, hCapBuf,
                                 &bConfigParams) < 0) {
                    ERR("Failed to configure blending job\n");
                    cleanup(THREAD_FAILURE);
                }
            }
#endif
            /*
             * Because the whole screen is shown even if -r is used,
             * reset the dimensions while Blending to make sure the OSD
             * always ends up in the same place. After blending, restore
             * the real dimensions.
             */
            BufferGfx_getDimensions(hCapBuf, &srcDim);
            BufferGfx_resetDimensions(hCapBuf);

            /*
             * Lock the screen making sure no changes are done to
             * the bitmap while we render it.
             */
            hBmpBuf = UI_lockScreen(envp->hUI);

            /* Execute the blending job to draw the OSD */
            if (Blend_execute(hBlend, hBmpBuf, hCapBuf, hCapBuf) < 0) {
                ERR("Failed to execute blending job\n");
                cleanup(THREAD_FAILURE);
            }

            UI_unlockScreen(envp->hUI);

            BufferGfx_setDimensions(hCapBuf, &srcDim);
        } /* if (envp->osd) */

#ifdef USE_BYPASS_DISPLAY /* 2011.9.22 by lvrain */
        /*
         * Preview the captured buffer on the display.
         * NOTE(review): hDisBuf is fetched but never returned to the
         * display driver, and hCapBuf is handed to both Display_put and
         * Capture_put below — verify the intended buffer exchange scheme.
         */
        if (Display_get(hDisplay, &hDisBuf) < 0) {
            ERR("Failed to get display buffer\n");
            cleanup(THREAD_FAILURE);
        }
#endif

        /* Color convert the captured buffer from 422Psemi to 420Psemi */
        if (Ccv_execute(hCcv, hCapBuf, hDstBuf) < 0) {
            ERR("Failed to execute color conversion job\n");
            cleanup(THREAD_FAILURE);
        }

        /* Send color converted buffer to video thread for encoding */
        if (Fifo_put(envp->hOutFifo, hDstBuf) < 0) {
            ERR("Failed to send buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }

#ifdef USE_SECOND_STREAM /* 2011.09.14 produce second stream, lvrain */
        /* Resize the captured frame to the resolution of the display frame */
        if (Resize_execute(hRsz, hDstBuf, hSecDstBuf) < 0) {
            ERR("Failed to execute resize job\n");
            /* FIX: was a bare goto that left status = THREAD_SUCCESS */
            cleanup(THREAD_FAILURE);
        }

        /* Send second stream buffer to video thread for encoding */
        if (Fifo_put(envp->hSecOutFifo, hSecDstBuf) < 0) {
            ERR("Failed to send second buffer to video thread\n");
            cleanup(THREAD_FAILURE);
        }
#endif

#ifdef USE_BYPASS_DISPLAY /* 2011.9.22 by lvrain */
        /* Send the preview to the display device driver */
        if (Display_put(hDisplay, hCapBuf) < 0) {
            ERR("Failed to put display buffer\n");
            cleanup(THREAD_FAILURE);
        }
#endif

        /* Return a buffer to the capture driver */
        if (Capture_put(hCapture, hCapBuf) < 0) {
            ERR("Failed to put capture buffer\n");
            cleanup(THREAD_FAILURE);
        }

        /* Increment statistics for the user interface */
        gblIncFrames();

        /* Get a buffer from the video thread */
        fifoRet = Fifo_get(envp->hInFifo, &hDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }

#ifdef USE_SECOND_STREAM /* 2011.09.14 add for second stream by lvrain */
        /* Get the second stream buffer from the video thread */
        fifoRet = Fifo_get(envp->hSecInFifo, &hSecDstBuf);

        if (fifoRet < 0) {
            ERR("Failed to get resize buffer from video thread\n");
            cleanup(THREAD_FAILURE);
        }

        /* Did the video thread flush the fifo? */
        if (fifoRet == Dmai_EFLUSH) {
            cleanup(THREAD_SUCCESS);
        }
#endif
    }

cleanup:
    /* Make sure the other threads aren't waiting for us */
    Rendezvous_force(envp->hRendezvousInit);
    Pause_off(envp->hPauseProcess);
    Fifo_flush(envp->hOutFifo);
#ifdef USE_SECOND_STREAM /* 2011.09.14 add for second stream by lvrain */
    Fifo_flush(envp->hSecOutFifo);
#endif

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the thread before exiting */
    if (hBlend) {
        Blend_delete(hBlend);
    }

    if (hCcv) {
        Ccv_delete(hCcv);
    }

#ifdef USE_SECOND_STREAM /* 2011.09.14 delete second-stream resize job */
    if (hRsz) {
        Resize_delete(hRsz);
    }
#endif

#ifdef USE_BYPASS_DISPLAY
    if (hDisplay) {
        Display_delete(hDisplay);
    }
#endif

    if (hCapture) {
        Capture_delete(hCapture);
    }

    if (hBufTab) {
        BufTab_delete(hBufTab);
    }

    return status;
}
6467不支持硬件的OSD,也可以理解为,它的VPIF视频输出只支持一个显示层。所以实现OSD的方法是,把OSD图像转换成YUV格式,直接对视频显示buffer中的数据进行覆盖。简单起见,也可以只对Y Buffer中的数据进行覆盖,这样产生的OSD图像是单色的。