/*
** usr_routines.c
**
** Excerpt from "Rapid Development of Video Applications Based on the
** Blackfin Processor" (C listing, 544 lines total; page 1 of 2).
*/
memdma1_descriptor_data[9].Large.StartAddress = (void *)&(Video_Frames[Video_Frame_Completed_Number])[Start_of_active_Video_Frame[3]];
memdma1_descriptor_data[10].Large.StartAddress = (void *)&(Video_Frames[Video_Frame_Completed_Number])[Start_of_active_Video_Frame[4]];
memdma1_descriptor_data[11].Large.StartAddress = (void *)&(Video_Frames[Video_Frame_Completed_Number])[Start_of_active_Video_Frame[5]];
}
/*
**
** Function: InitDescriptorChains_Input
**
** Description: Initialize the MDMA descriptor chains for input from PPI
**
**
*/
void InitDescriptorChains_Input()
{
    // One-time setup of the YUV -> Video Frame MDMA descriptor chains.
    // Everything written here is fixed for the current input geometry and does
    // not vary from frame to frame; the per-frame StartAddress fields are
    // filled in later by ConfigDescriptorChains_Input().
    int lHalfHeight  = Input_Height/2;                      // one interlaced field = half the lines
    int lSrcYModify  = Input_Width + 1;                     // presumably skips the other field's line in the Y plane — assumes XModify of 1, TODO confirm
    int lDstYModify  = (CCIR656_WIDTH-(Input_Width-1)*2);   // end-of-row stride into the interleaved 656 line (Y samples)
    int lDstUVModify = (CCIR656_WIDTH-(Input_Width-1)*2+2); // same stride, shifted for the chroma sample positions

    // ---- source descriptors (planar YUV input buffer) ----
    // Y, 1st field
    memdma1_descriptor_data[0].Large.XCount = Input_Width;
    memdma1_descriptor_data[0].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[0].Large.YModify = lSrcYModify;
    // U, 1st field
    memdma1_descriptor_data[1].Large.XCount = Input_Width/2;
    memdma1_descriptor_data[1].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[1].Large.YModify = 1;
    // V, 1st field
    memdma1_descriptor_data[2].Large.XCount = Input_Width/2;
    memdma1_descriptor_data[2].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[2].Large.YModify = 1;
    // Y, 2nd field (only Y is required for the second half)
    memdma1_descriptor_data[3].Large.XCount = Input_Width;
    memdma1_descriptor_data[3].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[3].Large.YModify = lSrcYModify;

    // ---- destination descriptors (interleaved ITU-R 656 video frame) ----
    // Y, 1st field
    memdma1_descriptor_data[6].Large.XCount = Input_Width;
    memdma1_descriptor_data[6].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[6].Large.YModify = lDstYModify;
    // U, 1st field
    memdma1_descriptor_data[7].Large.XCount = Input_Width/2;
    memdma1_descriptor_data[7].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[7].Large.YModify = lDstUVModify;
    // V, 1st field
    memdma1_descriptor_data[8].Large.XCount = Input_Width/2;
    memdma1_descriptor_data[8].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[8].Large.YModify = lDstUVModify;
    // Y, 2nd field
    memdma1_descriptor_data[9].Large.XCount = Input_Width;
    memdma1_descriptor_data[9].Large.YCount = lHalfHeight;
    memdma1_descriptor_data[9].Large.YModify = lDstYModify;

    // Center a smaller-than-display input picture in the output frame.
    Frame_Offset = max(0, (CCIR656_ACTIVEWIDTH - Input_Width)/2)*2 + max(0, (CCIR656_ACTIVEHEIGHT - Input_Height)/2) * CCIR656_WIDTH/2;
    // Byte offset of the first active Y/U/V sample of field 1, and the first
    // active Y sample of field 2, inside a 656 frame buffer.
    Start_of_active_Video_Frame[0] = (CCIR656_WIDTH*CCIR656_FIELD1_OFFSET) + (CCIR656_WIDTH-2*CCIR656_ACTIVEWIDTH) + CCIR656_YOFFSET + Frame_Offset;
    Start_of_active_Video_Frame[1] = (CCIR656_WIDTH*CCIR656_FIELD1_OFFSET) + (CCIR656_WIDTH-2*CCIR656_ACTIVEWIDTH) + CCIR656_UOFFSET + Frame_Offset;
    Start_of_active_Video_Frame[2] = (CCIR656_WIDTH*CCIR656_FIELD1_OFFSET) + (CCIR656_WIDTH-2*CCIR656_ACTIVEWIDTH) + CCIR656_VOFFSET + Frame_Offset;
    Start_of_active_Video_Frame[3] = (CCIR656_WIDTH*CCIR656_FIELD2_OFFSET) + (CCIR656_WIDTH-2*CCIR656_ACTIVEWIDTH) + CCIR656_YOFFSET + Frame_Offset;
}
/*
**
** Function: ConfigDescriptorChains_Input
**
** Description: Configure the per-frame MDMA descriptor start addresses for input from PPI
**
**
*/
void ConfigDescriptorChains_Input()
{
    // Per-frame MDMA descriptor setup: aim the source descriptors at the
    // planar YUV buffer currently selected for reading, and the destination
    // descriptors at the active area of the just-completed video frame.
    char *lSrcBase = (char*)YUV_Buffer_Address[YUVBufferReadIndex];
    int lYPlaneSize = Input_Width*Input_Height;
    int i;

    // Sources: Y field 1 at the buffer start; U and V planes follow the Y
    // plane (4:2:0 layout per these offsets); Y field 2 starts at the
    // second row of the Y plane.
    memdma1_descriptor_data[0].Large.StartAddress = lSrcBase;
    memdma1_descriptor_data[1].Large.StartAddress = lSrcBase + lYPlaneSize;
    memdma1_descriptor_data[2].Large.StartAddress = lSrcBase + lYPlaneSize*5/4;
    memdma1_descriptor_data[3].Large.StartAddress = lSrcBase + Input_Width;

    // Destinations 6..9 track Start_of_active_Video_Frame[0..3]
    // (Y/U/V of field 1, then Y of field 2) within the output frame.
    for (i = 0; i < 4; i++)
    {
        memdma1_descriptor_data[6 + i].Large.StartAddress =
            (void *)&(Video_Frames[Video_Frame_Completed_Number])[Start_of_active_Video_Frame[i]];
    }
}
/*
**
** Function: PPI_Out_User_Callback
**
** Description: PPI callback handlers (output)
**
**
*/
void PPI_Out_User_Callback (
void *AppHandle,
u32 Event,
void *pArg)
{
    // CASEOF (event type)
    switch (Event) {
        // CASE (buffer processed)
        case ADI_DEV_EVENT_BUFFER_PROCESSED:
            // When the buffer chain was created, each buffer's
            // CallbackParameter was set to (frame number + 1), so pArg
            // carries the 1-based number of the frame just completed.
            Video_Frame_Completed_Number = (int)pArg - 1; // frame that was just completed
            Video_Frame_Completed_Flag = true;            // semaphore to indicate it was completed
            Video_Frame_Counter++;                        // total number of frames that have been processed
            if (Start_MDMA)
            {
                // A new frame can be staged only when the previous MDMA
                // finished (Frame_Ready) AND a decoded buffer is queued.
                if (Frame_Ready && (BufferLevel > 0))
                {
                    Frame_Ready = false;
                    ConfigDescriptorChains_Output();
                    KickOff_YUV_to_VideoFrame_DMA();
                }
                else
                {
                    // Nothing ready to display this period: count the drop.
                    Frame_Dropped_Counter++;
                }
            }
            break;
        // CASE (an error)
        case ADI_DEV_EVENT_DMA_ERROR_INTERRUPT:
        case ADI_PPI_EVENT_ERROR_INTERRUPT:
            // Unrecoverable: turn on all LEDs and halt here for debugging.
            ezTurnOnAllLEDs();
            while (1) ;
        // ENDCASE
        default:
            // Other driver events are not of interest to this handler.
            break;
    }
    // return
}
/*
**
** Function: PPI_In_User_Callback
**
** Description: PPI callback handlers (input)
**
**
*/
void PPI_In_User_Callback(
void *AppHandle,
u32 Event,
void *pArg)
{
    // CASEOF (event type)
    switch (Event) {
        // CASE (buffer processed)
        case ADI_DEV_EVENT_BUFFER_PROCESSED:
            // When the buffer chain was created, each buffer's
            // CallbackParameter was set to (frame number + 1), so pArg
            // carries the 1-based number of the frame just completed.
            Video_Frame_Completed_Number = (int)pArg - 1; // frame that was just completed
            Video_Frame_Completed_Flag = true;            // semaphore to indicate it was completed
            Video_Frame_Counter++;                        // total number of frames that have been processed
            if (Start_MDMA)
            {
                // Stage the next frame only when the previous MDMA finished
                // (Frame_Ready) AND a buffer is available to fill.
                if (Frame_Ready && (BufferLevel > 0))
                {
                    ConfigDescriptorChains_Input();
                    Frame_Ready = false;
                    KickOff_YUV_to_VideoFrame_DMA();
                }
                else
                {
                    // No capacity this period: record the dropped frame.
                    Frame_Dropped_Counter++;
                    Frame_Dropped = true;
                }
            }
            break;
        // CASE (an error)
        case ADI_DEV_EVENT_DMA_ERROR_INTERRUPT:
        case ADI_PPI_EVENT_ERROR_INTERRUPT:
            // Unrecoverable: turn on all LEDs and halt here for debugging.
            ezTurnOnAllLEDs();
            while (1) ;
        // ENDCASE
        default:
            // Other driver events are not of interest to this handler.
            break;
    }
    // return
}
/*
**
** Function: MDMA1_User_Callback
**
** Description: MDMA1 ISR
**
**
*/
void MDMA1_User_Callback( void *AppHandle,
u32 Event,
void *pArg )
{
    // MDMA1 completion ISR: the YUV -> Video Frame descriptor chain finished.
    // CASEOF (event type)
    switch (Event) {
        // CASE (descriptor chain processed)
        case ADI_DMA_EVENT_DESCRIPTOR_PROCESSED:
            // Consume the YUV buffer that was just transferred.
            // NOTE(review): BufferLevel is also read/written by the PPI
            // callbacks; assumes those accesses are interrupt-safe — confirm.
            if (BufferLevel > 0)
            {
                UpdateYUVIndex(UPDATETYPE_READ);
                BufferLevel--;
            }
            // Let the PPI callbacks kick off the next transfer.
            Frame_Ready = true;
            break;
        // CASE (an error)
        case ADI_DMA_EVENT_ERROR_INTERRUPT:
            // Unrecoverable: turn on all LEDs and halt here for debugging.
            ezTurnOnAllLEDs();
            while (1) ;
        // ENDCASE
        default:
            // Other DMA events are not of interest to this handler.
            break;
    }
}