// ov2630.cpp
break;
}
default:
ERRORMSG(1,(L"[Camera]: Unrecognized resolution!"));
break;
}
return 0;
}
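// Reset the sensor and program the output frame geometry for the requested
// frame size (CIF, SVGA or UXGA; anything else falls back to full UXGA).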
void Ov2630::set_frame_format(int format, FrameSize size)
{
OV2630Reset();
frame_size = size;
UINT32 width, height;
UINT32 x[2], y[2];
// The OV2630 only supports the RAW RGB (Bayer) output format, so only the frame size is selected here.
switch(size)
{
case CIF:
width = 352;
height = 288;
break;
case SVGA:
width = 800;
height = 600;
break;
case UXGA:
width = 1600;
height = 1200;
break;
default:
width = 1600;
height = 1200;
break;
}
OV2630SetFormat(width, height, &x[0], &y[0], &x[1], &y[1]);
}
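// Stream-flush callback handed to the IPP JPEG encoder: copies the encoded
// bytes into the destination IppBitstream and advances its write pointer.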
int JPEGStreamFlush(void *pStream, void *pStreamHandler, int nAvailBytes, int flag)
{
IppBitstream *dstBS = (IppBitstream *)pStreamHandler;
Ipp8u *pData = (Ipp8u *)pStream;
// FIXME: add out of range handling
memcpy(dstBS->pBsCurByte, pData, nAvailBytes);
dstBS->pBsCurByte += nAvailBytes;
return nAvailBytes;
}
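// Per-frame handler: converts the captured raw frame into the format the
// client asked for (pre-encoded JPEG or YV12) and writes it into the
// MDD-supplied buffer. Returns the number of bytes produced for the JPEG
// path, 0 otherwise.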
DWORD Ov2630::handle_frame_interrupt(PUCHAR buf,
CS_BITMAPINFOHEADER* info_header,
frame_t* frame)
{
int nWidth = abs(info_header->biWidth);
int nHeight = abs(info_header->biHeight);
int y_size = nWidth * nHeight;
format_t* format = &frame->list->format;
PUCHAR dest[3];
int nQCIWidth = format->width;
int nQCIHeight = format->height;
DWORD dwRetValue = 0;
int nRetVal;
switch (info_header->biCompression & ~BI_SRCPREROTATE)
{
case FOURCC_IJPG:
{
IppCodecStatus retCode = IPP_STATUS_NOERR;
MiscGeneralCallbackTable *pCBTable = NULL;
void *pJPEGEncState = NULL;
IppPicture srcPicture;
IppBitstream dstBitstream;
memset(&srcPicture, 0x00, sizeof(IppPicture));
memset(&dstBitstream, 0x00, sizeof(IppBitstream));
if (0 != miscInitGeneralCallbackTable(&pCBTable))
{
ERRORMSG(1, (L"[Camera]: miscInitGeneralCallbackTable() FAILED!!!"));
return dwRetValue;
}
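// Baseline JPEG at quality 80 with 4:1:1 subsampling, no restart markers,
// using the encoder's integrated buffer mode.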
IppJPEGEncoderParam jpegEncParam;
memset(&jpegEncParam, 0x00, sizeof(IppJPEGEncoderParam));
jpegEncParam.nQuality = 80;
jpegEncParam.nRestartInterval = 0;
jpegEncParam.nJPEGMode = JPEG_BASELINE;
jpegEncParam.nSubsampling = JPEG_411;
jpegEncParam.nBufType = JPEG_INTEGRATEBUF;
Ipp8u *pSrc = (Ipp8u *)(frame->plane[0].buf.buf);
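// Lay out the intermediate planes in YV12 order (Y, then V, then U) inside
// m_pRGB2YUVBuf; each chroma plane is a quarter of the Y plane size.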
dest[0] = m_pRGB2YUVBuf;
if (NULL == dest[0])
{
ERRORMSG(1, (TEXT("CAM: Failed to malloc!!!")));
miscFreeGeneralCallbackTable(&pCBTable);
return dwRetValue;
}
dest[2] = dest[0] + (nWidth* nHeight);
dest[1] = dest[2] + ((nWidth* nHeight) >> 2);
pCBTable->fStreamFlush = (MiscStreamFlushCallback)JPEGStreamFlush;
// Set source picture properties
srcPicture.picWidth = nWidth;
srcPicture.picHeight = nHeight;
srcPicture.picPlaneNum = 3;
srcPicture.picChannelNum = 3;
srcPicture.picFormat = JPEG_YUV411;
srcPicture.ppPicPlane[0] = dest[0];
srcPicture.ppPicPlane[1] = dest[1];
srcPicture.ppPicPlane[2] = dest[2];
srcPicture.picPlaneStep[0] = nWidth;
srcPicture.picPlaneStep[1] = (nWidth >> 1);
srcPicture.picPlaneStep[2] = (nWidth >> 1);
// Attach output sample buffer to IppBitstream as output for MVED JPEG enc
dstBitstream.pBsBuffer = (Ipp8u *)buf;
dstBitstream.pBsCurByte = dstBitstream.pBsBuffer;
dstBitstream.bsByteLen = nWidth * nHeight / 4; // info_header->biSizeImage;
dstBitstream.bsCurBitOffset = 0;
//NKDbgPrintfW(TEXT("dstBitstream.bsByteLen is %d"),dstBitstream.bsByteLen);
//NKDbgPrintfW(TEXT("CAM: pSrc 0x%x, dest[0] 0x%x, dest[1] 0x%x, dest[2] 0x%x"),pSrc, dest[0],dest[1],dest[2]);
#if 1
rpp_mcu = rggb2yuv_init(1600, 1200, color_cfg.color_correct_coe);
if (NULL == rpp_mcu)
{
ERRORMSG(1,(L"[Camera]: IPP Initializing Failed!!!"));
miscFreeGeneralCallbackTable(&pCBTable);
return dwRetValue;
}
#endif
//NKDbgPrintfW(TEXT("CAM: rpp_mcu 0x%x"),rpp_mcu);
retCode = EncoderInitAlloc_JPEG(&jpegEncParam, &srcPicture, &dstBitstream, pCBTable, &pJPEGEncState);
if (IPP_STATUS_NOERR != retCode)
{
ERRORMSG(1, (L"[Camera]: EncoderInitAlloc_JPEG(%d) FAILED!!!", retCode));
miscFreeGeneralCallbackTable(&pCBTable);
return dwRetValue;
}
nRetVal = ippi10RGGBtoYCbCr_RotRsz_8u_P3R(pSrc, dest, rpp_mcu);
if (0 != nRetVal)
{
ERRORMSG(1, (TEXT("[Camera]: ippi10RGGBtoYCbCr_RotRsz_8u_P3R(%d) Failed"), nRetVal));
rggb2yuv_deinit(rpp_mcu);
rpp_mcu = NULL;
EncoderFree_JPEG(&pJPEGEncState);
miscFreeGeneralCallbackTable(&pCBTable);
return dwRetValue;
}
#if 1
if (rpp_mcu)
{
rggb2yuv_deinit(rpp_mcu);
rpp_mcu = NULL;
}
#endif
retCode = Encode_JPEG(&srcPicture, &dstBitstream, pJPEGEncState);
if ((IPP_STATUS_NEED_INPUT != retCode) && (IPP_STATUS_NOERR != retCode))
{
ERRORMSG(1,(L"[Camera]: Encode_JPEG(%d) Failed!!!", retCode));
EncoderFree_JPEG(&pJPEGEncState);
miscFreeGeneralCallbackTable(&pCBTable);
return dwRetValue;
}
retCode = EncoderFree_JPEG(&pJPEGEncState);
if (IPP_STATUS_NOERR != retCode)
{
ERRORMSG(1,(L"[Camera]: EncoderFree_JPEG(%d) Failed!!!", retCode));
}
miscFreeGeneralCallbackTable(&pCBTable);
// Tell MDD the actual pre-encoded JPEG data size
dwRetValue = (long)(dstBitstream.pBsCurByte - dstBitstream.pBsBuffer);
//NKDbgPrintfW(TEXT("CAM: Encode bytes %d"),dwRetValue);
break;
}
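// YV12 output path: when the requested size differs from the captured size
// the frame is shrunk and rotated while converting from YV16; otherwise
// fill_buffer_yv12() fills the destination planes directly.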
case FOURCC_YV12:
if ((nQCIWidth != nWidth)||(nQCIHeight != nHeight))
{
dest[0] = buf;
dest[2] = dest[0] + y_size;
dest[1] = dest[2] + (y_size >> 2);
yv16_to_yv12_convert_rotate_shrink(dest, frame);
}
else
{
dest[0] = buf;
dest[2] = dest[0] + y_size;
dest[1] = dest[2] + (y_size >> 2);
fill_buffer_yv12(dest, frame);
}
break;
default:
ERRORMSG(1,(L"[Camera]: Unsupported frame format !!!\r\n"));
break;
}
return dwRetValue;
}
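// Convert a YV16 (4:2:2) capture into a YV12 (4:2:0) frame at half the width
// and height, rotating the image 180 degrees in the process. Luma is
// decimated 2:1 in both directions; each chroma plane is decimated 2:1
// horizontally and 4:1 vertically relative to the YV16 chroma planes.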
static void yv16_to_yv12_convert_rotate_shrink(PUCHAR* dest_plane, frame_t* frame)
{
format_t *format = &frame->list->format;
unsigned int y_size = format->width * format->height;
UINT32 x, y;
UINT32 src_x, src_y;
UINT32 w, h;
UCHAR* dst;
UCHAR* src;
dst = dest_plane[0];
src = (PUCHAR)frame->plane[0].buf.buf;
w = format->width >> 1;
h = format->height >> 1;
for (y = 0; y < h; y++)
for (x = 0; x < w; x++)
{
src_x = x << 1;
src_y = y << 1;
dst[(h - y - 1) * w + (w - x - 1)] = src[src_x + src_y * format->width];
}
w = w >> 1;
h = h >> 1;
UINT32 src_w = format->width >> 1;
UINT32 i;
for (i = 0; i < 2; i++)
{
dst = dest_plane[2 - i];
src = (PUCHAR)frame->plane[i + 1].buf.buf;
for (y = 0; y < h; y++)
for (x = 0; x < w; x++)
{
src_x = x << 1;
src_y = y << 2;
dst[(h - y - 1) * w + (w - x - 1)] = src[src_x + src_y * src_w];
}
}
}
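// Debug helper: read back and print all OV2630 registers (0x00-0x8A).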
#ifdef DEBUG
static void dump_Ov2630()
{
UCHAR buf[0x8b];
OV2630ReadAllRegs(buf, 0x8b);
for (int i = 0; i < 0x8b; i++)
DEBUGMSG(ZONE_IOCTL,(L"CAM: OV2630 reg %x = %x", i, buf[i]));
}
#endif
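// Convert a floating-point colour-correction coefficient to Q8 fixed point
// (scale by 256 and round to the nearest integer).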
#define IPP_COE_FLOAT_TO_INT(x) ((short)((x) * (1 << 8) + 0.5))
#if 0
static Ipp16s pMatrix[9];
static IppiRawPixProcCfg_P3R ippiRPPCfg;
static IppiCAMCfg ippiCAMCfg;
#endif
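// Allocate and configure an IPP raw-pixel-processing spec: 10-bit RGGB Bayer
// to YCbCr 4:2:0 with bilinear interpolation and the supplied 3x3
// colour-correction matrix; no dead-pixel map, gamma table, rotation or
// resize is applied.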
static IppiRawPixProcSpec_P3R *rggb2yuv_init(UINT32 width,
UINT32 height,
float* coe)
{
IppiRawPixProcSpec_P3R *ippiRPPSpec = NULL;
Ipp16s pMatrix[9];
IppiRawPixProcCfg_P3R ippiRPPCfg;
IppiCAMCfg ippiCAMCfg;
for (int i = 0; i < sizeof(pMatrix) / sizeof(pMatrix[0]); i++)
{
pMatrix[i] = IPP_COE_FLOAT_TO_INT(coe[i]);
DEBUGMSG(ZONE_IOCTL,(L"CAM: Matrix[%d] %d", i, pMatrix[i]));
}
/* Initialize the CAMCfg structure */
ippiCAMCfg.DPMLen = 0;
ippiCAMCfg.DPMOffset.x = 0;
ippiCAMCfg.DPMOffset.y = 0;
ippiCAMCfg.DPInterp = ippCameraInterpNearest;
ippiCAMCfg.GammaFlag = ippGamPreOneTable;
ippiCAMCfg.GammaIndex[0] = 0;
ippiCAMCfg.pCCMatrix = pMatrix;
ippiCAMCfg.pDeadPixMap = NULL; /* if no dead pixel map */
ippiCAMCfg.pGammaTable[0] = NULL;
ippiCAMCfg.pGammaTable[1] = NULL;
ippiCAMCfg.pGammaTable[2] = NULL;
/* Initialize RPPCfg structure */
ippiRPPCfg.interpolation = ippCameraInterpBilinear;
ippiRPPCfg.colorConversion = ippCameraCscRGGBToYCbCr420;
ippiRPPCfg.rotation = ippCameraRotateDisable; // ippCameraRotate180;
ippiRPPCfg.srcSize.width = width;
ippiRPPCfg.srcSize.height = height;
ippiRPPCfg.srcStep = width << 1;
ippiRPPCfg.dstSize.width = width;
ippiRPPCfg.dstSize.height = height;
ippiRPPCfg.dstStep[0] = ippiRPPCfg.dstSize.width;
ippiRPPCfg.dstStep[1] = ippiRPPCfg.dstSize.width >> 1;
ippiRPPCfg.dstStep[2] = ippiRPPCfg.dstSize.width >> 1;
ippiRPPCfg.bExtendBorder = ippFalse;
/* call init function */
if (ippiInitAlloc_10RGGBtoYCbCr_RotRsz_P3R(&ippiCAMCfg, &ippiRPPCfg, &ippiRPPSpec) != ippStsOk)
{
ERRORMSG(1,(L"[Camera]: Init IPP Failed!!!"));
return NULL;
}
return ippiRPPSpec;
}
static void rggb2yuv_deinit(IppiRawPixProcSpec_P3R *ippiRPPSpec)
{
if (ippiRPPSpec)
ippiFree_10RGGBtoYCbCr_RotRsz_P3R(ippiRPPSpec);
}
void Ov2630::write_sensor_reg(UCHAR addr, UCHAR data)
{
OV2630WriteSensorReg(addr, &data);
}
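// Program exposure and gain. The 16-bit exposure time is split across three
// registers (top 6 bits in 0x45 together with part of the digital gain,
// middle 8 bits in 0x10, low 2 bits in 0x04); analog gain goes to register
// 0x00 and the remaining digital gain bits to 0x4c. The register roles are
// inferred from the addresses used below, not from a datasheet.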
void Ov2630::set_exposure_gain(UINT16 time, UINT16 gain)
{
DEBUGMSG(ZONE_IOCTL,(L"CAM: Ov2630 Exposure is %d\r\n", time));
UCHAR digi_gain1, digi_gain2;
get_digi_gain(gain, &digi_gain1, &digi_gain2);
UCHAR reg_aech = (UCHAR)((time & 0xfc00) >> 10);
reg_aech |= digi_gain1 << 6;
write_sensor_reg(0x8a, 0);
write_sensor_reg(0x45, reg_aech);
write_sensor_reg(0x4c, digi_gain2 << 5);
write_sensor_reg(0x10, (UCHAR)((time & 0x3fc) >> 2));
write_sensor_reg(0x04, (UCHAR)(time & 0x3));
write_sensor_reg(0x0, get_analog_gain(gain));
}
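// Encode a linear gain (clamped to x32) into the sensor's analog gain
// register: presumably each set bit in the upper nibble doubles the gain and
// the lower nibble adds fractional steps of 1/16 above that.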
UCHAR get_analog_gain(UINT16 gain)
{
UINT32 i = 0;
if (gain > 32)
gain = 32;
UCHAR sensor_gain = 0;
DEBUGMSG(ZONE_IOCTL,(L"CAM: Ov2630 Gain is %d\r\n", gain));
while(gain > 2 && i < 4)
{
gain = gain / 2;
sensor_gain |= 0x10 << i;
i++;
}
UCHAR small_gain = (UCHAR)(gain * 16) - 16;
if (small_gain >= 16)
small_gain = 15;
sensor_gain |= small_gain;
return sensor_gain;
}
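// Map a digital gain factor to the 2-bit register field used above:
// 0 for <= x1, 1 for <= x3, 3 otherwise.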
UCHAR digi_gain_convert(UCHAR gain)
{
if (gain <= 1)
return 0;
if (gain <= 3)
return 1;
else
return 3;
}
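// Split the requested total gain into an analog part (up to x32) and two
// digital gain stages; gains of x32 or less use analog gain only.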
void get_digi_gain(UINT16 gain, UCHAR* digi_gain1, UCHAR* digi_gain2)
{
UCHAR gain1 = 1, gain2 = 1;
if (gain <= 32)
{
*digi_gain1 = *digi_gain2 = 0;
return;
}
gain = gain / 32;
if (gain > 4)
{
gain2 = gain / 4;
gain1 = 4;
} else
gain1 = (UCHAR)gain;
*digi_gain1 = digi_gain_convert(gain1);
*digi_gain2 = digi_gain_convert(gain2);
}