
📄 video4avc1394.cxx

📁 The pwlib library needed to install H323
💻 CXX
📖 Page 1 of 2
}

BOOL PVideoInput1394AvcDevice::SetWhiteness(unsigned newWhiteness)
{
  return FALSE;
}

int PVideoInput1394AvcDevice::GetWhiteness()
{
  return -1;
}

BOOL PVideoInput1394AvcDevice::GetParameters (int *whiteness, int *brightness,
                                              int *colour, int *contrast, int *hue)
{
  *whiteness = -1;
  *brightness = -1;
  *colour = -1;
  *contrast = -1;
  *hue = -1;
  return FALSE;
}

int PVideoInput1394AvcDevice::GetNumChannels()
{
  return numCameras;
}

BOOL PVideoInput1394AvcDevice::SetChannel(int newChannel)
{
  PTRACE(3, "SetChannel(" << newChannel << ")");
  if (PVideoDevice::SetChannel(newChannel) == FALSE)
    return FALSE;
  if (IsCapturing()) {
    Stop();
    Start();
  }
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::SetFrameRate(unsigned rate)
{
  return PVideoDevice::SetFrameRate(rate);
}

BOOL PVideoInput1394AvcDevice::GetFrameSizeLimits(unsigned & minWidth,
                                                  unsigned & minHeight,
                                                  unsigned & maxWidth,
                                                  unsigned & maxHeight)
{
  PTRACE(3, "GetFrameSizeLimits()");
  minWidth  = 160;
  maxWidth  = 320;
  minHeight = 120;
  maxHeight = 240;
  return TRUE;
}

PINDEX PVideoInput1394AvcDevice::GetMaxFrameBytes()
{
  PTRACE(3, "GetMaxFrameBytes()");
  if (converter != NULL) {
    PINDEX bytes = converter->GetMaxDstFrameBytes();
    if (bytes > frameBytes)
      return bytes;
  }
  return frameBytes;
}

BOOL dump_ppm(PString name, int w, int h, BYTE * data)
{
  PFile file(name, PFile::WriteOnly);
  if (!file.IsOpen())
    return FALSE;
  file.WriteString("P6\n");
  file.WriteString(w);
  file.WriteString("\n");
  file.WriteString(h);
  file.WriteString("\n255\n");
  file.Write(data, w*h*3);
  file.Close();
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::GetFrameDataNoDelay(BYTE * buffer, PINDEX * bytesReturned)
{
  PTRACE(3, "GetFrameDataNoDelay()");
  if (!IsCapturing())
    return FALSE;

  // get a DV frame first
  BOOL frame_complete = FALSE;
  BOOL found_first_frame = FALSE;
  int skipped = 0;
  int broken_frames = 0;
  BYTE capture_buffer[150000];
  BYTE * capture_buffer_end = capture_buffer;

  if (raw1394_start_iso_rcv(handle, 63) < 0) {
    PTRACE(1, "Cannot receive data on channel 63");
    return FALSE;
  }

  // Iterate the raw1394 event loop, appending 480-byte DV payloads until a
  // complete frame has been assembled.
  while (!frame_complete) {
    raw1394_loop_iterate(handle);
    if (*(uint32_t *)raw_buffer >= 492) {
      if (!found_first_frame) {
        if (raw_buffer[16] == 0x1f && raw_buffer[17] == 0x07)
          found_first_frame = TRUE;
        else
          skipped++;
      }
      if (skipped > 500) {
        PTRACE(0, "Skipped too many frames");
        return FALSE;
      }
      if (found_first_frame) {
        if (raw_buffer[16] == 0x1f && raw_buffer[17] == 0x07 &&
            (capture_buffer_end - capture_buffer > 480)) {
          // check for a short read. check if we read less than a NTSC frame
          // because it is the smaller one. still not sure how to handle NTSC vs. PAL
          if (capture_buffer_end - capture_buffer < 120000) {
            broken_frames++;
            capture_buffer_end = capture_buffer;
          } else {
            frame_complete = TRUE;
          }
        }
        if (!frame_complete) {
          memcpy(capture_buffer_end, raw_buffer+16, 480);
          capture_buffer_end += 480;
        }
      }
      if (broken_frames > 30) {
        PTRACE(1, "Received too many broken frames from the camera. Giving up for now");
        return FALSE;
      }
    }
  }
  raw1394_stop_iso_rcv(handle, 63);

  PTRACE(3, "got " << capture_buffer_end - capture_buffer << " bytes... now convert DV -> RGB");

  dv_decoder_t * dv;
  dv = dv_decoder_new(TRUE, FALSE, FALSE);
  dv->quality = DV_QUALITY_BEST;
  if (dv_parse_header(dv, capture_buffer) < 0) {
    PTRACE(0, "cannot parse header");
    return FALSE;
  }

  dv_color_space_t color_space;
  BYTE * pixels[3];
  int pitches[3];

  PTRACE(3, "Captured image from camera is w" << dv->width << "x" << dv->height);
  pitches[0] = dv->width * 3;
  pitches[1] = pitches[2] = 0;

  pixels[0] = (uint8_t *) malloc(dv->width * dv->height * 3);
  pixels[1] = pixels[2] = NULL;

  color_space = e_dv_color_rgb;

  dv_decode_full_frame(dv, capture_buffer, color_space, pixels, pitches);

//  dump_ppm("/tmp/ohphone-before.ppm", dv->width, dv->height, pixels[0]);

#if 1
  // gotta resize manually :( ... do I?
  PTRACE(3, "Manual resize " << dv->width << "x" << dv->height << " -> " << frameWidth << "x" << frameHeight);
  float xRatio = dv->width  / (float)frameWidth;
  float yRatio = dv->height / (float)frameHeight;
  for (uint y = 0; y < frameHeight; y++)
    for (uint x = 0; x < frameWidth; x++) {
      uint16_t sourceX = (uint16_t) (x * xRatio);
      uint16_t sourceY = (uint16_t) (y * yRatio);
      memcpy(pixels[0]+3*(y*frameWidth+x), pixels[0]+3*(sourceY*dv->width+sourceX), 3);
      // Temporary workaround for RGB -> BGR
      #if 1
      BYTE temp;
      int offset = (y*frameWidth+x)*3;
      temp = pixels[0][offset+0];
      pixels[0][offset+0] = pixels[0][offset+2];
      pixels[0][offset+2] = temp;
      #endif
    }
#endif

//  dump_ppm("/tmp/ohphone-after.ppm", frameWidth, frameHeight, pixels[0]);

  if (converter != NULL) {
    converter->Convert((const BYTE *)pixels[0], buffer, bytesReturned);
    if (pixels[0] != NULL) {
      free(pixels[0]);
    }
  } else {
    PTRACE(1, "Converter must exist. Something goes wrong.");
    return FALSE;
  }

  PTRACE(3, "1394avc: return frame: " << frameWidth << "x" << frameHeight);

#ifdef ESTIMATE_CAPTURE_PERFORMANCE
  ++num_captured;
  PTime now;
  double capturing_time = (double)((now.GetTimestamp()-start_time))/1000000;
  ::fprintf(stderr, "time %f, num_captured=%d, fps=%f\n",
            capturing_time, num_captured, num_captured/capturing_time);
#endif

  return TRUE;
}

BOOL PVideoInput1394AvcDevice::GetFrameData(BYTE * buffer, PINDEX * bytesReturned)
{
  PTRACE(2, "1394avc::getframedata");
  // Pace captures to the configured frame rate before grabbing the next frame.
  if (frameRate > 0) {
    if (msBetweenFrames > capturing_duration)
      PThread::Current()->Sleep(msBetweenFrames - capturing_duration);
    PTime start;
    if (!GetFrameDataNoDelay(buffer, bytesReturned))
      return FALSE;
    PTime end;
    capturing_duration = (int)((end-start).GetMilliSeconds());
    return TRUE;
  }
  return GetFrameDataNoDelay(buffer, bytesReturned);
}

void PVideoInput1394AvcDevice::ClearMapping()
{
  PTRACE(3, "ClearMapping()");
}

BOOL PVideoInput1394AvcDevice::TestAllFormats()
{
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::SetColourFormat(const PString & newFormat)
{
  PTRACE(3, "SetColourFormat(" << newFormat << ")");
  if (newFormat != colourFormat) {
    return FALSE;
  }
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::SetFrameSize(unsigned width, unsigned height)
{
  PTRACE(3, "SetFrameSize(" << width << "," << height << ")");
  /*
  if ((!(width == 320 && height == 240)) &&
      (!(width == 160 && height == 120)))
    return FALSE;
  */
  #if 0
  if ((width != 720) || (height != 480))
    return FALSE;
  #endif

  frameWidth  = width;
  frameHeight = height;

/*
  if (frameWidth == 320 && frameHeight == 240)
    colourFormat = "UYVY422";
  else if (frameWidth == 160 && frameHeight == 120)
    colourFormat = "UYV444";
*/
  colourFormat = "RGB24F";

  frameBytes = PVideoDevice::CalculateFrameBytes(frameWidth, frameHeight, colourFormat);

  // Don't really need the next lines - AVC cameras don't support change in resolution
/*
  if (IsCapturing()) {
    Stop(); Start();
  }
*/
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::SetFrameSizeConverter(unsigned width, unsigned height,
                                                     BOOL bScaleNotCrop)
{
  PTRACE(3, "SetFrameSizeConverter(" << width << "," << height << "," << bScaleNotCrop << ")");
  /*
  if (width == CIFWidth && height == CIFHeight)
    SetFrameSize(320, 240);
  else if (width == QCIFWidth && height == QCIFHeight)
    SetFrameSize(160, 120);
  else {
    PTRACE(1, width << "x" << height << " is not supported.");
    return FALSE;
  }
  */
  SetFrameSize(width, height);

  if (converter != NULL)
    delete converter;

  desiredFrameWidth = width;
  desiredFrameHeight = height;

  PTRACE(3, "Set Converter, colourFormat=" << colourFormat << " desiredformat=" << desiredColourFormat
            << " width=" << width << " height=" << height
            << " frameWidth=" << frameWidth << " frameHeight=" << frameHeight);
  converter = PColourConverter::Create(colourFormat, desiredColourFormat, width, height);
  if (converter == NULL) {
    PTRACE(1, "Failed to make a converter.");
    return FALSE;
  }
  if (converter->SetSrcFrameSize(width, height) == FALSE) {
    PTRACE(1, "Failed to set source frame size of a converter.");
    return FALSE;
  }
  if (converter->SetDstFrameSize(desiredFrameWidth, desiredFrameHeight, FALSE) == FALSE) {
    PTRACE(1, "Failed to set destination frame size (+scaling) of a converter.");
    return FALSE;
  }
  return TRUE;
}

BOOL PVideoInput1394AvcDevice::SetColourFormatConverter(const PString & colourFmt)
{
  PTRACE(3, "SetColourFormatConverter(" << colourFmt << ")");
/*
  if (colourFmt != "YUV420P") {
    PTRACE(1, colourFmt << " is unsupported.");
    return FALSE;
  }
*/
  desiredColourFormat = colourFmt;
  return SetFrameSizeConverter(desiredFrameWidth, desiredFrameHeight, FALSE);
}

void PVideoInput1394AvcDevice::GetAvcCameras()
{
  PTRACE(3, "1394AVC :: GetAvcCameras");
  camera_nodes = (nodeid_t *) malloc(sizeof(nodeid_t)*16);
  if (camera_nodes == NULL) {
    PTRACE(0, "getavccameras :: out of memory");
    return;
  }
  numCameras = 0;
  rom1394_directory rom_dir;
  if (handle == NULL) {
    PTRACE(0, "Weird, handle not created yet :-/");
    return;
  }
  if (raw1394_set_port(handle, 0) < 0) {
    PTRACE(0, "Cannot set port");
    return;
  }
  int nodecount = raw1394_get_nodecount(handle);
  for (int i = 0; i < nodecount; i++) {
    PTRACE(3, "check node " << i);
    if (rom1394_get_directory(handle, i, &rom_dir) < 0) {
      PTRACE(0, "Cannot read ROM data for node" << i);
      return;
    }
    if ((rom1394_get_node_type(&rom_dir) == ROM1394_NODE_TYPE_AVC)) {
      PTRACE(3, "Found a camera at node " << i);
      camera_nodes[numCameras++] = i;
    }
  }
}

// Isochronous receive callback: stores the packet length in the first four
// bytes of raw_buffer, followed by the packet payload.
int RawISOHandler (raw1394handle_t handle, int channel, size_t length, u_int32_t * data)
{
  PTRACE(4, "1394avc :: rawisohandler with length " << length);
  if (length < RAW_BUFFER_SIZE) {
    *(u_int32_t *) raw_buffer = length;
    memcpy(raw_buffer + 4, data, length);
  }
  return 0;
}

// End Of File ///////////////////////////////////////////////////////////////
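
The listing above is only the capture/conversion half of the driver, so here is a minimal, hypothetical usage sketch of how a caller might drive it. It is not part of video4avc1394.cxx: the helper name GrabOneFrame, the QCIF/YUV420P parameters, and the assumption that the device has already been constructed, opened and started (the constructor, Open() and Start() live in the part of the file not shown on this page) are illustrative only; SetColourFormatConverter, SetFrameSizeConverter, GetMaxFrameBytes and GetFrameData are taken from the code above.

// Hypothetical usage sketch -- not part of video4avc1394.cxx.
// Assumes <ptlib.h> plus the header declaring PVideoInput1394AvcDevice are
// included, and that the device is already open and capturing (Start() is in
// the part of the file not shown here).
void GrabOneFrame(PVideoInput1394AvcDevice & camera)
{
  // Ask for YUV420P output at QCIF size.  Internally the driver grabs a DV
  // frame over IEEE 1394, decodes it to RGB24F with libdv, and lets the
  // PColourConverter created in SetFrameSizeConverter produce this format.
  camera.SetColourFormatConverter("YUV420P");
  camera.SetFrameSizeConverter(176, 144, FALSE);

  PBYTEArray frame(camera.GetMaxFrameBytes());
  PINDEX bytesReturned = 0;
  if (camera.GetFrameData(frame.GetPointer(), &bytesReturned))
    PTRACE(3, "Got a frame of " << bytesReturned << " bytes");
}

Note that SetColourFormatConverter records the desired output format and then re-runs SetFrameSizeConverter, which is where the PColourConverter used by GetFrameDataNoDelay is actually created.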
