videomanrendererogl.cpp

VideoMan is a very easy-to-use image acquisition library. It is able to manage many video inputs at the same time.

Language: C++ (page 1 of 2)
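A minimal usage sketch (an assumption for illustration, not code from the listed file): it shows how this renderer might be driven, assuming a valid OpenGL context is current, that the methods defined below are publicly accessible through VideoManRendererOGL.h, and that "video" comes from one of VideoMan's capture back-ends. The helper names setupRenderer and drawFrame are hypothetical.

#include "VideoManRendererOGL.h"

//Hypothetical one-time setup: register a capture source with the renderer.
void setupRenderer( VideoManRendererOGL &renderer, VideoInput *video )
{
	renderer.addVideoInput( video );   //creates the texture and activates the input
}

//Hypothetical per-frame step.
void drawFrame( VideoManRendererOGL &renderer, size_t inputIndex )
{
	renderer.updateTexture( inputIndex );   //upload the newest pixelBuffer to the texture
	renderer.renderInputs();                //draw every activated input into its viewport
}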
//#include "StdAfx.h"
#include <math.h>

#include "VideoManRendererOGL.h"

#ifndef GL_BGRA
#define GL_BGRA GL_BGRA_EXT
#endif

#ifndef GL_BGR
#define GL_BGR GL_BGR_EXT
#endif

RendererOGLInput::RendererOGLInput(void)
{
}

RendererOGLInput::~RendererOGLInput(void)
{
}

VideoManRendererOGL::VideoManRendererOGL(void)
{
	supportedFormats.push_back(RGB24);
	supportedFormats.push_back(BGR24);
	supportedFormats.push_back(RGB32);
	supportedFormats.push_back(BGR32);
	supportedFormats.push_back(GREY16);
	supportedFormats.push_back(GREY8);	
}

VideoManRendererOGL::~VideoManRendererOGL(void)
{
	for( size_t v = 0; v < inputList.size(); v++ )
	{
		deleteVideoInput( v );
	}
	inputList.clear();
}

bool VideoManRendererOGL::addVideoInput( VideoInput *video )
{
	RendererOGLInput input;

	input.videoInput = video;
	VideoManInputFormat format = video->getVideoManInputFormat();
	PIXEL_FORMAT pF = format.getPixelFormatOut();
	input.depth = format.depth;
	input.nChannels = format.nChannels;
	input.width = format.width;
	input.height = format.height;
	input.pixelFormat = pF;
	input.supported = supportedFormat( pF );	
	if ( !input.supported )
	{
		input.activated = false;
		inputList.push_back( input );
		return false;
	}
		
	if ( !generateTexture( &input ) )
	{
		return false;
	}

	//Initialize screen coordinates
	input.screenCoords.bottom = 0;
	input.screenCoords.left = 0;
	input.screenCoords.width = input.width;
	input.screenCoords.height = input.height;
	
	if ( emptyIndexes.size() > 0 )
	{
		size_t inputIndex = static_cast<size_t>( emptyIndexes.front() );
		emptyIndexes.erase( emptyIndexes.begin() );
		inputList[inputIndex] = input;
		activateVideoInput( inputIndex );
	}
	else
	{		
		inputList.push_back( input );
		activateVideoInput( inputList.size() - 1 );
	}
	//inputList.push_back( input );
	//activateVideoInput( inputList.size() - 1 );
	return true;
}

void VideoManRendererOGL::deleteVideoInput( size_t inputIndex )
{
	assert( inputIndex < inputList.size() && "deleteVideoInput: Index out of range");
	glDeleteTextures( 1, &inputList[inputIndex].texture );
	deactivateVideoInput( inputIndex );
	inputList[inputIndex].supported = false;	
	emptyIndexes.push_back( inputIndex );
	emptyIndexes.sort();
}


bool VideoManRendererOGL::generateTexture( RendererInput *input )
{
	RendererOGLInput *inputOGL = static_cast<RendererOGLInput*>( input );
	/*int pow = 32;
	while( pow < inputOGL->width )//&& pow <= MAX_SIZE )
	{
		pow = pow * 2;
	}
	int textureWidth = pow;
	pow = 32;
	while( pow<inputOGL->height )//&& pow<=MAX_SIZE )
	{
		pow = pow * 2;
	}
	int textureHeight = pow;*/
	int textureWidth = inputOGL->width;
	int textureHeight = inputOGL->height;

	float Bpp = static_cast<float>( inputOGL->depth ) / 8.0f;
	//std::vector<char*> data(textureHeight * textureWidth * static_cast<size_t>( Bpp ), 0);	
	glGenTextures( 1, &inputOGL->texture );
	glBindTexture( GL_TEXTURE_2D, inputOGL->texture );

	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP );	

	//Changed to allow loading 16 bpp, single-channel images - UNTESTED!!!
	//glTexImage2D(GL_TEXTURE_2D, 0, 3, textureWidth, textureHeight, 0, video.format.oglTextureFormat, GL_UNSIGNED_BYTE, &data[0]);
	//Note: the numeric internal formats below (1, 3) are the legacy component counts;
	//for the 32 bpp formats the alpha channel is discarded by the 3-component internal format.
	inputOGL->dataType = GL_UNSIGNED_BYTE;
	switch ( inputOGL->pixelFormat )
	{
		case GREY16:
		{
			inputOGL->oglTextureFormat = GL_LUMINANCE;
			inputOGL->internalFormat = GL_LUMINANCE16;
			inputOGL->dataType = GL_UNSIGNED_SHORT;
			glTexImage2D( GL_TEXTURE_2D, 0, inputOGL->internalFormat, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, inputOGL->dataType, NULL );
			break;
		}
		case GREY8:	
		{
			inputOGL->oglTextureFormat = GL_LUMINANCE;
			inputOGL->internalFormat = 1;
			glTexImage2D( GL_TEXTURE_2D, 0, 1, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		case RAW8:
		{
			inputOGL->oglTextureFormat = GL_LUMINANCE;
			inputOGL->internalFormat = 1;
			glTexImage2D( GL_TEXTURE_2D, 0, 1, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		case BGR24:
		{			
			inputOGL->oglTextureFormat = GL_BGR;
			inputOGL->internalFormat = 3;
			glTexImage2D( GL_TEXTURE_2D, 0, 3, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		case RGB24:
		{
			inputOGL->oglTextureFormat = GL_RGB;
			inputOGL->internalFormat = 3;
			glTexImage2D( GL_TEXTURE_2D, 0, 3, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		case BGR32:
		{
			inputOGL->oglTextureFormat = GL_BGRA;
			inputOGL->internalFormat = 3;
			glTexImage2D( GL_TEXTURE_2D, 0, 3, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		case RGB32:
		{
			inputOGL->oglTextureFormat = GL_RGBA;
			inputOGL->internalFormat = 3;
			glTexImage2D( GL_TEXTURE_2D, 0, 3, textureWidth, textureHeight, 0, inputOGL->oglTextureFormat, GL_UNSIGNED_BYTE, NULL );
			break;
		}
		default: 
			{				
				return false;
			}
	}
	//Calculate texture coordinates
	inputOGL->tu1 = 0.0f;
	inputOGL->tv1 = 0.0f;
	inputOGL->tu2 = static_cast<float>( inputOGL->width ) / static_cast<float>( textureWidth );
	inputOGL->tv2 = static_cast<float>( inputOGL->height ) / static_cast<float>( textureHeight );
	
	return true;
}
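
//Hypothetical helper (not part of the original file): the commented-out block at the
//top of generateTexture rounds each dimension up to a power of two, starting at 32,
//which would be needed on drivers without non-power-of-two (NPOT) texture support.
//A minimal sketch of that idea:
static int roundUpToPowerOfTwo( int value, int minimum = 32 )
{
	int pow = minimum;
	while ( pow < value )
		pow *= 2;
	return pow;
}
//If NPOT textures were unavailable, generateTexture could then use:
//	int textureWidth  = roundUpToPowerOfTwo( inputOGL->width );
//	int textureHeight = roundUpToPowerOfTwo( inputOGL->height );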


inline void VideoManRendererOGL::renderInputs()
{
	for( size_t i = 0; i < inputList.size(); i++ )
	{
		renderInput( i );
	}
}


inline void VideoManRendererOGL::renderInput( size_t v )
{
	assert( v < inputList.size() && "renderInput: Index out of range");
	
	if ( !inputList[v].activated )
		return;
	glEnable(GL_TEXTURE_2D);
	glDisable(GL_LIGHTING);
	glDisable(GL_DEPTH_TEST);
	glColor3f(1.0, 1.0, 1.0);

	glBindTexture(GL_TEXTURE_2D, inputList[v].texture);
	glViewport( inputList[v].screenCoords.left, inputList[v].screenCoords.bottom, inputList[v].screenCoords.width, inputList[v].screenCoords.height);

	glMatrixMode( GL_PROJECTION );
	glLoadIdentity();
	glMatrixMode( GL_MODELVIEW );
	glLoadIdentity();
	
	//Draw a viewport-filling quad in normalized device coordinates
	//(projection and modelview are identity), sampling the input's texture.
	glBegin( GL_TRIANGLE_STRIP );
		glTexCoord2f( inputList[v].tu1, inputList[v].tv2 );
		glVertex2f( -1,  1 );
		glTexCoord2f( inputList[v].tu1, inputList[v].tv1 );
		glVertex2f( -1, -1 );
		glTexCoord2f( inputList[v].tu2, inputList[v].tv2 );
		glVertex2f(  1,  1 );
		glTexCoord2f( inputList[v].tu2, inputList[v].tv1 );
		glVertex2f(  1, -1 );			
	glEnd();
	/*glLineWidth( 2.0f );
	glColor3f( 0.8f, 0.8f, 0.8f );
	glDisable( GL_TEXTURE_2D );
	glBegin( GL_LINE_LOOP );
		glVertex2i( -1,  1);
		glVertex2i( -1, -1);
		glVertex2i(  1, -1);
		glVertex2i(  1,  1);
	glEnd();*/
}

inline void VideoManRendererOGL::updateTexture( size_t v )
{
	assert( v < inputList.size() && "updateTexture: Index out of range");
	if ( !inputList[v].activated )
		return;
	glBindTexture( GL_TEXTURE_2D, inputList[v].texture );
	if ( inputList[v].videoInput->pixelBuffer != NULL )
	{
		glTexSubImage2D( GL_TEXTURE_2D, 0, 0, 0, inputList[v].width, inputList[v].height,
			inputList[v].oglTextureFormat, inputList[v].dataType, (void*)inputList[v].videoInput->pixelBuffer );
	}
}

inline void VideoManRendererOGL::updateTexture( size_t v, const char *image )
{
	assert( v < inputList.size() && "updateTexture: Index out of range");
	assert( image != NULL && "updateTexture: Invalid Image");
	if ( !inputList[v].activated )
		return;

	glBindTexture( GL_TEXTURE_2D, inputList[v].texture );
	glTexSubImage2D( GL_TEXTURE_2D, 0, 0, 0, inputList[v].width, inputList[v].height,
		inputList[v].oglTextureFormat, inputList[v].dataType, (void*)image );
}
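
//Hypothetical example (not from the library): feeding an externally produced frame to
//the overload above. For a BGR24 input, "frame" would hold width*height*3 bytes laid
//out to match the width, height and pixel format registered for input v:
//	std::vector<char> frame( inputWidth * inputHeight * 3 );
//	//...fill frame with pixel data...
//	renderer.updateTexture( v, &frame[0] );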

void VideoManRendererOGL::activateAllVideoInputs()
{
	//activatedInputs = (int)inputList.size();
	activatedInputs = 0;
	for ( int f = 0; f < (int)inputList.size(); f++ )
	{
		if ( inputList[f].supported )
		{
			inputList[f].activated = true;
			activatedInputs++;
		}
	}
	changeScreenSize( screenSize.left, screenSize.bottom, screenSize.width, screenSize.height );

	/*if ( screenSize.top == 0 || screenSize.right == 0 )
		return;
	if ( inputList.size() == 0 )
		return;
	else if ( inputList.size() == 1 )
	{
		return activateVideoInput( 0, screenSize );
	}
	int numInputs = (int) inputList.size();
	float numColsF = sqrt( ( (float)screenSize.right / (float)screenSize.top ) * 1.33333f * (float)numInputs );
	int numCols = floor( numColsF );
	if ( numCols == 0 ) numCols = 1;
	if ( numCols > numInputs ) numCols = numInputs;
	//if ((numColsF-numCols)>(numColsF-(numCols+1)))
	//	numCols=numCols+1;
	int numFils = ceil( (float)numInputs / (float)numCols );

	float despC = 0.0f;
	float despF = 0.0f;
	float despCs = 0.0f;
	float despFs = 0.0f;
	int v = 0;
	for ( int f = 0; f < numFils; f++ )
	{
		for ( int c = 0; c < numCols; c++ )
		{
			
			if ( v >= numInputs )
				break;
			despC = (float)c / (float)numCols;
			despF = (float)f / (float)numFils;
			despCs = (float)(c+1) / (float)numCols;
			despFs = (float)(f+1) / (float)numFils;
