⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 basestrm.cpp

📁 采集卡的驱动编程，具有较大的参考价值，特别是对开发视频采集软件的工程师有用
💻 CPP
📖 第 1 页 / 共 3 页
字号:
			#ifdef __VIDCAP_TRACEON__
			TVS << TraceInfo << "VidcapBaseStream::GetDroppedFramesProperty -- Got KSPROPERTY_DROPPEDFRAMES_CURRENT\n";
			#endif

			PKSPROPERTY_DROPPEDFRAMES_CURRENT_S pDroppedFrames = 
                (PKSPROPERTY_DROPPEDFRAMES_CURRENT_S) pSPD->PropertyInfo;

            pDroppedFrames->PictureNumber =		m_FrameInfo.PictureNumber;
            pDroppedFrames->DropCount =			m_FrameInfo.DropCount;
            pDroppedFrames->AverageFrameSize =	m_pVideoInfoHeader->bmiHeader.biSizeImage;
                
            pSrb->ActualBytesTransferred = sizeof (KSPROPERTY_DROPPEDFRAMES_CURRENT_S);
			pSrb->Status = STATUS_SUCCESS;

 			#ifdef __VIDCAP_TRACEON__
			TVS << TraceInfo << "VidcapBaseStream::GetDroppedFramesProperty -- STATUS_SUCCESS\n";
			#endif
       }
        break;

    default:
 		#ifdef __VIDCAP_TRACEON__
		TVS << TraceInfo << "VidcapBaseStream::GetDroppedFramesProperty -- STATUS_NOT_IMPLEMENTED\n";
		#endif

		pSrb->Status = STATUS_NOT_IMPLEMENTED;
        break;
    }

 	#ifdef __VIDCAP_TRACEON__
	TVS << TraceInfo << "VidcapBaseStream::GetDroppedFramesProperty -- End\n";
	#endif
}

/*
** CaptureFrame()
**
**    Routine to capture video frames based on a timer.
**
**    Note:  Devices capable of using interrupts should always
**           trigger capture on a VSYNC interrupt, and not use a timer.
**
** Arguments:
**
** Returns: nothing
**
** Side Effects:  none
*/

void VidcapBaseStream::CaptureFrame()
{
	// -- excessive trace... use ONLY for DEBUG --
	// 	#ifdef __VIDCAP_TRACEON__
	//	TVS << TraceInfo << "VidcapBaseStream::CaptureFrame -- Start\n";
	//	#endif

    PKSSTREAM_HEADER        pDataPacket;
    PKS_FRAME_INFO          pFrameInfo;

    // If we're stopped and the timer is still running, just return.
    // This will stop the timer.

    if (KSSTATE_STOP == m_eState)
	{
	 	#ifdef __VIDCAP_TRACEON__
		TVS << TraceInfo << "VidcapBaseStream::CaptureFrame -- KSSTATE_STOP, returning without capture\n";
		#endif

		return;
	}

    
    // Find out what time it is, if we're using a clock.
    // The results are cached in m_QST_StreamTime / m_QST_Now for the
    // capture-scheduling decision below.

    if (NULL != m_hMasterClockHandle)
	{
        HW_TIME_CONTEXT TimeContext;

        TimeContext.HwDeviceExtension = (_HW_DEVICE_EXTENSION*)Adapter(); 
        TimeContext.HwStreamObject = m_pClassObject;
        TimeContext.Function = TIME_GET_STREAM_TIME;
	    TimeContext.Time = 0;
		TimeContext.SystemTime = 0;

        // Synchronous query of the graph's master clock
        StreamClassQueryMasterClockSync (
            m_hMasterClockHandle,
            &TimeContext);

        m_QST_StreamTime = TimeContext.Time;
        m_QST_Now = TimeContext.SystemTime;
    
        // First tick after (re)start: schedule the first capture one
        // average frame period from the current stream time.
        if (m_QST_NextFrame == 0)
		{  
            m_QST_NextFrame = m_QST_StreamTime + m_pVideoInfoHeader->AvgTimePerFrame;
        }
		
		//		-- excessive TRACE... use for DEBUG ONLY --
		//	 	#ifdef __VIDCAP_TRACEON__
		//       TVS.Trace(TraceInfo, "VidcapBaseStream::CaptureFrame -- Time=%16lx\n", TimeContext.Time);
		//       TVS.Trace(TraceInfo, "VidcapBaseStream::CaptureFrame -- SysTime=%16lx\n", TimeContext.SystemTime);
		//		#endif
    }


    // Only capture in the RUN state

    if (KSSTATE_RUN == m_eState)
	{  

        //
        // Determine if it is time to capture a frame based on 
        // how much time has elapsed since capture started.
        // If there isn't a clock available, then capture immediately.
        //

        if ((NULL == m_hMasterClockHandle) ||
             (m_QST_StreamTime >= m_QST_NextFrame))
		{

            PHW_STREAM_REQUEST_BLOCK pSrb;

            // Increment the picture count (usually this is VSYNC count).
            // Note: this counts every capture opportunity, whether or not
            // an SRB is available to receive the frame.

            m_FrameInfo.PictureNumber++;

            //
            // Get the next queue SRB (if any)
            //

            pSrb = DequeueSrb();

            if (NULL != pSrb)
			{

	 			#ifdef __VIDCAP_TRACEON__
					#ifdef __VIDCAP_CAPTURE_DEBUG__
					TVS << TraceAlways << "VidcapBaseStream::CaptureFrame -- KSSTATE_RUN, Capturing Frame\n";
					#endif
				#endif
                // The KS_FRAME_INFO extended header immediately follows the
                // stream header in the buffer array.
                pDataPacket = pSrb->CommandData.DataBufferArray;
                pFrameInfo = (PKS_FRAME_INFO) (pDataPacket + 1);

                //
                // Call the routine which synthesizes images
                //
                
				// -- need to get image type from Adapter... jak
                ImageSynth (pSrb, IMAGE_XFER_NTSC_EIA_100AMP_100SAT);

                // Set additional info fields about the data captured such as:
                //   Frames Captured
                //   Frames Dropped
                //   Field Polarity
                
                // Preserve the size the class driver placed in the extended
                // header before overwriting the whole struct below.
                m_FrameInfo.ExtendedHeaderSize = pFrameInfo->ExtendedHeaderSize;

                *pFrameInfo = m_FrameInfo;

                // Init the flags to zero
                pDataPacket->OptionsFlags = 0;

                // Set the discontinuity flag if frames have been previously
                // dropped, and then reset our internal flag

                if (m_fDiscontinuity) {
                    pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_DATADISCONTINUITY;
                    m_fDiscontinuity = FALSE;
                }

                //
                // Return the timestamp for the frame
                //
                // Numerator/Denominator of 1/1 means PresentationTime.Time
                // is used directly (no rate conversion).
                pDataPacket->PresentationTime.Numerator = 1;
                pDataPacket->PresentationTime.Denominator = 1;
                pDataPacket->Duration = m_pVideoInfoHeader->AvgTimePerFrame;

                //
                // if we have a master clock AND this is the capture stream
                // 
                if (m_hMasterClockHandle && (Adapter()->IsStream(0,this)))
				{

                    pDataPacket->PresentationTime.Time = m_QST_StreamTime;
                    pDataPacket->OptionsFlags |= 
                        KSSTREAM_HEADER_OPTIONSF_TIMEVALID |
                        KSSTREAM_HEADER_OPTIONSF_DURATIONVALID;
                }
                else
				{
                    //
                    // no clock or the preview stream, so just mark the time as unknown
                    //
                    pDataPacket->PresentationTime.Time = 0;
                    // clear the timestamp valid flags
                    pDataPacket->OptionsFlags &= 
                        ~(KSSTREAM_HEADER_OPTIONSF_TIMEVALID |
                          KSSTREAM_HEADER_OPTIONSF_DURATIONVALID);
                }

                // Every frame we generate is a key frame (aka SplicePoint)
                // Delta frames (B or P) should not set this flag

                pDataPacket->OptionsFlags |= KSSTREAM_HEADER_OPTIONSF_SPLICEPOINT;

                // Complete the SRB and hand the filled buffer back upstream.
                NextSrb(pSrb);
                
            } // if we have an SRB

            else
			{

                //
                // No buffer was available when we should have captured one

                // Increment the counter which keeps track of
                // dropped frames

	 			#ifdef __VIDCAP_TRACEON__
				TVS << TraceAlways << "VidcapBaseStream::CaptureFrame -- KSSTATE_RUN, No SRB, Frame Dropped\n";
				#endif

                m_FrameInfo.DropCount++;

                // Set the (local) discontinuity flag
                // This will cause the next packet processed to have the
                //   KSSTREAM_HEADER_OPTIONSF_DATADISCONTINUITY flag set.

                m_fDiscontinuity = TRUE;

            }

            // Figure out when to capture the next frame.
            // Advancing by the average period (rather than re-reading the
            // clock) keeps the long-term frame rate drift-free.
            m_QST_NextFrame += m_pVideoInfoHeader->AvgTimePerFrame;

        } // endif time to capture a frame
    } // endif we're running

	//  -- excessive trace use for DEBUG only --
	// 	#ifdef __VIDCAP_TRACEON__
	//	TVS << TraceInfo << "VidcapBaseStream::CaptureFrame -- End\n";
	//	#endif
}

/*
** ImageSynth()
**
**   Synthesizes NTSC color bars, white, black, and grayscale images
**
** Arguments:
**
**   pSrb - The stream request block for the Video stream
**   ImageXferCommands - Index specifying the image to generate
**
** Returns:
**
**   Nothing
**
** Side Effects:  none
*/

//
// EIA-189-A Standard color bar definitions
//

// 75% Amplitude, 100% Saturation
// One row per RGB plane; one column per EIA-189-A bar, left to right.
const static UCHAR NTSCColorBars75Amp100SatRGB24 [3][8] =
{
    //  Whi  Yel  Cya  Grn  Mag  Red  Blu  Blk
    { 191, 191,   0,   0, 191, 191,   0,   0 },     // Red plane
    { 191, 191, 191, 191,   0,   0,   0,   0 },     // Green plane
    { 191,   0, 191,   0, 191,   0, 191,   0 }      // Blue plane
};

// 100% Amplitude, 100% Saturation
// One row per RGB plane; one column per EIA-189-A bar, left to right.
const static UCHAR NTSCColorBars100Amp100SatRGB24 [3][8] =
{
    //  Whi  Yel  Cya  Grn  Mag  Red  Blu  Blk
    { 255, 255,   0,   0, 255, 255,   0,   0 },     // Red plane
    { 255, 255, 255, 255,   0,   0,   0,   0 },     // Green plane
    { 255,   0, 255,   0, 255,   0, 255,   0 }      // Blue plane
};




void VidcapBaseStream::ImageSynth (
    PHW_STREAM_REQUEST_BLOCK pSrb,
    ImageXferCommands Command
    )
{
    // Synthesizes one scan line of the requested test pattern into
    // m_LineBuffer, then replicates that line to every row of the SRB's
    // data buffer and reports biSizeImage bytes used.
    //
    //   pSrb    - stream request block whose first data buffer receives the image
    //   Command - selects the pattern (color bars, black, white, gray ramp)

    UINT biWidth        =   m_pVideoInfoHeader->bmiHeader.biWidth;
    // NOTE(review): biHeight is read into a UINT; a top-down DIB (negative
    // biHeight) would produce a huge loop count below.  Assumes VerifyFormat
    // only accepts bottom-up formats -- confirm.
    UINT biHeight       =   m_pVideoInfoHeader->bmiHeader.biHeight;
    UINT biSizeImage    =   m_pVideoInfoHeader->bmiHeader.biSizeImage;
    UINT biWidthBytes   =   KS_DIBWIDTHBYTES (m_pVideoInfoHeader->bmiHeader);
    UINT biBitCount     =   m_pVideoInfoHeader->bmiHeader.biBitCount;

    UINT                    Line;
    PUCHAR                  pLineBuffer;

    PKSSTREAM_HEADER        pDataPacket = pSrb->CommandData.DataBufferArray;
    PUCHAR                  pImage =  (PUCHAR)pDataPacket->Data;

    DEBUG_ASSERT (pSrb->NumberOfBuffers == 1);

	#ifdef __VIDCAP_TRACEON__
	TVS << TraceInfo << "VidcapBaseStream::ImageSynth -- Start\n";
		#ifdef __VIDCAP_CAPTURE_DEBUG__
		TVS.Trace(TraceInfo, "biSizeImage=%d, DataUsed=%d\n", 
            biSizeImage, pDataPacket->DataUsed);
	    TVS.Trace(TraceInfo, "biWidth=%d biHeight=%d WidthBytes=%d bpp=%d\n", 
            biWidth, biHeight, biWidthBytes, biBitCount);
		TVS.Trace(TraceInfo, "pImage=%x\n", pImage);
		#endif
	#endif

    // 
    // Synthesize a single line of image data, which will then be replicated
    //

    pLineBuffer = &m_LineBuffer[0];

    // FIX: pre-clear the whole stride.  The 24bpp color-bar cases below
    // write only biWidth*3 bytes, but the replication loop copies
    // biWidthBytes (DWORD-aligned stride) bytes per row, so any alignment
    // padding would otherwise carry stale m_LineBuffer contents into the
    // outgoing frame.
    RtlFillMemory(
        pLineBuffer,
        biWidthBytes,
        (UCHAR) 0);

    switch (biBitCount) {

    case 24: 
    {

        switch (Command) {

        case IMAGE_XFER_NTSC_EIA_100AMP_100SAT:
            // 100% saturation color bars; each of the 8 bars spans
            // biWidth/8 pixels, RGB triplets written per pixel.
            {
                UINT x, col;
                PUCHAR pT = pLineBuffer;
        
                for (x = 0; x < biWidth; x++) {
                    col = (x * 8) / biWidth;
                    *pT++ = NTSCColorBars100Amp100SatRGB24[0][col]; // Red
                    *pT++ = NTSCColorBars100Amp100SatRGB24[1][col]; // Green
                    *pT++ = NTSCColorBars100Amp100SatRGB24[2][col]; // Blue
                }
            }
            break;

        case IMAGE_XFER_NTSC_EIA_75AMP_100SAT:
            // 75% Saturation color bars
            {
                UINT x, col;
                PUCHAR pT = pLineBuffer;
        
                for (x = 0; x < biWidth; x++) {
                    col = (x * 8) / biWidth;
                    *pT++ = NTSCColorBars75Amp100SatRGB24[0][col]; // Red
                    *pT++ = NTSCColorBars75Amp100SatRGB24[1][col]; // Green
                    *pT++ = NTSCColorBars75Amp100SatRGB24[2][col]; // Blue
                }
            }
            break;

        case IMAGE_XFER_BLACK:
            // All black (line already zeroed above; kept for clarity)
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) 0);
            break;

        case IMAGE_XFER_WHITE:
            // All white
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) 255);
            break;

        case IMAGE_XFER_GRAY_INCREASING:
            // grayscale ramp: level advances by 8 per captured frame and
            // wraps via the UCHAR truncation
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) (m_FrameInfo.PictureNumber * 8));
            break;

        default:
            break;
        }

    } // switch 24

    break;

    case 8: 
    {

        switch (Command) {

        case IMAGE_XFER_NTSC_EIA_75AMP_100SAT:
            {
                // TODO: palettized color bars not yet implemented
                // (line remains zeroed -> black frame)
            }
            break;

        case IMAGE_XFER_NTSC_EIA_100AMP_100SAT:
            {
                // TODO: palettized color bars not yet implemented
                // (line remains zeroed -> black frame)
            }
            break;

        case IMAGE_XFER_BLACK:
            // All black (palette index 0; line already zeroed above)
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) 0);
            break;

        case IMAGE_XFER_WHITE:
            // All white (palette index 15)
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) 15);
            break;

        case IMAGE_XFER_GRAY_INCREASING:
            // ramp over the 16 palette gray levels, one step per frame
            RtlFillMemory(
		        pLineBuffer,
		        biWidthBytes,
		        (UCHAR) (m_FrameInfo.PictureNumber % 16));
            break;

        default:
            break;
        }

    } // switch 8

        break;

    default:
        // Unsupported bit depth: line stays zeroed, frame is black
        break;
    }

    // 
    // Copy the single line synthesized to all rows of the image
    //

    for (Line = 0; Line < biHeight; Line++, pImage += biWidthBytes) {
        RtlCopyMemory(
            pImage,
            pLineBuffer,
            biWidthBytes);
    }

    //
    // Report back the actual number of bytes copied to the destination buffer
    // (This can be smaller than the allocated buffer for compressed images)
    //

    pDataPacket->DataUsed = biSizeImage;
	#ifdef __VIDCAP_TRACEON__
	TVS << TraceInfo << "VidcapBaseStream::ImageSynth -- End\n";
	#endif
}


///////////////////////////////////////////////////////////////////////
//	VidcapBaseStream::OnProposeDataFormat -- SRB_PROPOSE_DATA_FORMAT
//		Called by DispatchControl. Proposes new data format but doesn't
//		change it.
//
void VidcapBaseStream::OnProposeDataFormat(PHW_STREAM_REQUEST_BLOCK pSrb)
{
	// Validate the proposed open format and report the verdict to the
	// class driver.  The stream's current format is NOT changed here.
	const bool bAccepted = VerifyFormat(pSrb->CommandData.OpenFormat);

	#ifdef __VIDCAP_TRACEON__
	TVS << TraceInfo
		<< (bAccepted
			? "VidcapBaseStream::OnProposeDataFormat -- STATUS_SUCCESS\n"
			: "VidcapBaseStream::OnProposeDataFormat -- STATUS_NO_MATCH\n");
	#endif

	// Complete the SRB with the matching status code
	NextSrb(pSrb, bAccepted ? STATUS_SUCCESS : STATUS_NO_MATCH);
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -