video.cxx

This is an MCU program from the OpenH323 project (OpenMCU).
PINDEX PVideoOutputDevice_OpenMCU::GetMaxFrameBytes()
{
  return GetMaxFrameBytesConverted(CalculateFrameBytes(frameWidth, frameHeight, colourFormat));
}


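// Forwards one complete YUV420P frame to the MCU connection. Partial-frame
// updates are rejected; the payload size passed on is width*height*3/2 bytes,
// i.e. a full-resolution Y plane plus quarter-resolution U and V planes.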
BOOL PVideoOutputDevice_OpenMCU::SetFrameData(unsigned x, unsigned y,
                                              unsigned width, unsigned height,
                                              const BYTE * data,
                                              BOOL /*endFrame*/)
{
  if (x != 0 || y != 0 || width != frameWidth || height != frameHeight) {
    PTRACE(1, "YUVFile output device only supports full frame writes");
    return FALSE;
  }

  return mcuConnection.OnIncomingVideo(data, width, height, width*height*3/2);
}


BOOL PVideoOutputDevice_OpenMCU::EndFrame()
{
  return TRUE;
}


///////////////////////////////////////////////////////////////////////////////////////

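// VideoFrameStoreList keeps one FrameStore per frame size, held in a map whose
// key packs width and height via WidthHeightToKey(). Entries are created on
// demand, owned by the list, and freed by the destructor below.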
VideoFrameStoreList::~VideoFrameStoreList()
{
  while (videoFrameStoreList.begin() != videoFrameStoreList.end()) {
    FrameStore * vf = videoFrameStoreList.begin()->second;
    delete vf;
    videoFrameStoreList.erase(videoFrameStoreList.begin());
  }
}

VideoFrameStoreList::FrameStore & VideoFrameStoreList::AddFrameStore(int width, int height)
{ 
  VideoFrameStoreListMapType::iterator r = videoFrameStoreList.find(WidthHeightToKey(width, height));
  if (r != videoFrameStoreList.end())
    return *(r->second);
  FrameStore * vf = new FrameStore(width, height);
  videoFrameStoreList.insert(VideoFrameStoreListMapType::value_type(WidthHeightToKey(width, height), vf)); 
  return *vf;
}

VideoFrameStoreList::FrameStore & VideoFrameStoreList::GetFrameStore(int width, int height) 
{
  VideoFrameStoreListMapType::iterator r = videoFrameStoreList.find(WidthHeightToKey(width, height));
  if (r != videoFrameStoreList.end())
    return *(r->second);

  FrameStore * vf = new FrameStore(width, height);
  videoFrameStoreList.insert(VideoFrameStoreListMapType::value_type(WidthHeightToKey(width, height), vf)); 
  return *vf;
}

void VideoFrameStoreList::InvalidateExcept(int w, int h)
{
  VideoFrameStoreListMapType::iterator r;
  for (r = videoFrameStoreList.begin(); r != videoFrameStoreList.end(); ++r) {
    unsigned int key = r->first;
    int kw, kh; KeyToWidthHeight(key, kw, kh);
    r->second->valid = (w == kw) && (h == kh);
  }
}

VideoFrameStoreList::FrameStore & VideoFrameStoreList::GetNearestFrameStore(int width, int height, BOOL & found)
{
  // see if exact match, and valid
  VideoFrameStoreListMapType::iterator r = videoFrameStoreList.find(WidthHeightToKey(width, height));
  if ((r != videoFrameStoreList.end()) && r->second->valid) {
    found = TRUE;
    return *(r->second);
  }

  // return the first valid framestore
  for (r = videoFrameStoreList.begin(); r != videoFrameStoreList.end(); ++r) {
    if (r->second->valid) {
      found = TRUE;
      return *(r->second);
    }
  }

  // not found: return the first entry as a placeholder rather than dereferencing
  // end(), which is undefined behaviour; callers must check 'found' before use
  found = FALSE;
  return *(videoFrameStoreList.begin()->second);
}

///////////////////////////////////////////////////////////////////////////////////////

static inline int ABS(int v)
{  return (v >= 0) ? v : -v; }

MCUVideoMixer::VideoMixPosition::VideoMixPosition(ConferenceMemberId _id,  ConferenceMember & /*mbr*/, int _x, int _y, int _w, int _h)
  : id(_id), xpos(_x), ypos(_y), width(_w), height(_h)
{ 
  audioLevel = 0; 
}

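// Fixed-point RGB to YCbCr conversion using ITU-R BT.601 studio-range
// coefficients scaled by 8192 (e.g. 2104/8192 ~= 0.257). The +4096 term rounds,
// the 131072 and 1048576 terms supply the +16 / +128 offsets, and the results
// are clamped to the upper video-range limits of 235 (Y) and 240 (U, V).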
void MCUVideoMixer::ConvertRGBToYUV(BYTE R, BYTE G, BYTE B, BYTE & Y, BYTE & U, BYTE & V)
{
  Y = (BYTE)PMIN(ABS(R *  2104 + G *  4130 + B *  802 + 4096 +  131072) / 8192, 235);
  U = (BYTE)PMIN(ABS(R * -1214 + G * -2384 + B * 3598 + 4096 + 1048576) / 8192, 240);
  V = (BYTE)PMIN(ABS(R *  3598 + G * -3013 + B * -585 + 4096 + 1048576) / 8192, 240);
}

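// Fills an entire w x h YUV420P frame with a single colour. In this planar
// format the Y plane is w*h bytes and the U and V planes are w*h/4 bytes each,
// so each plane can be painted with one memset.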
void MCUVideoMixer::FillYUVFrame(void * buffer, BYTE R, BYTE G, BYTE B, int w, int h)
{
  BYTE Y, U, V;
  ConvertRGBToYUV(R, G, B, Y, U, V);

  const int ysize = w*h;
  const int usize = w*h/4;
  const int vsize = w*h/4;

  memset((BYTE *)buffer + 0,             Y, ysize);
  memset((BYTE *)buffer + ysize,         U, usize);
  memset((BYTE *)buffer + ysize + usize, V, vsize);
}

void MCUVideoMixer::FillCIFYUVFrame(void * buffer, BYTE R, BYTE G, BYTE B)
{
  FillYUVFrame(buffer, R, G, B, CIF_WIDTH, CIF_HEIGHT);
}

void MCUVideoMixer::FillQCIFYUVFrame(void * buffer, BYTE R, BYTE G, BYTE B)
{
  FillYUVFrame(buffer, R, G, B, QCIF_WIDTH, QCIF_HEIGHT);
}

void MCUVideoMixer::FillCIFYUVRect(void * frame, BYTE R, BYTE G, BYTE B, int xPos, int yPos, int rectWidth, int rectHeight)
{
  FillYUVRect(frame, CIF_WIDTH, CIF_HEIGHT, R, G, B, xPos, yPos, rectWidth, rectHeight);
}

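// Fills a rectangle of a YUV420P frame with a solid colour. Because chroma is
// subsampled 2x2, each loop iteration paints two luma rows but only one row of
// the half-width U and V planes; xPos, yPos, rectWidth and rectHeight are
// expected to be even.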
void MCUVideoMixer::FillYUVRect(void * frame, int frameWidth, int frameHeight, BYTE R, BYTE G, BYTE B, int xPos, int yPos, int rectWidth, int rectHeight)
{
  // This routine fills a rectangular region of the video frame with a solid colour.
  // All rectangle fills funnel through here, so support for other image formats
  // only needs to be added in this one place.

  int offset       = ( yPos * frameWidth ) + xPos;
  int colourOffset = ( (yPos * frameWidth) >> 2) + (xPos >> 1);

  BYTE Y, U, V;
  ConvertRGBToYUV(R, G, B, Y, U, V);

  BYTE * Yptr = (BYTE*)frame + offset;
  BYTE * UPtr = (BYTE*)frame + (frameWidth * frameHeight) + colourOffset;
  BYTE * VPtr = (BYTE*)frame + (frameWidth * frameHeight) + (frameWidth * frameHeight/4)  + colourOffset;

  int rr;
  int halfRectWidth = rectWidth >> 1;
  int halfWidth     = frameWidth >> 1;
  
  for (rr = 0; rr < rectHeight;rr+=2) {
    memset(Yptr, Y, rectWidth);
    Yptr += frameWidth;
    memset(Yptr, Y, rectWidth);
    Yptr += frameWidth;

    memset(UPtr, U, halfRectWidth);
    memset(VPtr, V, halfRectWidth);

    UPtr += halfWidth;
    VPtr += halfWidth;
  }
}

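// Copies a width x height YUV420P sub-image into a CIF (352x288) destination
// frame at (xpos, ypos), one plane at a time. The chroma planes are copied at
// half resolution, so the position and size should be even.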
void MCUVideoMixer::CopyRectIntoCIF(const void * _src, void * _dst, int xpos, int ypos, int width, int height)
{
  BYTE * src = (BYTE *)_src;
  BYTE * dst = (BYTE *)_dst + (ypos * CIF_WIDTH) + xpos;

  BYTE * dstEnd = (BYTE *)_dst + CIF_SIZE;   // bound checks run against the end of the whole destination frame
  int y;

  // copy Y
  for (y = 0; y < height; ++y) {
    PAssert(dst + width < dstEnd, "Y write overflow");
    memcpy(dst, src, width);
    src += width;
    dst += CIF_WIDTH;
  }

  // copy U
  dst = (BYTE *)_dst + (CIF_WIDTH * CIF_HEIGHT) + (ypos * CIF_WIDTH/4) + xpos / 2;
  for (y = 0; y < height/2; ++y) {
    PAssert(dst + width/2 <= dstEnd, "U write overflow");
    memcpy(dst, src, width/2);
    src += width/2;
    dst += CIF_WIDTH/2;
  }

  // copy V
  dst = (BYTE *)_dst + (CIF_WIDTH * CIF_HEIGHT) + (CIF_WIDTH * CIF_HEIGHT) / 4 + (ypos * CIF_WIDTH/4) + xpos / 2;
  for (y = 0; y < height/2; ++y) {
    PAssert(dst + width/2 <= dstEnd, "V write overflow");
    memcpy(dst, src, width/2);
    src += width/2;
    dst += CIF_WIDTH/2;
  }
}

void MCUVideoMixer::CopyRectIntoQCIF(const void * _src, void * _dst, int xpos, int ypos, int width, int height)
{
  BYTE * src = (BYTE *)_src;
  BYTE * dst = (BYTE *)_dst + (ypos * QCIF_WIDTH) + xpos;

  BYTE * dstEnd = (BYTE *)_dst + QCIF_SIZE;  // bound checks run against the end of the whole destination frame
  int y;

  // copy Y
  for (y = 0; y < height; ++y) {
    PAssert(dst + width < dstEnd, "Y write overflow");
    memcpy(dst, src, width);
    src += width;
    dst += QCIF_WIDTH;
  }

  // copy U
  dst = (BYTE *)_dst + (QCIF_WIDTH * QCIF_HEIGHT) + (ypos * QCIF_WIDTH/4) + xpos / 2;
  for (y = 0; y < height/2; ++y) {
    PAssert(dst + width/2 <= dstEnd, "U write overflow");
    memcpy(dst, src, width/2);
    src += width/2;
    dst += QCIF_WIDTH/2;
  }

  // copy V
  dst = (BYTE *)_dst + (QCIF_WIDTH * QCIF_HEIGHT) + (QCIF_WIDTH * QCIF_HEIGHT) / 4 + (ypos * QCIF_WIDTH/4) + xpos / 2;
  for (y = 0; y < height/2; ++y) {
    PAssert(dst + width/2 <= dstEnd, "V write overflow");
    memcpy(dst, src, width/2);
    src += width/2;
    dst += QCIF_WIDTH/2;
  }
}

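// Upscales a QCIF (176x144) YUV420P frame to CIF (352x288) by simple pixel
// replication: every source sample is written twice per row and every source
// row is emitted twice, for each of the Y, U and V planes.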
void MCUVideoMixer::ConvertQCIFToCIF(const void * _src, void * _dst)
{
  BYTE * src = (BYTE *)_src;
  BYTE * dst = (BYTE *)_dst;

  //BYTE * dstEnd = dst + CIF_SIZE;
  int y, x;
  BYTE * srcRow;

  // copy Y
  for (y = 0; y < QCIF_HEIGHT; y++) {
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    src += QCIF_WIDTH;
  }

  // copy U
  for (y = 0; y < QCIF_HEIGHT/2; y++) {
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH/2; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH/2; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    src += QCIF_WIDTH/2;
  }

  // copy V
  for (y = 0; y < QCIF_HEIGHT/2; y++) {
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH/2; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    srcRow = src;
    for (x = 0; x < QCIF_WIDTH/2; x++) {
      dst[0] = dst[1] = *srcRow++;
      dst += 2;
    }
    src += QCIF_WIDTH/2;
  }
}



///////////////////////////////////////////////////////////////////////////////////////

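// The simple mixer starts with a CIF-sized frame store and scratch image, plus a
// YUV420P-to-YUV420P colour converter that is reused purely for rescaling
// between frame sizes (e.g. CIF down to QCIF in ReadSrcFrame below).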
MCUSimpleVideoMixer::MCUSimpleVideoMixer(BOOL _forceScreenSplit)
  : forceScreenSplit(_forceScreenSplit)
{
  frameStores.AddFrameStore(CIF_WIDTH, CIF_HEIGHT);
  imageStore.SetSize(CIF_SIZE);

  converter = PColourConverter::Create("YUV420P", "YUV420P", CIF_WIDTH, CIF_HEIGHT);
}

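// Produces the video sent to a member. With a single member (rows == 0) the
// frame store is painted black or with the configured empty-media frame; with
// two members (rows == 1) FALSE is returned so the caller shows the other
// member's video full screen; otherwise the mixed layout is read.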
BOOL MCUSimpleVideoMixer::ReadFrame(ConferenceMember &, void * buffer, int width, int height, PINDEX & amount)
{
  PWaitAndSignal m(mutex);

  // special case of one member means fill with black
  if (rows == 0) {
    VideoFrameStoreList::FrameStore & fs = frameStores.GetFrameStore(width, height);
    if (!fs.valid) {
      if (!OpenMCU::Current().GetEmptyMediaFrame(fs.data.GetPointer(), width, height, amount))
        FillYUVFrame(fs.data.GetPointer(), 0, 0, 0, width, height);
      fs.valid = TRUE;
    }
    memcpy(buffer, fs.data.GetPointer(), amount);
    return TRUE;
  }

  // special case of two members means we do nothing, and tell caller to look for full screen version of other video
  if (rows == 1) 
    return FALSE;

  return ReadMixedFrame(buffer, width, height, amount);
}

BOOL MCUSimpleVideoMixer::ReadSrcFrame(VideoFrameStoreList & srcFrameStores, void * buffer, int width, int height, PINDEX & amount)
{
  PWaitAndSignal m(mutex);

  VideoFrameStoreList::FrameStore & cifFs = srcFrameStores.GetFrameStore(CIF_WIDTH, CIF_HEIGHT);

  // get the mixed frame
  if ((width == CIF_WIDTH) && (height == CIF_HEIGHT)) {
    if (!cifFs.valid) {
      if (!OpenMCU::Current().GetEmptyMediaFrame(cifFs.data.GetPointer(), width, height, amount))
        MCUVideoMixer::FillYUVFrame(cifFs.data.GetPointer(), 0, 0, 0, CIF_WIDTH, CIF_HEIGHT);
      cifFs.valid = TRUE;
    }
    memcpy(buffer, cifFs.data.GetPointer(), amount);
  }

  else if (width == QCIF_WIDTH && height == QCIF_HEIGHT) {
    VideoFrameStoreList::FrameStore & qcifFs = srcFrameStores.GetFrameStore(width, height);
    if (!qcifFs.valid) {
      if (!cifFs.valid) {
        if (!OpenMCU::Current().GetEmptyMediaFrame(qcifFs.data.GetPointer(), width, height, amount))
          MCUVideoMixer::FillYUVFrame(qcifFs.data.GetPointer(), 0, 0, 0, width, height);
      } else {
        converter->SetSrcFrameSize(CIF_WIDTH, CIF_HEIGHT);
        converter->SetDstFrameSize(width, height, TRUE);
        PINDEX srcBytes = CIF_SIZE;        // do not shadow the 'amount' parameter used below
        PINDEX bytesReturned = QCIF_SIZE;
        converter->Convert(cifFs.data.GetPointer(), qcifFs.data.GetPointer(), srcBytes, &bytesReturned);
      }
      qcifFs.valid = TRUE;
    }
    memcpy(buffer, qcifFs.data.GetPointer(), amount);
  }

  return TRUE;
}



BOOL MCUSimpleVideoMixer::ReadMixedFrame(void * buffer, int width, int height, PINDEX & amount)
