// robotrecognition.cc
// File: RobotRecognition.cc
// Date: 25/08/03
// We acknowledge the work of UNSW for many of the ideas used as the basic foundation of this class
#include "../Common/VisionObject.h"
#include "../Common/Common.h"
#include "VisionData.h"
#include "../Globals.h"
#include "ObjectRecognition.h"
#include "RobotRecognition.h"
#include <math.h>
#include <list>
#include <float.h>
// Construct the recogniser with fixed-capacity storage: at most four
// robots of each team colour can be reported for a single frame.
RobotRecognition::RobotRecognition() {
sortedBlobs_ = 0; // no blobs sorted yet for the current frame
redRobots_ = new VisionObject[4]; // candidate vision objects believed to be red robots
blueRobots_ = new VisionObject[4]; // candidate vision objects believed to be blue robots
redRobotBlobs_ = new Blob[4]; // merged bounding-box blobs for the red candidates
blueRobotBlobs_ = new Blob[4]; // merged bounding-box blobs for the blue candidates
}
// Entry point for per-frame robot detection: resets last frame's results,
// then runs the detection pipeline once for each team colour.
void RobotRecognition::FindRobot(VisionData* vd) {
visionData_ = vd;
numRedRobots_ = 0;
numBlueRobots_ = 0;
// Clear the per-slot blob links from the previous frame.
for (int slot = 0; slot < 4; ++slot) {
redRobots_[slot].topBlob_ = NULL;
blueRobots_[slot].topBlob_ = NULL;
}
FindRobotWork(c_ROBOT_RED, redRobots_, redRobotBlobs_, &numRedRobots_);
FindRobotWork(c_ROBOT_BLUE, blueRobots_, blueRobotBlobs_, &numBlueRobots_);
}
// Runs the full detection pipeline for one team colour:
// noise-filter -> roll-align -> sort -> merge into robot candidates.
void RobotRecognition::FindRobotWork(Colour c_ROBOT_COLOUR, VisionObject* robots, Blob* robotBlobs, int* numRobots) {
// Robot detection is not attempted when the head's effective tilt is
// excessive (camera pitched beyond 80 degrees).
const bool tiltTooSteep = RAD_TO_DEG(visionData_->effHeadTilt_) > 80;
if (tiltTooSteep) {
return;
}
// 1. Remove blobs whose elevation places them above the tops of the
//    beacons and goals — those cannot be robots on the field.
RobotNoiseFilter(c_ROBOT_COLOUR);
// 2. Apply the effective head roll to every blob so the heuristic works
//    in any head orientation: blobs end up aligned with the field plane.
RotateBlobs(c_ROBOT_COLOUR, visionData_->effHeadRoll_);
// 3. Order the blobs by centroid, left to right.
QuickSort(c_ROBOT_COLOUR);
// 4. Merge adjacent blobs of this colour into robot candidates.
FindColourRobot(c_ROBOT_COLOUR, robots, robotBlobs, numRobots);
}
// Merges the colour's sorted, roll-aligned blobs left-to-right into robot
// candidates, builds a VisionObject per accepted candidate, estimates the
// distance (infrared when its sanity checks pass, otherwise by walking down
// to the field green below the blob), and finally culls candidates that
// conflict with confidently-seen beacons/goals or sit too high.
// NOTE(review): the "finalise candidate" logic appears twice — once when a
// horizontal gap starts a new robot, once when the last sorted blob is
// reached — with small differences flagged inline; confirm they are intended.
void RobotRecognition::FindColourRobot(Colour c_ROBOT_COLOUR, VisionObject* robots, Blob* robotBlobs, int* numRobots) {
*numRobots = 0; // Reset the number of robots we have found
// find the bottom patch
int buffer = 0; // max horizontal gap (px) tolerated when merging blobs; recomputed per candidate at the bottom of the loop
// finding the leftmost adjoining blob
// These four values are the extremities of the robot bounding box,
// initialised to an "empty" box (min at image extent, max at 0) so the
// first blob always expands it.
robotBlobs[*numRobots].minXRotated = currentImageWidth_;
robotBlobs[*numRobots].maxXRotated = 0;
robotBlobs[*numRobots].minYRotated = currentImageHeight_;
robotBlobs[*numRobots].maxYRotated = 0;
// For each blob defined for the colour robot which we are currently finding (order of centroids from left to right)
if (sortedBlobs_ == 0) {
return; // nothing of this colour survived the noise filter
}
// Set the initial area counter
robotBlobs[0].area = 0;
int i = 0;
// Iterate through the blobs for the specified robot colours to see if they can be merged
Blob* blob = NULL;
for (i = 0; i < sortedBlobs_; i++) {
blob = &(visionData_->blobs_[c_ROBOT_COLOUR][i]); // Retrieve the blob from the array
if (blob->subsumed) continue; // already absorbed into another blob
// A blob is merged into the current candidate if it overlaps the
// candidate's left edge or starts within 'buffer' px of its right edge;
// otherwise it begins a new robot.
bool newRobot = true;
if (blob->minXRotated <= robotBlobs[*numRobots].minXRotated) newRobot = false;
if (blob->minXRotated <= robotBlobs[*numRobots].maxXRotated+buffer) newRobot = false;
if (!newRobot) {
// Expand the bounding box to include these values
robotBlobs[*numRobots].minXRotated = MIN(robotBlobs[*numRobots].minXRotated,blob->minXRotated);
robotBlobs[*numRobots].maxXRotated = MAX(robotBlobs[*numRobots].maxXRotated,blob->maxXRotated);
robotBlobs[*numRobots].minYRotated = MIN(robotBlobs[*numRobots].minYRotated,blob->minYRotated);
robotBlobs[*numRobots].maxYRotated = MAX(robotBlobs[*numRobots].maxYRotated,blob->maxYRotated);
} else {
// intentionally empty: the newRobot case is handled just below
}
if (newRobot) {
// This case should only be entered when there is a break in the x distance between the robots
// Assign the current bounding box dimensions to current robot vision object and store the robot
// Unrotate the robot blob back into image coordinates
UnrotateBlobs(&(robotBlobs[*numRobots]),visionData_->effHeadRoll_);
int centreX = (int)(robotBlobs[*numRobots].maxX + robotBlobs[*numRobots].minX)/2;
int centreY = (int)(robotBlobs[*numRobots].maxY + robotBlobs[*numRobots].minY)/2;
// Confidence is the box height as a percentage of the image height.
int confidence = (int) ((robotBlobs[*numRobots].maxY - robotBlobs[*numRobots].minY) * 100/currentImageHeight_);
if (c_ROBOT_COLOUR == c_ROBOT_RED) {
visionObjects_[numVisionObjects_].SetData(VisionObject::OT_ROBOT_RED, &robotBlobs[*numRobots], NULL, visionData_->CalculateHeading(centreX), visionData_->CalculateElevation(centreY), 1, centreX, centreY, confidence);
} else {
visionObjects_[numVisionObjects_].SetData(VisionObject::OT_ROBOT_BLUE, &robotBlobs[*numRobots], NULL, visionData_->CalculateHeading(centreX), visionData_->CalculateElevation(centreY), 1, centreX, centreY, confidence);
}
// Determine the distance to the robot using the most reliable method available
double distance = -1;
double infraDist = visionData_->GetInfraredDistance(&robotBlobs[*numRobots],c_ROBOT_COLOUR);
if (infraDist != -1) {
// if -1 is not returned then we can use the distance from infrared as it has passed the sanity checks associated with using infrared
distance = infraDist;
} else {
// Otherwise walk downwards from the bottom-centre of the box until
// field green (or an image edge) is found, and range to that point.
int currX = centreX;
int currY = robotBlobs[*numRobots].maxY;
int ratio;
double effRoll = visionData_->effHeadRoll_ + visionData_->bodyRoll_;
// ratio approximates cot(effRoll): vertical steps per one horizontal
// step so the walk goes "straight down" relative to the field.
// NOTE(review): exact floating-point == 0.0 test; any non-zero roll,
// however tiny, takes the division branch.
if (effRoll == 0.0) ratio = 1000;
else ratio = (int) (cos(effRoll)/sin(effRoll));
bool foundGreen = false;
unsigned char* pixelPointer = NULL;
for (int n = 0; n < 10; n++) {
currY++;
if (ratio!=0 && (n+1)%ratio == 0) currX -= ratio/ABS(ratio); //Ensure we're going directly down (approximately) instead of just down in the image
// NOTE(review): the pixel is read before the edge tests below, so
// currX == currentImageWidth_ or currY == currentImageHeight_ is
// already one index past the last row/column — confirm the
// classified_ buffer tolerates this.
pixelPointer = &(visionData_->classified_[currY*currentImageWidth_+currX]);
if (*pixelPointer == c_FIELD_GREEN || currX == 0 || currX == currentImageWidth_ || currY == currentImageHeight_) {
foundGreen = true;
break;
}
}
if (!foundGreen) {
// Fall back to the bottom-centre of the box itself.
currX = centreX;
currY = robotBlobs[*numRobots].maxY;
}
distance = ABS(visionData_->GetDistanceToPoint(currX, currY,HEIGHTOFNECK));
}
// Only accept candidates whose rotated bounding box exceeds 150 px^2.
if ( (robotBlobs[*numRobots].maxXRotated - robotBlobs[*numRobots].minXRotated)*(robotBlobs[*numRobots].maxYRotated - robotBlobs[*numRobots].minYRotated) > 150) {
// Note the adjustment to the height of the neck. This gives better results as the robot does not have colour all the way to the ground and therefore the blob does not sit on the ground
visionObjects_[numVisionObjects_].distance_ = distance;
visionData_->TransformPositionObject(&visionObjects_[numVisionObjects_]);
robots[*numRobots] = (visionObjects_[numVisionObjects_]);
numVisionObjects_++; // Set the visionObjects counter to point to the next entry in the array
(*numRobots)++; // Set the numRobots variable to point to the next entry in the array
// Decide whether we still have more robots to find - if we have 4 abort from the loop
if (*numRobots == 4) {
break;
}
}
// Reset the variables so we begin to find the next robot
robotBlobs[*numRobots].area = 0; // Reset the total area to 0 as we have now stored the robot
robotBlobs[*numRobots].subsumed = false;
// Update the pointers so they point to the blob which is not part of the bounding box
// so that we can start to find the next robot. Set robotBlobs[*numRobots].minXRotated, robotBlobs[*numRobots].maxXRotated, robotBlobs[*numRobots].minYRotated, robotBlobs[*numRobots].maxYRotated to the edges of this
// blob, forming a bounding box around this outlying blob;
// NOTE(review): the second assignment seeds maxXRotated from
// blob->minXRotated (looks like a typo for maxXRotated), but the MAX()
// four lines below masks it; the MIN/MAX quartet re-derives the same
// box, making the four direct assignments redundant.
robotBlobs[*numRobots].minXRotated = blob->minXRotated;
robotBlobs[*numRobots].maxXRotated = blob->minXRotated;
robotBlobs[*numRobots].minYRotated = blob->minYRotated;
robotBlobs[*numRobots].maxYRotated = blob->maxYRotated;
robotBlobs[*numRobots].minXRotated = MIN(robotBlobs[*numRobots].minXRotated,blob->minXRotated);
robotBlobs[*numRobots].maxXRotated = MAX(robotBlobs[*numRobots].maxXRotated,blob->maxXRotated);
robotBlobs[*numRobots].minYRotated = MIN(robotBlobs[*numRobots].minYRotated,blob->minYRotated);
robotBlobs[*numRobots].maxYRotated = MAX(robotBlobs[*numRobots].maxYRotated,blob->maxYRotated);
}
// Update the total area of the blob
robotBlobs[*numRobots].area = robotBlobs[*numRobots].area + blob->area;
if (i == sortedBlobs_-1) {
// Last sorted blob: finalise the candidate currently being built.
// NOTE(review): this mirrors the newRobot branch above, but (a) it
// computes the ratio from cos(-effRoll)/sin(-effRoll) where the copy
// above uses cos(effRoll)/sin(effRoll) — opposite sign, so currX is
// stepped the other way; and (b) it never executes (*numRobots)++, so
// this last candidate is written into robots[*numRobots] without being
// counted and the validation loop below will skip it — TODO confirm
// both asymmetries are intended.
// Unrotate the robot blob
UnrotateBlobs(&(robotBlobs[*numRobots]),visionData_->effHeadRoll_);
int centreX = (int)(robotBlobs[*numRobots].maxX + robotBlobs[*numRobots].minX)/2;
int centreY = (int)(robotBlobs[*numRobots].maxY + robotBlobs[*numRobots].minY)/2;
int confidence = (int) ((robotBlobs[*numRobots].maxY - robotBlobs[*numRobots].minY) * 100/currentImageHeight_);
if (c_ROBOT_COLOUR == c_ROBOT_RED) {
visionObjects_[numVisionObjects_].SetData(VisionObject::OT_ROBOT_RED, &robotBlobs[*numRobots], NULL, visionData_->CalculateHeading(centreX), visionData_->CalculateElevation(centreY), 1, centreX, centreY, confidence);
} else {
visionObjects_[numVisionObjects_].SetData(VisionObject::OT_ROBOT_BLUE, &robotBlobs[*numRobots], NULL, visionData_->CalculateHeading(centreX), visionData_->CalculateElevation(centreY), 1, centreX, centreY, confidence);
}
// Determine the distance to the robot using the most reliable method available
double distance = -1;
double infraDist = visionData_->GetInfraredDistance(&robotBlobs[*numRobots],c_ROBOT_COLOUR);
if (infraDist != -1) {
// if -1 is not returned then we can use the distance from infrared as it has passed the sanity checks associated with using infrared
distance = infraDist;
} else {
int currX = centreX;
int currY = robotBlobs[*numRobots].maxY;
int ratio;
double effRoll = visionData_->effHeadRoll_ + visionData_->bodyRoll_;
if (effRoll == 0) ratio = 1000;
else ratio = (int) (cos(-effRoll)/sin(-effRoll));
bool foundGreen = false;
unsigned char* pixelPointer = NULL;
for (int n = 0; n < 10; n++) {
currY++;
if (ratio!=0 && (n+1)%ratio == 0) currX -= ratio/ABS(ratio); //Ensure we're going directly down (approximately) instead of just down in the image
pixelPointer = &(visionData_->classified_[currY*currentImageWidth_+currX]);
if (*pixelPointer == c_FIELD_GREEN || currX == 0 || currX == currentImageWidth_ || currY == currentImageHeight_) {
foundGreen = true;
break;
}
}
if (!foundGreen) {
currX = centreX;
currY = robotBlobs[*numRobots].maxY;
}
distance = ABS(visionData_->GetDistanceToPoint(currX, currY,HEIGHTOFNECK));
}
if ( (robotBlobs[*numRobots].maxXRotated - robotBlobs[*numRobots].minXRotated)*(robotBlobs[*numRobots].maxYRotated - robotBlobs[*numRobots].minYRotated) > 150) {
// Note the adjustment to the height of the neck. This gives better results as the robot does not have colour all the way to the ground and therefore the blob does not sit on the ground
visionObjects_[numVisionObjects_].distance_ = distance;
visionData_->TransformPositionObject(&visionObjects_[numVisionObjects_]);
robots[*numRobots] = (visionObjects_[numVisionObjects_]);
numVisionObjects_++;
}
}
// Buffer is the maximum distance to look right - max_width is the maximum length of a bounding box
// (ie bounding box of the dog viewed from the side) given a specific height. In this case, max_width
// is the maximum possible length of a bounding box with the height of the current bounding box.
double HORI = 40.0; // the width of the bounding box containing a robot standing perpendicular to the camera
double VERT = 18.0; // the height of the bounding box containing a robot standing perpendicular to the camera
// The MAX(MIN(...)) clamps the merge gap to the range [10, 20] px.
buffer = (int) MAX(MIN(((double)(robotBlobs[*numRobots].maxYRotated - robotBlobs[*numRobots].minYRotated)/VERT)*HORI - (robotBlobs[*numRobots].maxXRotated - robotBlobs[*numRobots].minXRotated), 20), 10);
}
// Finish processing of the robots to determine if valid robots have been found
for (i = 0; i < *numRobots; i++) {
// If the confidence that we have in the vision object is 0 then throw it away as it cannot really be used
if (robots[i].confidence_ <= 0) {
continue;
}
// Cross-check against confidently-seen landmarks (beacons and goals).
for (int k=0; k < numVisionObjects_; k++) {
if ((visionObjects_[k].confidence_ > 20) && (visionObjects_[k].type_ >= VisionObject::OT_BEACON_YP) && (visionObjects_[k].type_ <= VisionObject::OT_GOAL_Y)) {
// beacons should be further away than robot
// NOTE(review): headings compared against a plain threshold of 5,
// while elevations below are converted with RAD_TO_DEG first —
// confirm heading_ is already in degrees here.
if ((ABS(visionObjects_[k].heading_ - robots[i].heading_) < 5) && (visionObjects_[k].distance_ < robots[i].distance_)) {
robots[i].confidence_ = 0;
}
// A robot should not appear above a beacon.
if (robots[i].elevation_ > visionObjects_[k].elevation_ + 5) { // 5 is the error threshold
robots[i].confidence_ = 0;
}
}
}
// final check: if it doesn't see any beacon, then check its own elevation
if ((RAD_TO_DEG(visionData_->effHeadTilt_) - RAD_TO_DEG(robots[i].elevation_)) < -5) {
robots[i].confidence_ = 0;
} else if (RAD_TO_DEG(robots[i].elevation_) >= 20.0) {
robots[i].confidence_ = 0;
}
}
}
// (removed: non-code residue from the hosting site's keyboard-shortcut help panel)