// guiserver.cpp
100, 73, 1000), true);
// Cumulative (persistent) readings are drawn in a different color and at
// a slightly lower layer (72 vs. 73) so current readings render on top.
multiRobotRangeDevice->setCumulativeDrawingData(
new ArDrawingData("polyDots", ArColor(125, 0, 125),
100, 72, 1000), true);
}
// If we're not using the central multirobot server, create the multirobot
// peer classes instead, so robots exchange information directly.
else
{
// Make path planning use the explicit collision range to decide how far
// ahead it plans.
pathTask.setUseCollisionRangeForPlanningFlag(true);
// Range device that gathers information from the other (peer) servers
// and presents it as sensor readings to avoid.
multiRobotPeerRangeDevice = new ArMultiRobotPeerRangeDevice(&arMap);
// Server handler that sends this robot's information to the other peers.
multiRobotPeer = new ArServerHandlerMultiRobotPeer(&server, &robot,
&pathTask, &locTask);
// Hook the two together so they both know what priority (precedence)
// this robot has.
multiRobotPeer->setNewPrecedenceCallback(
multiRobotPeerRangeDevice->getSetPrecedenceCallback());
// Hook the two together so they both know what this robot's
// fingerprint is.
multiRobotPeer->setNewFingerprintCallback(
multiRobotPeerRangeDevice->getSetFingerprintCallback());
// Hook the two together so that the range device can call on the
// server handler to change its fingerprint.
multiRobotPeerRangeDevice->setChangeFingerprintCB(
multiRobotPeer->getChangeFingerprintCB());
// Register the peer range device with the robot and with path planning
// (BOTH: contributes to both current avoidance and planning data).
robot.addRangeDevice(multiRobotPeerRangeDevice);
pathTask.addRangeDevice(multiRobotPeerRangeDevice,
ArPathPlanningTask::BOTH);
// Set drawing data so clients (e.g. MobileEyes) can see the information
// the device is using to avoid; comment these out to not see them.
// NOTE(review): allocated with new and handed over with ownership flag
// "true" — presumably the device deletes the ArDrawingData; confirm
// against the ARIA API docs.
multiRobotPeerRangeDevice->setCurrentDrawingData(
new ArDrawingData("polyDots", ArColor(125, 125, 0),
100, 72, 1000), true);
multiRobotPeerRangeDevice->setCumulativeDrawingData(
new ArDrawingData("polyDots", ArColor(125, 0, 125),
100, 72, 1000), true);
}
#endif // ifndef SONARNL
ArUtil::sleep(300);
// Add additional range devices to the robot and path planning task.
// CURRENT presumably means the device contributes only instantaneous
// readings (no accumulated history) to planning — confirm against the
// ArPathPlanningTask documentation.
// NOTE(review): robot.lock() below has no matching unlock() visible in
// this fragment — confirm the robot is unlocked after setup completes.
// IRs
robot.lock();
ArIRs irs;
robot.addRangeDevice(&irs);
pathTask.addRangeDevice(&irs, ArPathPlanningTask::CURRENT);
// Bumpers.
ArBumpers bumpers;
robot.addRangeDevice(&bumpers);
pathTask.addRangeDevice(&bumpers, ArPathPlanningTask::CURRENT);
// Forbidden regions from the map, exposed as a virtual range device so
// the planner treats them as obstacles.
ArForbiddenRangeDevice forbidden(&arMap);
robot.addRangeDevice(&forbidden);
pathTask.addRangeDevice(&forbidden, ArPathPlanningTask::CURRENT);
// Action to override path driving when localization is lost
// (priority 150 in the path-planning action group).
ArActionLost actionLostPath(&locTask, &pathTask);
pathTask.getPathPlanActionGroup()->addAction(&actionLostPath, 150);
// Action to slow down the robot when the localization score drops but
// is not yet lost (priority 140, just below the lost action).
ArActionSlowDownWhenNotCertain actionSlowDown(&locTask);
pathTask.getPathPlanActionGroup()->addAction(&actionSlowDown, 140);
#ifndef SONARNL
// This is the place to add a range device which will hold sensor data
// and delete it appropriately to replan around blocked paths.
ArGlobalReplanningRangeDevice replanDev(&pathTask);
#endif
// Create objects that add network services:
// Drawing in the map display: publishes every range device already
// registered on the robot, plus the extras added below.
ArServerInfoDrawings drawings(&server);
drawings.addRobotsRangeDevices(&robot);
#ifndef SONARNL
drawings.addRangeDevice(&replanDev);
#endif // SONARNL
#ifndef SONARNL
// Add something to display the laser reflectors.
ArLaserReflectorDevice reflector(&sick, &robot);
drawings.addRangeDevice(&reflector);
#endif
/* If you want to draw the destination put this code back in:
ArServerDrawingDestination destination(
&drawings, &pathTask, "destination",
500, 500,
new ArDrawingData("polyDots",
ArColor(0xff, 0xff, 0x0),
800, // size
49)); // just below the robot
*/
/* If you want to see the local path planning area use this
(You can enable this particular drawing from custom commands
which is set up down below in ArServerInfoPath)
ArDrawingData drawingDataP("polyLine", ArColor(200,200,200), 1, 75);
ArFunctor2C<ArPathPlanningTask, ArServerClient *, ArNetPacket *>
drawingFunctorP(pathTask, &ArPathPlanningTask::drawSearchRectangle);
drawings.addDrawing(&drawingDataP, "Local Plan Area", &drawingFunctorP);
*/
/* If you want to see the points making up the local path in addition to the
* main path use this.
ArDrawingData drawingDataP2("polyDots", ArColor(0,128,0), 100, 70);
ArFunctor2C<ArPathPlanningTask, ArServerClient *, ArNetPacket *>
drawingFunctorP2(pathTask, &ArPathPlanningTask::drawPathPoints);
drawings.addDrawing(&drawingDataP2, "Path Points", &drawingFunctorP2);
*/
/* If you want to see the points used by localization use this
ArDrawingData drawingDataL("polyDots", ArColor(0,255,0), 100, 75);
ArFunctor2C<ArLocalizationTask, ArServerClient *, ArNetPacket *>
drawingFunctorL(locTask, &ArLocalizationTask::drawRangePoints);
drawings.addDrawing(&drawingDataL, "Localization Points", &drawingFunctorL);
*/
#ifndef SONARNL
/* If you want to see the reflector rays use this */
ArDrawingData drawingDataL1("polySegments", ArColor(200,200,200), 1, 75);
ArFunctor2C<ArLocalizationTask, ArServerClient *, ArNetPacket *>
drawingFunctorL2(locTask, &ArLocalizationTask::drawReflectorRays);
drawings.addDrawing(&drawingDataL1, "Reflector Rays", &drawingFunctorL2);
#endif
// Misc. simple commands:
ArServerHandlerCommands commands(&server);
// Forward any video if either ACTS or SAV server are running.
// You can find out more about SAV and ACTS on our website
// http://robots.activmedia.com. ACTS is for color tracking and is
// a separate product. SAV just does software A/V transmitting and is
// free to all our customers. Just run ACTS or SAV server before you
// start this program and this class here will forward video from the
// server to the client.
ArHybridForwarderVideo videoForwarder(&server, "localhost", 7070);
// Camera-related objects, created below only if video is available.
ArPTZ *camera = NULL;
ArServerHandlerCamera *handlerCamera = NULL;
ArCameraCollection *cameraCollection = NULL;
// if we have video then set up a camera
if (videoForwarder.isForwardingVideo())
{
// NOTE(review): the objects new'd in this branch (collection, camera,
// handler, functors) are never deleted in this fragment — presumably
// intentional since they live for the program's duration; confirm.
cameraCollection = new ArCameraCollection();
cameraCollection->addCamera("Cam1", "VCC4", "Camera", "VCC4");
videoForwarder.setCameraName("Cam1");
videoForwarder.addToCameraCollection(*cameraCollection);
bool invertedCamera = false;
camera = new ArVCC4(&robot, invertedCamera,
ArVCC4::COMM_UNKNOWN, true, true);
camera->init();
handlerCamera = new ArServerHandlerCamera("Cam1",
&server,
&robot,
camera,
cameraCollection);
// Point the camera at the goal when a new goal is set, and clear that
// behavior when the goal is reached.
pathTask.addNewGoalCB(
new ArFunctor1C<ArServerHandlerCamera, ArPose>(
handlerCamera,
&ArServerHandlerCamera::cameraModeLookAtGoalSetGoal));
pathTask.addGoalFinishedCB(
new ArFunctorC<ArServerHandlerCamera>(
handlerCamera,
&ArServerHandlerCamera::cameraModeLookAtGoalClearGoal));
}
// After all of the cameras / videos have been created and added to the collection,
// then start the collection server.
//
if (cameraCollection != NULL) {
new ArServerHandlerCameraCollection(&server, cameraCollection);
}
// These provide various kinds of information to the client:
ArServerInfoRobot serverInfoRobot(&server, &robot);
ArServerInfoSensor serverInfoSensor(&server, &robot);
ArServerInfoPath serverInfoPath(&server, &robot, &pathTask);
// Let clients draw the planner's search rectangle and toggle path-related
// settings via the custom-commands service.
serverInfoPath.addSearchRectangleDrawing(&drawings);
serverInfoPath.addControlCommands(&commands);
// Provides localization info and allows the client to relocalize at a given
// pose:
ArServerInfoLocalization serverInfoLocalization(&server, &robot, &locTask);
ArServerHandlerLocalization serverLocHandler(&server, &robot, &locTask);
// Provide the map to the client (and related controls):
// This uses both lines and points now, since everything except
// sonar localization uses both (path planning with sonar still uses both)
ArServerHandlerMap serverMap(&server, &arMap);
// Add some simple (custom) commands for testing and debugging:
ArServerSimpleComUC uCCommands(&commands, &robot); // Send any command to the microcontroller
ArServerSimpleComMovementLogging loggingCommands(&commands, &robot); // configure logging
ArServerSimpleComGyro gyroCommands(&commands, &robot, &gyro); // monitor the gyro
ArServerSimpleComLogRobotConfig configCommands(&commands, &robot); // trigger logging of the robot config parameters
ArServerSimpleServerCommands serverCommands(&commands, &server); // monitor networking behavior (track packets sent etc.)
/* Set up the possible modes for remote control from a client such as
 * MobileEyes:
 */
// Mode To go to a goal or other specific point:
ArServerModeGoto modeGoto(&server, &robot, &pathTask, &arMap,
locTask.getHomePose());
// Add a simple (custom) command that allows you to give a list of
// goals to tour, instead of all. Useful for testing and debugging.
modeGoto.addTourGoalsInListSimpleCommand(&commands);
// Mode To stop and remain stopped:
ArServerModeStop modeStop(&server, &robot);
// Unless we are using SONARNL, cause the sonar to turn off automatically
// when the robot is stopped, and turn it back on when commands to move
// are sent. (If using SONARNL to localize, then we cannot turn sonar
// off since localization may get lost)
#ifndef SONARNL
ArSonarAutoDisabler sonarAutoDisabler(&robot);
#endif
// Teleoperation modes To drive by keyboard, joystick, etc:
ArServerModeRatioDrive modeRatioDrive(&server, &robot); // New, improved mode
ArServerModeDrive modeDrive(&server, &robot); // Older mode for compatibility
// Prevent normal teleoperation driving if localization is lost using
// a high-priority action (110), which enables itself when the particular
// mode is active.
// (You have to enter unsafe drive mode to drive when lost.)
ArActionLost actionLostRatioDrive(&locTask, &pathTask, &modeRatioDrive);
modeRatioDrive.getActionGroup()->addAction(&actionLostRatioDrive, 110);
ArActionLost actionLostDrive(&locTask, &pathTask, &modeDrive);
modeDrive.getActionGroup()->addAction(&actionLostDrive, 110);
// Drive mode's configuration and custom (simple) commands:
modeRatioDrive.addToConfig(Aria::getConfig(), "Teleop settings");
modeDrive.addControlCommands(&commands);
modeRatioDrive.addControlCommands(&commands);
// Wander mode (also prevent wandering if lost):
ArServerModeWander modeWander(&server, &robot);
ArActionLost actionLostWander(&locTask, &pathTask, &modeWander);
modeWander.getActionGroup()->addAction(&actionLostWander, 110);
// This provides a small table of interesting information for the client
// to display to the operator:
ArServerInfoStrings stringInfo(&server);
Aria::getInfoGroup()->addAddStringCallback(stringInfo.getAddStringFunctor());
// Display localization score, and laser communication statistic if
// not SonArnl:
// NOTE(review): the matching #else/#endif for this #ifdef lies beyond
// this fragment — the non-SONARNL branch is not visible here.
#ifdef SONARNL
// Publish the sonar localization score as a client-visible string.
Aria::getInfoGroup()->addStringDouble(
"Localization Score", 8,
new ArRetFunctorC<double, ArSonarLocalizationTask>(
&locTask,
&ArSonarLocalizationTask::getLocalizationScore),
"%.03f");
// Publish the current number of localization samples.
Aria::getInfoGroup()->addStringInt(
"Num Samples", 8,
new ArRetFunctorC<int, ArSonarLocalizationTask>(
&locTask, &ArSonarLocalizationTask::getCurrentNumSamples),
"%4d");
// [NOTE: extraction artifact removed here — code-hosting site UI text
//  (keyboard-shortcut help). The original source file continues beyond
//  this fragment.]