
📄 simplen150sim.java

📁 Swarm robots written in Java that use local communication to maintain a given formation
💻 Java
📖 Page 1 of 3
        /*--- draw the trail ---*/
        g.setColor(background);
        Point from = (Point)point_list.nextElement();
        while (point_list.hasMoreElements())
            {
            Point next = (Point)point_list.nextElement();
            g.drawLine(from.x,from.y,next.x,next.y);
            from = next;
            }
        }

    private String display_string = "blank";

    /**
     * Set the String that is printed on the robot's display.
     * For simulated robots, this appears printed below the agent
     * when view "Robot State" is selected.
     * @param s String, the text to display.
     */
    public void setDisplayString(String s)
        {
        display_string = s;
        }

    /**
     * Draw the robot's state.
     */
    public void drawState(Graphics g, int w, int h,
            double t, double b, double l, double r)
        {
        top = t; bottom = b; left = l; right = r;
        if (DEBUG) System.out.println("draw "+
            w + " " +
            h + " " +
            t + " " +
            b + " " +
            l + " " +
            r + " ");
        double meterspp = (r - l) / (double)w;
        int radius = (int)(RADIUS / meterspp);
        int xpix = (int)((position.x - l) / meterspp);
        int ypix = (int)((double)h - ((position.y - b) / meterspp));

        /*--- draw State ---*/
        g.setColor(background);
        g.drawString(display_string,xpix+radius+3,ypix-radius);

        /*--- draw Vectors ---*/
        displayVectors.draw(g, w, h, t, b, l, r);
        }

    /**
     * Set the length of the trail (in movement steps).
     * @param l int, the length of the trail.
     */
    public void setTrailLength(int l)
        {
        trail = new CircularBuffer(l);
        }

    /**
     * Clear the trail.
     */
    public void clearTrail()
        {
        trail.clear();
        }

    /**
     * Draw the robot in a specific spot.
     */
    public void draw(Vec2 pos, Graphics g, int w, int h,
            double t, double b, double l, double r)
        {
        Vec2 old_pos = position;
        position = pos;
        draw(g,w,h,t,b,l,r);
        position = old_pos;
        }

    /**
     * Draw the robot.
     */
    public void draw(Graphics g, int w, int h,
            double t, double b, double l, double r)
        {
        top = t; bottom = b; left = l; right = r;
        if (DEBUG) System.out.println("draw "+
            w + " " +
            h + " " +
            t + " " +
            b + " " +
            l + " " +
            r + " ");
        double meterspp = (r - l) / (double)w;
        if (DEBUG) System.out.println("meterspp "+meterspp);
        int radius = (int)(RADIUS / meterspp);
        int turretd = (int)Units.RadToDeg(turret);
        int visionr = (int)(MultiForageN150.VISION_RANGE / meterspp);
        int xpix = (int)((position.x - l) / meterspp);
        int ypix = (int)((double)h - ((position.y - b) / meterspp));
        if (DEBUG) System.out.println("robot at "+xpix+","+ypix);

        /*--- draw the main body ---*/
        g.setColor(foreground);
        g.fillOval(xpix - radius, ypix - radius,
            radius + radius, radius + radius);
        //g.drawOval(xpix - radius, ypix - radius,
        //    radius + radius, radius + radius);

        /*--- draw the turret ---*/
        g.setColor(background);
        int dirx = xpix + (int)((double)radius * Math.cos(turret));
        int diry = ypix + -(int)((double)radius * Math.sin(turret));
        g.drawLine(xpix, ypix, dirx, diry);
        //g.drawArc(xpix - visionr, ypix - visionr,
        //    visionr + visionr, visionr + visionr,
        //    turretd - (MultiForageN150.VISION_FOV_DEG/2),
        //    MultiForageN150.VISION_FOV_DEG);

        /*--- draw steer ---*/
        dirx = xpix + (int)((double)radius * Math.cos(steer.t)*0.5);
        diry = ypix + -(int)((double)radius * Math.sin(steer.t)*0.5);
        g.drawLine(xpix, ypix, dirx, diry);

        /*--- draw what we are carrying ---*/
        if (in_gripper != null)
            {
            Vec2 gpos = new Vec2(RADIUS,0);
            gpos.sett(turret);
            gpos.add(position);
            in_gripper.draw(gpos,g,w,h,t,b,l,r);
            }
        }

    /**
     * Clean up.
     */
    public void quit()
        {
        }

    /**
     * Gets time elapsed since the robot was instantiated.
     * Since this is simulation, it may not match real elapsed time.
     */
    public long getTime()
        {
        return(time);
        }

    private long last_Obstaclest = 0;
    private Vec2 last_Obstacles[];
    private int num_Obstacles;

    /**
     * Get an array of Vec2s that point egocentrically from the
     * center of the robot to the obstacles currently sensed by the
     * bumpers and sonars.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @return the sensed obstacles.
     */
    public Vec2[] getObstacles(long timestamp)
        {
        if ((timestamp > last_Obstaclest)||(timestamp == -1))
            {
            if (timestamp != -1) last_Obstaclest = timestamp;
            Vec2 tmp_objs[] = new Vec2[all_objects.length];
            num_Obstacles = 0;

            /*--- check all objects ---*/
            for (int i = 0; i<all_objects.length; i++)
                {
                /*--- check if it's an obstacle and not self ---*/
                if (all_objects[i].isObstacle() &&
                    (all_objects[i].getID() != unique_id))
                    {
                    Vec2 tmp = all_objects[i].getClosestPoint(position);
                    if (tmp.r<obstacle_rangeM)
                        tmp_objs[num_Obstacles++] = tmp;
                    }
                }
            last_Obstacles = new Vec2[num_Obstacles];
            for (int i = 0; i<num_Obstacles; i++)
                last_Obstacles[i] = new Vec2(tmp_objs[i].x, tmp_objs[i].y);
            }

        Vec2[] retval = new Vec2[num_Obstacles];
        for (int i = 0; i<num_Obstacles; i++)
            retval[i] = new Vec2(last_Obstacles[i].x, last_Obstacles[i].y);
        return(retval);
        }

    private double obstacle_rangeM = 1.0;

    /**
     * Set the maximum range at which a sensor reading should be considered
     * an obstacle.  Beyond this range, the readings are ignored.
     * The default range on startup is 1 meter.
     * @param range the range in meters.
     */
    public void setObstacleMaxRange(double range)
        {
        obstacle_rangeM = range;
        }

    private long last_VisualObjectst = 0;
    private Vec2[] last_VisualObjects;
    private int num_VisualObjects = 0;
    private int last_channel = 0;

    /**
     * Get an array of Vec2s that represent the
     * locations of visually sensed objects egocentrically
     * from center of the robot to the objects currently sensed by the
     * vision system.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the sensed objects.
     */
    public Vec2[] getVisualObjects(long timestamp, int channel)
        {
        if (((timestamp > last_VisualObjectst)||
            (channel != last_channel))||(timestamp == -1))
            {
            if (timestamp != -1) last_VisualObjectst = timestamp;
            last_channel = channel;
            num_VisualObjects = 0;
            Vec2 tmp_objs[] = new Vec2[all_objects.length];

            /*--- check all objects ---*/
            for (int i = 0; i<all_objects.length; i++)
                {
                /*--- check if it's of the right code and not self ---*/
                if (all_objects[i].getVisionClass()==channel &&
                    (all_objects[i].getID() != unique_id))
                    {
                    Vec2 tmp = all_objects[i].getCenter(position);
                    if ((tmp.r<MultiForageN150.VISION_RANGE)&&
                        (Math.abs(Units.BestTurnRad(turret,tmp.t))
                        < (MultiForageN150.VISION_FOV_RAD/2)))
                        tmp_objs[num_VisualObjects++] = tmp;
                    }
                }
            last_VisualObjects = new Vec2[num_VisualObjects];
            for (int i = 0; i<num_VisualObjects; i++)
                last_VisualObjects[i] = new Vec2(tmp_objs[i].x, tmp_objs[i].y);
            }

        // if the timestamp is not changed, return the last_VisualObjects.
        Vec2[] retval = new Vec2[num_VisualObjects];
        for (int i = 0; i<num_VisualObjects; i++)
            retval[i] = new Vec2(last_VisualObjects[i].x,
                last_VisualObjects[i].y);
        return(retval);
        }

    /**
     * This is a dummy implementation to keep compatibility with
     * VisualSensorObject.  At this point, vision noise is not built into
     * the class; for an example, see SimpleCyeSim.
     */
    public void setVisionNoise(double mean, double stddev, long seed) { }

    private long last_VisualSizest = 0;

    /**
     * NOT IMPLEMENTED:
     * Get an array of doubles that represent an estimate of the
     * size in square meters of the visually sensed objects.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the sizes of the sensed objects.
     */
    public double[] getVisualSizes(long timestamp, int channel)
        {
        /* todo */
        return(new double[0]);
        }

    private long last_VisualAxest = 0;

    /**
     * NOT IMPLEMENTED:
     * Get an array of doubles that represent the
     * major axis orientation of the visually sensed objects.
     * 0 and PI are horizontal, PI/2 is vertical.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the major axes of the sensed objects.
     */
    public double[] getVisualAxes(long timestamp, int channel)
        {
        /* todo */
        return(new double[0]);
        }

    private long last_ObjectInGrippert = -1;
    private int last_ObjectInGripper = -1;

    /**
     * Get the kind of object in the gripper.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @return channel (1-6) which type/color of
     *    object in the gripper, 0 otherwise.
     */
    public int getObjectInGripper(long timestamp)
        {
        if ((timestamp > last_ObjectInGrippert)||(timestamp == -1))
            {
            if (timestamp != -1) last_ObjectInGrippert = timestamp;
            last_ObjectInGripper = -1;

            /*--- check if we are holding something ---*/
            if (in_gripper != null)
                last_ObjectInGripper = in_gripper.getVisionClass();
            else
                {
                /*--- find gripper position in global coord ---*/
                Vec2 gpos = new Vec2(RADIUS,0);
                gpos.sett(turret);
                gpos.add(position);

                /*--- check all objects ---*/
                for (int i = 0; i<all_objects.length; i++)
                    {
                    /*--- check if it's not self ---*/
                    if (all_objects[i].getID() != unique_id)
                        {
                        Vec2 tmp = all_objects[i].getCenter(gpos);
                        if ((tmp.r<MultiForageN150.GRIPPER_CAPTURE_RADIUS)
                            &&(all_objects[i].getVisionClass()>=0))
                            {
                            last_ObjectInGripper = all_objects[i].getVisionClass();
                            break;
                            }
                        }
                    }
                }
            }
        return(last_ObjectInGripper);
        }

    /**
     * Get the position of the robot in global coordinates.
     * @param timestamp only get new information
     *    if timestamp > last call or timestamp == -1.
     * @return the position.
     */
    public Vec2 getPosition(long timestamp)
        {
        return(new Vec2(position.x, position.y));
        }

    /**
     * Get the position of the robot in global coordinates.
     * @return the position.
     */
    public Vec2 getPosition()
        {
        return(new Vec2(position.x, position.y));
        }

    /**
     * Reset the odometry of the robot in global coordinates.
     * This might be done when reliable sensor information provides
     * a very good estimate of the robot's location, or if you
     * are starting the robot in a known location other than (0,0).
     * Do this only if you are certain you're right!
     * @param posit the new position.
     */
    public void resetPosition(Vec2 posit)
        {
        position.setx(posit.x);
        position.sety(posit.y);
        }

    private boolean in_reverse = false;

    /**
     * Get the current heading of the steering motor (radians).
     */
    public double getSteerHeading(long timestamp)
        {
        return(steer.t);
        }

    /**
     * Reset the steering odometry to the given heading (radians).
     */
    public void resetSteerHeading(double heading)
        {
        /* ensure in legal range */
        heading = Units.ClipRad(heading);

        /* if we're in reverse, the steer heading is PI out */
        if (in_reverse) heading = Units.ClipRad(heading + Math.PI);

        /* set the angle */
        steer.sett(heading);
        }

    private double desired_heading;

    /**
     * Set the desired heading of the steering motor (radians).
     */
    public void setSteerHeading(long timestamp, double heading)
        {
        /* ensure in legal range */
        desired_heading = Units.ClipRad(heading);
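The listing above (continued on the next two pages) is the simulated robot's sensor and actuator layer; the formation behavior described at the top of the page would live in a separate control class. As a rough, hypothetical sketch only, and not part of simplen150sim.java, the fragment below shows one way a per-step controller might combine the methods visible in this listing: it sums repulsion away from sensed obstacles with attraction toward visually sensed teammates and steers along the result. The SimpleN150Sim type name, the takeStep wrapper, and the teammate vision channel (1) are assumptions for the sketch; only methods that appear on this page are called.

    // Hypothetical controller fragment (not part of simplen150sim.java).
    // Assumes a TeamBots-style Vec2 (public x, y, r, t; sett(); add()) and a
    // robot object exposing the methods shown in the listing above.
    public void takeStep(SimpleN150Sim robot)
        {
        long t = robot.getTime();

        // Egocentric vectors to obstacles within the configured max range.
        Vec2[] obstacles = robot.getObstacles(t);

        // Egocentric vectors to teammates on an assumed vision channel (1).
        Vec2[] mates = robot.getVisualObjects(t, 1);

        // Repulsion: sum of vectors pointing away from each sensed obstacle.
        Vec2 heading = new Vec2(0, 0);
        for (int i = 0; i < obstacles.length; i++)
            heading.add(new Vec2(-obstacles[i].x, -obstacles[i].y));

        // Attraction: pull toward each sensed teammate to hold the group together.
        for (int i = 0; i < mates.length; i++)
            heading.add(new Vec2(mates[i].x, mates[i].y));

        // Steer along the combined direction and report state in the
        // "Robot State" view.
        if (obstacles.length + mates.length > 0)
            robot.setSteerHeading(t, heading.t);
        robot.setDisplayString("mates: " + mates.length
            + " obstacles: " + obstacles.length);
        }

A real controller would also command a drive speed through the rest of the class, which continues on the following pages of this listing.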
