rescuevansim.java
    /**
     * Clear the trail.
     */
    public void clearTrail()
    {
        trail.clear();
    }

    /**
     * Draw the robot in a specific spot.
     */
    public void draw(Vec2 pos, Graphics g, int w, int h,
            double t, double b, double l, double r)
    {
        Vec2 old_pos = position;
        position = pos;
        draw(g, w, h, t, b, l, r);
        position = old_pos;
    }

    /**
     * Draw the robot as an icon.
     */
    public void drawIcon(Graphics g, int w, int h,
            double t, double b, double l, double r)
    {
        Vec2[] body = new Vec2[6];
        int[] bodyx = new int[6];
        int[] bodyy = new int[6];

        top = t; bottom = b; left = l; right = r;
        if (DEBUG) System.out.println("draw " + w + " " + h + " "
                + t + " " + b + " " + l + " " + r + " ");
        double meterspp = (r - l) / (double)w;
        if (DEBUG) System.out.println("meterspp " + meterspp);
        int radius = (int)(RADIUS / meterspp);
        //int visionr = (int)(VISION_RANGE / meterspp);
        int xpix = (int)((position.x - l) / meterspp);
        int ypix = (int)((double)h - ((position.y - b) / meterspp));
        if (DEBUG) System.out.println("position is " + position.x + "," + position.y);
        if (DEBUG) System.out.println("robot at" + " at " + xpix + "," + ypix);

        /*--- draw the main body ---*/
        body[0] = new Vec2( 2.0, 1.2);
        body[1] = new Vec2( 2.4, 0.8);
        body[2] = new Vec2( 2.4,-0.8);
        body[3] = new Vec2( 2.0,-1.2);
        body[4] = new Vec2(-2.4,-1.2);
        body[5] = new Vec2(-2.4, 1.2);
        for (int j = 0; j < 6; j++) // scale and rotate
        {
            //body[j].setr(body[j].r / meterspp);
            body[j].setr(body[j].r * 5);
            body[j].sett(body[j].t - steer.t);
            bodyx[j] = (int)body[j].x + xpix;
            bodyy[j] = (int)body[j].y + ypix;
        }
        g.setColor(foreground);
        g.fillPolygon(bodyx, bodyy, 6);
        g.setColor(background);
        g.drawPolygon(bodyx, bodyy, 6);
    }

    /**
     * Draw the robot.
     */
    public void draw(Graphics g, int w, int h,
            double t, double b, double l, double r)
    {
        Vec2[] body = new Vec2[6];
        int[] bodyx = new int[6];
        int[] bodyy = new int[6];

        top = t; bottom = b; left = l; right = r;
        if (DEBUG) System.out.println("draw " + w + " " + h + " "
                + t + " " + b + " " + l + " " + r + " ");
        double meterspp = (r - l) / (double)w;
        if (DEBUG) System.out.println("meterspp " + meterspp);
        int radius = (int)(RADIUS / meterspp);
        //int visionr = (int)(VISION_RANGE / meterspp);
        int xpix = (int)((position.x - l) / meterspp);
        int ypix = (int)((double)h - ((position.y - b) / meterspp));
        if (DEBUG) System.out.println("robot at" + " at " + xpix + "," + ypix);

        /*--- draw the main body ---*/
        body[0] = new Vec2( 2.0, 1.2);
        body[1] = new Vec2( 2.4, 0.8);
        body[2] = new Vec2( 2.4,-0.8);
        body[3] = new Vec2( 2.0,-1.2);
        body[4] = new Vec2(-2.4,-1.2);
        body[5] = new Vec2(-2.4, 1.2);
        for (int j = 0; j < 6; j++) // scale and rotate
        {
            body[j].setr(body[j].r / meterspp);
            //body[j].setr(body[j].r * 10);
            body[j].sett(body[j].t - steer.t);
            bodyx[j] = (int)body[j].x + xpix;
            bodyy[j] = (int)body[j].y + ypix;
        }
        g.setColor(foreground);
        g.fillPolygon(bodyx, bodyy, 6);
        g.setColor(background);
        g.drawPolygon(bodyx, bodyy, 6);
    }

    /**
     * Clean up.
     */
    public void quit()
    {
    }

    /**
     * Gets time elapsed since the robot was instantiated.
     * Since this is simulation, it may not match real elapsed time.
     */
    public long getTime()
    {
        return(time);
    }

    private long last_Obstaclest = 0;
    private Vec2 last_Obstacles[];
    private int num_Obstacles;

    /**
     * Get an array of Vec2s that point egocentrically from the
     * center of the robot to the obstacles currently sensed by the
     * bumpers and sonars.
     * @param timestamp only get new information
     *        if timestamp > than last call or timestamp == -1.
     * @return the sensed obstacles.
     */
    public Vec2[] getObstacles(long timestamp)
    {
        if ((timestamp > last_Obstaclest) || (timestamp == -1))
        {
            if (timestamp != -1) last_Obstaclest = timestamp;
            Vec2 tmp_objs[] = new Vec2[all_objects.length];
            num_Obstacles = 0;
            /*--- check all objects ---*/
            for (int i = 0; i < all_objects.length; i++)
            {
                /*--- check if it's an obstacle and not self ---*/
                if (all_objects[i].isObstacle()
                        && (all_objects[i].getID() != unique_id))
                {
                    Vec2 tmp = all_objects[i].getClosestPoint(position);
                    if (tmp.r < obstacle_rangeM)
                        tmp_objs[num_Obstacles++] = tmp;
                }
            }
            last_Obstacles = new Vec2[num_Obstacles];
            for (int i = 0; i < num_Obstacles; i++)
                last_Obstacles[i] = new Vec2(tmp_objs[i].x, tmp_objs[i].y);
        }
        Vec2[] retval = new Vec2[num_Obstacles];
        for (int i = 0; i < num_Obstacles; i++)
            retval[i] = new Vec2(last_Obstacles[i].x, last_Obstacles[i].y);
        return(retval);
    }

    private double obstacle_rangeM = 1.0;

    /**
     * Set the maximum range at which a sensor reading should be considered
     * an obstacle. Beyond this range, the readings are ignored.
     * The default range on startup is 1 meter.
     * @param range the range in meters.
     */
    public void setObstacleMaxRange(double range)
    {
        obstacle_rangeM = range;
    }

    private long last_VisualObjectst = 0;
    private Vec2[] last_VisualObjects;
    private int num_VisualObjects = 0;
    private int last_channel = 0;

    /**
     * Get an array of Vec2s that represent the
     * locations of visually sensed objects egocentrically
     * from center of the robot to the objects currently sensed by the
     * vision system.
     * @param timestamp only get new information
     *        if timestamp > than last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the sensed objects.
     */
    public Vec2[] getVisualObjects(long timestamp, int channel)
    {
        if (((timestamp > last_VisualObjectst)
                || (channel != last_channel)) || (timestamp == -1))
        {
            if (timestamp != -1) last_VisualObjectst = timestamp;
            last_channel = channel;
            num_VisualObjects = 0;
            Vec2 tmp_objs[] = new Vec2[all_objects.length];
            /*--- check all objects ---*/
            for (int i = 0; i < all_objects.length; i++)
            {
                /*--- check if it's of the right code and not self ---*/
                if (all_objects[i].getVisionClass() == channel
                        && (all_objects[i].getID() != unique_id))
                {
                    Vec2 tmp = all_objects[i].getCenter(position);
                    if (tmp.r < RescueVan.VISION_RANGE)
                        tmp_objs[num_VisualObjects++] = tmp;
                }
            }
            last_VisualObjects = new Vec2[num_VisualObjects];
            for (int i = 0; i < num_VisualObjects; i++)
                last_VisualObjects[i] = new Vec2(tmp_objs[i].x, tmp_objs[i].y);
        }
        Vec2[] retval = new Vec2[num_VisualObjects];
        for (int i = 0; i < num_VisualObjects; i++)
            retval[i] = new Vec2(last_VisualObjects[i].x, last_VisualObjects[i].y);
        return(retval);
    }

    /**
     * This is a dummy implementation to keep compatibility with
     * VisualSensorObject. At this point, vision noise is not built
     * into the class. For an example, see SimpleCyeSim.
     */
    public void setVisionNoise(double mean, double stddev, long seed)
    {
    }

    private long last_VisualSizest = 0;

    /**
     * NOT IMPLEMENTED:
     * Get an array of doubles that represent an estimate of the
     * size in square meters of the visually sensed objects.
     * @param timestamp only get new information if
     *        timestamp > than last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the sizes of the sensed objects.
     */
    public double[] getVisualSizes(long timestamp, int channel)
    {
        /* todo */
        return(new double[0]);
    }

    private long last_VisualAxest = 0;

    /**
     * NOT IMPLEMENTED:
     * Get an array of doubles that represent the
     * major axis orientation of the visually sensed objects.
     * 0 and PI are horizontal, PI/2 is vertical.
     * @param timestamp only get new information
     *        if timestamp > than last call or timestamp == -1.
     * @param channel (1-6) which type/color of object to retrieve.
     * @return the major axes of the sensed objects.
     */
    public double[] getVisualAxes(long timestamp, int channel)
    {
        /* todo */
        return(new double[0]);
    }

    /**
     * Get the position of the robot in global coordinates.
     * @param timestamp only get new information
     *        if timestamp > than last call or timestamp == -1.
     * @return the position.
     */
    public Vec2 getPosition(long timestamp)
    {
        return(new Vec2(position.x, position.y));
    }

    /**
     * Get the position of the robot in global coordinates.
     * @return the position.
     */
    public Vec2 getPosition()
    {
        return(new Vec2(position.x, position.y));
    }

    /**
     * Reset the odometry of the robot in global coordinates.
     * This might be done when reliable sensor information provides
     * a very good estimate of the robot's location, or if you
     * are starting the robot in a known location other than (0,0).
     * Do this only if you are certain you're right!
     * @param posit the new position.
     */
    public void resetPosition(Vec2 posit)
    {
        if (DEBUG) System.out.println("position reset to " + posit.x + "," + posit.y);
        position.setx(posit.x);
        position.sety(posit.y);
    }

    private boolean in_reverse = false;

    /**
     * Get the current steering heading of the robot in radians.
     */
    public double getSteerHeading(long timestamp)
    {
        return(steer.t);
    }

    /**
     * Reset the steering odometry to the given heading in radians.
     */
    public void resetSteerHeading(double heading)
    {
        /* ensure in legal range */
        heading = Units.ClipRad(heading);

        /* if we're in reverse, the steer heading is PI out */
        if (in_reverse) heading = Units.ClipRad(heading + Math.PI);

        /* set the angle */
        steer.sett(heading);
    }

    private double desired_heading;

    /**
     * Set the desired steering heading in radians.
     */
    public void setSteerHeading(long timestamp, double heading)
    {
        /* ensure in legal range */
        desired_heading = Units.ClipRad(heading);

        /* check if we should go in reverse */
        // NEVER GO IN REVERSE FOR A VAN
        //double turn = Units.BestTurnRad(steer.t, desired_heading);
        //if (Math.abs(turn)>(Math.PI/2))
        //{
        //in_reverse = true;
        //desired_heading =
        //Units.ClipRad(desired_heading+Math.PI);
        //}
        //else in_reverse = false;
    }

    private double desired_speed = 0;

    /**
     * Set the desired speed as a fraction (0.0 to 1.0) of the base speed.
     */
    public void setSpeed(long timestamp, double speed)
    {
        /* ensure legal range */
        if (speed > 1.0) speed = 1.0;
        else if (speed < 0) speed = 0;
        desired_speed = speed;
    }

    protected double base_speed = MAX_TRANSLATION;

    /**
     * Set the base speed, clipped to the range [0, MAX_TRANSLATION].
     */
    public void setBaseSpeed(double speed)
    {
        if (speed > MAX_TRANSLATION) speed = MAX_TRANSLATION;
        else if (speed < 0) speed = 0;
        base_speed = speed;
    }

    private long last_Opponentst = 0;
    private Vec2 last_Opponents[];

    /**
     * Get an array of Vec2s that point egocentrically from the
     * center of the robot to the opponents currently sensed by the
     * robot.
     * @param timestamp only get new information if
     *        timestamp > than last call or timestamp == -1.
     * @return the sensed Opponents.
     */
    public Vec2[] getOpponents(long timestamp)
    {
        if ((timestamp > last_Opponentst) || (timestamp == -1))
        {
            if (timestamp != -1) last_Opponentst = timestamp;
            last_Opponents = kin_sensor.getOpponents(all_objects);
        }
        return(last_Opponents);
    }

    /**
     * Return an int representing the player's ID on the team.
     * This value may not be valid if the simulation has not
     * been "set up" yet. Do not use it during initialization.
     * @param timestamp only get new information if
     *        timestamp > than last call or timestamp == -1.
     * @return the number.
     */
    public int getPlayerNumber(long timestamp)
    {
        return(kin_sensor.getPlayerNumber(all_objects));
    }

    private long last_Teammatest = 0;
    private Vec2 last_Teammates[] = new Vec2[0];

    /**
     * Get an array of Vec2s that point egocentrically from the
     * center of the robot to the teammates currently sensed by the
     * robot.
     * @param timestamp only get new information if
     *        timestamp > than last call or timestamp == -1.
     * @return the sensed teammates.
     */
    public Vec2[] getTeammates(long timestamp)
    {
        if ((timestamp > last_Teammatest) || (timestamp == -1))
        {
            if (timestamp != -1) last_Teammatest = timestamp;
            last_Teammates = kin_sensor.getTeammates(all_objects);
        }
        return(last_Teammates);
    }

    private double kin_rangeM = 4.0;

    /**
     * Set the maximum range at which a sensor reading should be considered
     * kin. Beyond this range, the readings are ignored.
     * Also used by opponent sensor.
     * The default range on startup is 4 meters.
     * @param range the range in meters.
     */
    public void setKinMaxRange(double range)
    {
        kin_rangeM = range;
        kin_sensor.setKinMaxRange(range);
    }

    /**
     * NOT IMPLEMENTED
     */
    public void setGripperHeight(long timestamp, double position)
    {
        /* todo */
    }

    /*--- Transceiver methods ---*/

    public void multicast(int[] ids, Message m)
            throws CommunicationException
    {
        transceiver.multicast(ids, m, all_objects);
    }

    public void broadcast(Message m)
    {
        transceiver.broadcast(m, all_objects);
    }

    public void unicast(int id, Message m)
            throws CommunicationException
    {
        transceiver.unicast(id, m, all_objects);
    }

    public CircularBufferEnumeration getReceiveChannel()
    {
        return(transceiver.getReceiveChannel());
    }

    public void setCommunicationMaxRange(double m)
    {
        transceiver.setCommunicationMaxRange(m);
    }

    public void receive(Message m)
    {
        transceiver.receive(m);
    }

    public boolean connected()
    {
        return(true);
    }
}
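
For context, below is a minimal sketch of how a control routine might drive this simulated van each step, using only methods that appear in the class above (getVisualObjects, getObstacles, setSteerHeading, setSpeed). The helper class and method names (VanControlStep, step), the Vec2 import path, and the channel/threshold values are illustrative assumptions, not part of the original file; it assumes the simulator classes are on the classpath.

    import EDU.gatech.cc.is.util.Vec2; // assumed package for the simulator's Vec2

    // Hypothetical helper, not part of rescuevansim.java: steer toward the
    // closest object sensed on vision channel 1 and slow down near obstacles.
    public class VanControlStep
    {
        public static void step(RescueVanSim van, long time)
        {
            // egocentric vectors to visually sensed objects on channel 1
            Vec2[] targets = van.getVisualObjects(time, 1);
            if (targets.length > 0)
            {
                // pick the closest target and steer toward it
                Vec2 closest = targets[0];
                for (int i = 1; i < targets.length; i++)
                    if (targets[i].r < closest.r) closest = targets[i];
                van.setSteerHeading(time, closest.t);
            }

            // slow to 30% of base speed if any obstacle is within one meter
            Vec2[] obstacles = van.getObstacles(time);
            double speed = 1.0;
            for (int i = 0; i < obstacles.length; i++)
                if (obstacles[i].r < 1.0) speed = 0.3;
            van.setSpeed(time, speed);
        }
    }

Because getVisualObjects and getObstacles cache their results by timestamp, passing the current simulation time (or -1 to force a refresh) keeps repeated calls within one step cheap.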