/**
 * Vision / AprilTag constants: PhotonVision camera name, the camera's mounting
 * transform on the robot, and the trust levels (standard deviations) applied to
 * vision pose estimates before they are fused into the drive pose estimator.
 */
public static class Vision
{
    // TODO: Name — must match the camera name configured in the PhotonVision UI exactly.
    public static final String PHOTON_CAMERA_NAME = "photonCam";
    // Tags farther than this (~7.6 m) are treated as out of usable range.
    public static final double MAX_DETECTION_RANGE = Units.inchesToMeters(300);
    // Camera pose relative to robot center: X forward, Y left, Z up (meters).
    // NOTE(review): pitch of -30 deg tilts the camera UP in WPILib's CCW-positive
    // convention — confirm this matches the physical mount before relying on it.
    public static final Transform3d ROBOT_TO_CAMERA = new Transform3d(
        new Translation3d(0.5, 0.0, 0.5),
        new Rotation3d(0, Units.degreesToRadians(-30), 0)
    );
    // Measurement standard deviations fed to the pose estimator; lower = more trusted.
    // BASE_* apply to single-tag estimates, MULTI_* to multi-tag (more reliable) ones.
    public static final double VISION_STD_DEV_BASE_XY = 0.5; // meters
    public static final double VISION_STD_DEV_BASE_THETA = Units.degreesToRadians(30);
    public static final double VISION_STD_DEV_MULTI_XY = 0.1;
    public static final double VISION_STD_DEV_MULTI_THETA = Units.degreesToRadians(10);
    // Extra XY std dev (meters) added per meter of average distance to the visible tags.
    public static final double VISION_DISTANCE_SCALE = 0.2;
}
frc.robot.subsystems.drive.ModuleIOHardware; import frc.robot.subsystems.drive.ModuleIOSim; +import frc.robot.subsystems.vision.Vision; +import frc.robot.subsystems.vision.VisionIO; +import frc.robot.subsystems.vision.VisionIOPhotonLib; import frc.robot.subsystems.elevator.Elevator; import frc.robot.subsystems.elevator.ElevatorIO; import frc.robot.subsystems.elevator.ElevatorIOHardware; @@ -46,6 +49,8 @@ public class RobotContainer { // Subsystems private final Drive _drive; + @SuppressWarnings("unused") + private final Vision _vision; private final Elevator _elevator; private final Manipulator _manipulator; private final Funnel _funnel; @@ -67,6 +72,7 @@ public RobotContainer() case REAL: // Real robot, instantiate hardware IO implementations _drive = new Drive(new GyroIONavX(), new ModuleIOHardware(0), new ModuleIOHardware(1), new ModuleIOHardware(2), new ModuleIOHardware(3)); + _vision = new Vision(_drive, new VisionIOPhotonLib(_drive)); _elevator = new Elevator(new ElevatorIOHardware()); _manipulator = new Manipulator(new ManipulatorIOHardware()); _funnel = new Funnel(new FunnelIOHardware()); @@ -76,6 +82,7 @@ public RobotContainer() case SIM: // Sim robot, instantiate physics sim IO implementations _drive = new Drive(new GyroIO() {}, new ModuleIOSim(), new ModuleIOSim(), new ModuleIOSim(), new ModuleIOSim()); + _vision = new Vision(_drive, new VisionIOPhotonLib(_drive)); _elevator = new Elevator(new ElevatorIOSim()); _manipulator = new Manipulator(new ManipulatorIOSim(() -> _controller.leftTrigger().getAsBoolean())); _funnel = new Funnel(new FunnelIOSim()); @@ -85,6 +92,7 @@ public RobotContainer() default: // Replayed robot, disable IO implementations _drive = new Drive(new GyroIO() {}, new ModuleIO() {}, new ModuleIO() {}, new ModuleIO() {}, new ModuleIO() {}); + _vision = new Vision(_drive, new VisionIO() {}); _elevator = new Elevator(new ElevatorIO() {}); _manipulator = new Manipulator(new ManipulatorIO() {}); _funnel = new Funnel(new FunnelIO() {}); diff 
--git a/src/main/java/frc/robot/subsystems/drive/Drive.java b/src/main/java/frc/robot/subsystems/drive/Drive.java index a5592d9..748b3e6 100644 --- a/src/main/java/frc/robot/subsystems/drive/Drive.java +++ b/src/main/java/frc/robot/subsystems/drive/Drive.java @@ -22,6 +22,7 @@ import edu.wpi.first.hal.FRCNetComm.tInstances; import edu.wpi.first.hal.FRCNetComm.tResourceType; import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.VecBuilder; import edu.wpi.first.math.estimator.SwerveDrivePoseEstimator; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Rotation2d; @@ -32,6 +33,7 @@ import edu.wpi.first.math.kinematics.SwerveModuleState; import edu.wpi.first.math.numbers.N1; import edu.wpi.first.math.numbers.N3; +import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.Alert; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStation.Alliance; @@ -103,7 +105,11 @@ public Drive(GyroIO gyroIO, ModuleIO flModuleIO, ModuleIO frModuleIO, ModuleIO b Logger.recordOutput("Odometry/TrajectorySetpoint", targetPose); }); - _poseEstimator = new SwerveDrivePoseEstimator(_kinematics, new Rotation2d(), getModulePositions(), new Pose2d()); + // Configure state estimation with standard deviations + var stateStdDevs = VecBuilder.fill(0.05, 0.05, Units.degreesToRadians(5)); // x, y, heading + var visionStdDevs = VecBuilder.fill(0.5, 0.5, Units.degreesToRadians(30)); // Higher uncertainty for vision + + _poseEstimator = new SwerveDrivePoseEstimator(_kinematics, new Rotation2d(), getModulePositions(), new Pose2d(), stateStdDevs, visionStdDevs); // Create a list of bezier points from poses. Each pose represents one waypoint. // The rotation component of the pose should be the direction of travel. 
Do not diff --git a/src/main/java/frc/robot/subsystems/vision/Vision.java b/src/main/java/frc/robot/subsystems/vision/Vision.java new file mode 100644 index 0000000..99d3929 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/Vision.java @@ -0,0 +1,56 @@ +package frc.robot.subsystems.vision; + +import org.littletonrobotics.junction.Logger; + +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.wpilibj2.command.SubsystemBase; +import frc.robot.Constants; +import frc.robot.subsystems.drive.Drive; +import java.util.Arrays; + +public class Vision extends SubsystemBase +{ + private final VisionIO _io; + private final VisionIOInputsAutoLogged _inputs = new VisionIOInputsAutoLogged(); + private final Drive _drive; + + public Vision(Drive drive, VisionIO io) + { + _drive = drive; + _io = io; + } + + @Override + public void periodic() + { + _io.updateInputs(_inputs); + Logger.processInputs("Vision", _inputs); + + if (_inputs.hasPose) + { + double avgDistance = 0; + if (_inputs.targetDistances.length > 0) + { + avgDistance = Arrays.stream(_inputs.targetDistances).average().getAsDouble(); + } + + double xyStdDev; + double thetaStdDev; + + if (_inputs.numTargets > 1) + { + xyStdDev = Constants.Vision.VISION_STD_DEV_MULTI_XY + (avgDistance * Constants.Vision.VISION_DISTANCE_SCALE); + thetaStdDev = Constants.Vision.VISION_STD_DEV_MULTI_THETA; + } + else + { + xyStdDev = Constants.Vision.VISION_STD_DEV_BASE_XY + (avgDistance * Constants.Vision.VISION_DISTANCE_SCALE); + thetaStdDev = Constants.Vision.VISION_STD_DEV_BASE_THETA; + } + + var stdDevs = VecBuilder.fill(xyStdDev, xyStdDev, thetaStdDev); + + _drive.addVisionMeasurement(_inputs.pose, _inputs.captureTimestamp, stdDevs); + } + } +} \ No newline at end of file diff --git a/src/main/java/frc/robot/subsystems/vision/VisionIO.java b/src/main/java/frc/robot/subsystems/vision/VisionIO.java new file mode 100644 index 0000000..e2d2722 --- /dev/null +++ b/src/main/java/frc/robot/subsystems/vision/VisionIO.java @@ 
package frc.robot.subsystems.vision;

import org.littletonrobotics.junction.AutoLog;

import edu.wpi.first.math.geometry.Pose2d;

/**
 * Hardware-abstraction interface for the vision subsystem (AdvantageKit IO pattern).
 * Implementations fill {@link VisionIOInputs}; the default no-op body is used for replay.
 */
public interface VisionIO
{
    /** Logged inputs: latest pipeline result summary plus an optional robot pose estimate. */
    @AutoLog
    public static class VisionIOInputs
    {
        public double captureTimestamp = 0.0;  // seconds, FPGA timebase (from PhotonVision result)
        public Pose2d pose = new Pose2d();     // field-relative robot pose; valid only when hasPose
        public boolean hasTargets = false;
        public boolean hasPose = false;
        public int numTargets = 0;
        public double[] targetDistances = new double[] {}; // Target = specific april tag
        public int[] targetIds = new int[] {};
        public double[] targetYaws = new double[] {};
        public double[] targetPitches = new double[] {};
        public double[] targetAreas = new double[] {};
    }

    public default void updateInputs(VisionIOInputs inputs)
    {
    }
}

// ---------------------------------------------------------------------------
// VisionIOPhotonLib.java
// ---------------------------------------------------------------------------

package frc.robot.subsystems.vision;

import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;

import org.photonvision.EstimatedRobotPose;
import org.photonvision.PhotonCamera;
import org.photonvision.PhotonPoseEstimator;
import org.photonvision.PhotonPoseEstimator.PoseStrategy;
import org.photonvision.targeting.PhotonPipelineResult;
import org.photonvision.targeting.PhotonTrackedTarget;

import edu.wpi.first.apriltag.AprilTagFieldLayout;
import edu.wpi.first.apriltag.AprilTagFields;
import edu.wpi.first.math.geometry.Pose2d;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.networktables.NetworkTableEvent;
import edu.wpi.first.networktables.NetworkTableInstance;
import frc.robot.Constants;
import frc.robot.subsystems.drive.Drive;

/**
 * PhotonVision implementation of {@link VisionIO}. Runs multi-tag PNP on the
 * coprocessor (falling back to lowest-ambiguity for single-tag frames) against
 * the bundled AprilTag field layout.
 *
 * NOTE(review): results are consumed from TWO places — the NetworkTables listener
 * registered in the constructor AND {@link #updateInputs} — and both call
 * {@code _camera.getAllUnreadResults()}, which drains the same queue. Whichever
 * runs first sees a given frame; the other sees nothing. Confirm this dual-path
 * design is intentional; the state the listener caches (_distances, _targetIds,
 * _targetYaws, _numProcessedTargets, _hasPose) is never read by updateInputs.
 */
public class VisionIOPhotonLib implements VisionIO
{
    private final PhotonCamera _camera;
    private final AprilTagFieldLayout _fieldLayout;
    private final PhotonPoseEstimator _poseEstimator;
    // NOTE: "_captureTimesStamp" is a typo for captureTimestamp (kept as-is to avoid code changes).
    private double _captureTimesStamp = 0.0;
    private Pose2d _pose = new Pose2d();
    // Fields below are written by the NT listener thread but never read elsewhere (dead state).
    private boolean _hasPose = false;
    private double[] _distances = new double[] {};
    private int[] _targetIds = new int[] {};
    private double[] _targetYaws = new double[] {};
    private int _numProcessedTargets;

    /**
     * @param drive used by the listener to seed the pose estimator's reference pose
     */
    public VisionIOPhotonLib(Drive drive)
    {
        _camera = new PhotonCamera(Constants.Vision.PHOTON_CAMERA_NAME);

        try
        {
            // Load the season's default AprilTag field layout from the bundled resource.
            _fieldLayout = AprilTagFieldLayout.loadFromResource(AprilTagFields.kDefaultField.m_resourceFile);
        }
        catch (Exception e)
        {
            // Vision is unusable without a field layout; fail fast at construction.
            System.err.println("Failed to load AprilTag layout: " + e.getMessage());
            throw new RuntimeException(e);
        }

        _poseEstimator = new PhotonPoseEstimator(_fieldLayout, PoseStrategy.MULTI_TAG_PNP_ON_COPROCESSOR, Constants.Vision.ROBOT_TO_CAMERA);
        _poseEstimator.setMultiTagFallbackStrategy(PoseStrategy.LOWEST_AMBIGUITY);

        // Fires on the camera's latencyMillis entry updating, i.e. once per processed frame.
        // NOTE(review): this callback runs on an NT thread; _poseEstimator is also used from
        // updateInputs() without a common lock — only the field writes below are synchronized.
        NetworkTableInstance.getDefault().addListener(NetworkTableInstance.getDefault().getEntry("/photonvision/" + Constants.Vision.PHOTON_CAMERA_NAME + "/latencyMillis"), EnumSet.of(NetworkTableEvent.Kind.kValueRemote), event ->
        {
            var results = _camera.getAllUnreadResults();
            if (results.isEmpty())
            {
                return;
            }
            // get the latest result (last in the list)
            var result = results.get(results.size() - 1);

            double timestamp = result.getTimestampSeconds();
            List<PhotonTrackedTarget> targets = result.getTargets();

            List<Double> distances = new ArrayList<>();
            List<Integer> ids = new ArrayList<>();
            List<Double> yaws = new ArrayList<>();
            List<PhotonTrackedTarget> processedTargets = new ArrayList<>();

            // Process all targets
            for (PhotonTrackedTarget target : targets)
            {
                ids.add(target.getFiducialId());
                yaws.add(target.getYaw());

                // Planar (XY) distance from camera to tag; Z is ignored.
                var transform = target.getBestCameraToTarget();
                var distance = Math.hypot(transform.getX(), transform.getY());
                distances.add(distance);

                if (distance <= Constants.Vision.MAX_DETECTION_RANGE)
                {
                    processedTargets.add(target);
                }
            }

            // Get pose estimate from processed targets
            // NOTE(review): despite the comment above, the estimator consumes the FULL
            // `result` — the MAX_DETECTION_RANGE filter only affects the logged count,
            // not which tags contribute to the pose. Confirm whether filtering was intended.
            _poseEstimator.setReferencePose(drive.getPose());
            Optional<EstimatedRobotPose> estimatedPose = _poseEstimator.update(result);
            Pose2d pose = new Pose2d();
            boolean hasPose = false;

            if (estimatedPose.isPresent())
            {
                pose = estimatedPose.get().estimatedPose.toPose2d();
                hasPose = true;
            }

            // Publish the frame's derived state atomically with respect to updateInputs().
            synchronized (VisionIOPhotonLib.this)
            {
                _captureTimesStamp = timestamp;
                _distances = distances.stream().mapToDouble(Double::doubleValue).toArray();
                _pose = pose;
                _numProcessedTargets = processedTargets.size();
                _hasPose = hasPose;
                _targetIds = ids.stream().mapToInt(Integer::intValue).toArray();
                _targetYaws = yaws.stream().mapToDouble(Double::doubleValue).toArray();
            }
        });
    }

    /** Seeds the estimator with the previous pose, then solves for this frame's pose. */
    private Optional<EstimatedRobotPose> getEstimatedGlobalPose(Pose2d prevEstimatedRobotPose, PhotonPipelineResult result)
    {
        _poseEstimator.setReferencePose(prevEstimatedRobotPose);
        return _poseEstimator.update(result);
    }

    /**
     * Fills {@code inputs} from the newest unread pipeline result.
     * NOTE(review): competes with the constructor's NT listener for unread results (see class doc).
     */
    @Override
    public synchronized void updateInputs(VisionIOInputs inputs)
    {
        var results = _camera.getAllUnreadResults();
        if (results.isEmpty())
        {
            // No results available
            inputs.hasTargets = false;
            inputs.numTargets = 0;
            inputs.targetIds = new int[0];
            inputs.targetYaws = new double[0];
            inputs.targetPitches = new double[0];
            inputs.targetAreas = new double[0];
            inputs.targetDistances = new double[0];
            inputs.hasPose = false;
            return;
        }

        // Only the newest result is used; intermediate frames are discarded.
        var result = results.get(results.size() - 1);

        inputs.captureTimestamp = result.getTimestampSeconds();
        inputs.hasTargets = result.hasTargets();

        if (inputs.hasTargets)
        {
            List<PhotonTrackedTarget> targets = result.getTargets();
            int numTargets = targets.size();

            inputs.targetIds = new int[numTargets];
            inputs.targetYaws = new double[numTargets];
            inputs.targetPitches = new double[numTargets];
            inputs.targetAreas = new double[numTargets];
            inputs.targetDistances = new double[numTargets];

            for (int i = 0; i < numTargets; i++)
            {
                PhotonTrackedTarget target = targets.get(i);
                inputs.targetIds[i] = target.getFiducialId();
                inputs.targetYaws[i] = target.getYaw();
                inputs.targetPitches[i] = target.getPitch();
                inputs.targetAreas[i] = target.getArea();

                // Planar (XY) distance from camera to tag, matching the listener's convention.
                Transform3d bestCameraToTarget = target.getBestCameraToTarget();
                inputs.targetDistances[i] = Math.hypot(bestCameraToTarget.getX(), bestCameraToTarget.getY());
            }

            inputs.numTargets = numTargets;

            // Update pose estimate if we have new data
            // NOTE(review): when the timestamp is unchanged, inputs.hasPose is NOT written and
            // keeps its previous value. Since the caller reuses the same inputs object each
            // loop, a stale true could cause the same pose/timestamp to be fused again — verify.
            if (_captureTimesStamp != inputs.captureTimestamp)
            {
                Optional<EstimatedRobotPose> poseEstimate = getEstimatedGlobalPose(_pose, result);
                if (poseEstimate.isPresent())
                {
                    inputs.pose = poseEstimate.get().estimatedPose.toPose2d();
                    inputs.hasPose = true;
                    _pose = inputs.pose;
                }
                else
                {
                    inputs.hasPose = false;
                }
                _captureTimesStamp = inputs.captureTimestamp;
            }
        }
        else
        {
            // No targets visible
            inputs.numTargets = 0;
            inputs.targetIds = new int[0];
            inputs.targetYaws = new double[0];
            inputs.targetPitches = new double[0];
            inputs.targetAreas = new double[0];
            inputs.targetDistances = new double[0];
            inputs.hasPose = false;
        }
    }
}