[examples] Add example programs for AprilTags detection (#4932)
Co-authored-by: Peter Johnson <[email protected]>
fovea1959 and PeterJohnson authored Jan 14, 2023
1 parent 1e05b21 commit cf1a411
Showing 9 changed files with 362 additions and 3 deletions.
1 change: 1 addition & 0 deletions wpilibcExamples/.styleguide
@@ -11,6 +11,7 @@ cppSrcFileInclude {
includeOtherLibs {
^cameraserver/
^cscore
^fmt/
^frc/
^frc2/
^hal/
165 changes: 165 additions & 0 deletions wpilibcExamples/src/main/cpp/examples/AprilTagsVision/cpp/Robot.cpp
@@ -0,0 +1,165 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

#include <cstdio>
#include <span>
#include <sstream>
#include <string>
#include <thread>

#include <cameraserver/CameraServer.h>
#include <fmt/format.h>
#include <frc/TimedRobot.h>
#include <frc/apriltag/AprilTagDetection.h>
#include <frc/apriltag/AprilTagDetector.h>
#include <frc/apriltag/AprilTagPoseEstimator.h>
#include <frc/geometry/Transform3d.h>
#include <networktables/IntegerArrayTopic.h>
#include <networktables/NetworkTableInstance.h>
#include <opencv2/core/core.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <units/angle.h>
#include <units/length.h>

/**
* This is a demo program showing the detection of AprilTags.
* The image is acquired from the USB camera, then any detected AprilTags
* are marked up on the image and sent to the dashboard.
*
* Be aware that the performance of this approach is much worse than a
* coprocessor solution!
*/
class Robot : public frc::TimedRobot {
#if defined(__linux__) || defined(_WIN32)

private:
static void VisionThread() {
frc::AprilTagDetector detector;
// look for tag16h5, don't correct any error bits
detector.AddFamily("tag16h5", 0);

// Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000
// (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21)
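// fx/fy are the camera's focal length and cx/cy its focal center, in pixels.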
frc::AprilTagPoseEstimator::Config poseEstConfig = {
.tagSize = units::length::inch_t(6.0),
.fx = 699.3778103158814,
.fy = 677.7161226393544,
.cx = 345.6059345433618,
.cy = 207.12741326228522};
frc::AprilTagPoseEstimator estimator(poseEstConfig);

// Get the USB camera from CameraServer
cs::UsbCamera camera = frc::CameraServer::StartAutomaticCapture();
// Set the resolution
camera.SetResolution(640, 480);

// Get a CvSink. This will capture Mats from the Camera
cs::CvSink cvSink = frc::CameraServer::GetVideo();
// Set up a CvSource. This will send images back to the Dashboard
cs::CvSource outputStream =
frc::CameraServer::PutVideo("Detected", 640, 480);

// Mats are very memory expensive. Let's reuse these Mats.
cv::Mat mat;
cv::Mat grayMat;

// Instantiate once
std::vector<int64_t> tags;
cv::Scalar outlineColor{0, 255, 0};
cv::Scalar crossColor{0, 0, 255};

// We'll output to NT
auto tagsTable =
nt::NetworkTableInstance::GetDefault().GetTable("apriltags");
auto pubTags = tagsTable->GetIntegerArrayTopic("tags").Publish();

while (true) {
// Tell the CvSink to grab a frame from the camera and
// put it in the source mat. If there is an error notify the
// output.
if (cvSink.GrabFrame(mat) == 0) {
// Send the error to the output.
outputStream.NotifyError(cvSink.GetError());
// skip the rest of the current iteration
continue;
}

cv::cvtColor(mat, grayMat, cv::COLOR_BGR2GRAY);
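// The detector expects single-channel 8-bit grayscale data.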

cv::Size g_size = grayMat.size();
frc::AprilTagDetector::Results detections =
detector.Detect(g_size.width, g_size.height, grayMat.data);

// have not seen any tags yet
tags.clear();

for (const frc::AprilTagDetection* detection : detections) {
// remember we saw this tag
tags.push_back(detection->GetId());

// draw lines around the tag
for (int i = 0; i <= 3; i++) {
int j = (i + 1) % 4;
const frc::AprilTagDetection::Point pti = detection->GetCorner(i);
const frc::AprilTagDetection::Point ptj = detection->GetCorner(j);
line(mat, cv::Point(pti.x, pti.y), cv::Point(ptj.x, ptj.y),
outlineColor, 2);
}

// mark the center of the tag
const frc::AprilTagDetection::Point c = detection->GetCenter();
int ll = 10;
line(mat, cv::Point(c.x - ll, c.y), cv::Point(c.x + ll, c.y),
crossColor, 2);
line(mat, cv::Point(c.x, c.y - ll), cv::Point(c.x, c.y + ll),
crossColor, 2);

// identify the tag
putText(mat, std::to_string(detection->GetId()),
cv::Point(c.x + ll, c.y), cv::FONT_HERSHEY_SIMPLEX, 1,
crossColor, 3);

// determine pose
frc::Transform3d pose = estimator.Estimate(*detection);

// put pose into NT
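// (translation in meters, rotation in radians)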
frc::Rotation3d rotation = pose.Rotation();
tagsTable->GetEntry(fmt::format("pose_{}", detection->GetId()))
.SetDoubleArray(
{{ pose.X().value(),
pose.Y().value(),
pose.Z().value(),
rotation.X().value(),
rotation.Y().value(),
rotation.Z().value() }});
}

// put list of tags onto NT
pubTags.Set(tags);

// Give the output stream a new image to display
outputStream.PutFrame(mat);
}
}
#endif

void RobotInit() override {
// We need to run our vision program in a separate thread; otherwise our
// robot program will not run.
#if defined(__linux__) || defined(_WIN32)
std::thread visionThread(VisionThread);
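// Detach the thread so it keeps running after RobotInit() returns.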
visionThread.detach();
#else
std::fputs("Vision only available on Linux or Windows.\n", stderr);
std::fflush(stderr);
#endif
}
};

#ifndef RUNNING_FRC_TESTS
int main() {
return frc::StartRobot<Robot>();
}
#endif
@@ -5,7 +5,6 @@
#include "Robot.h"

#include <fmt/format.h>

#include <frc/DriverStation.h>
#include <frc/Timer.h>

11 changes: 11 additions & 0 deletions wpilibcExamples/src/main/cpp/examples/examples.json
@@ -259,6 +259,17 @@
"gradlebase": "cpp",
"commandversion": 2
},
{
"name": "AprilTags Vision",
"description": "on-roboRIO detection of AprilTags using an attached USB camera.",
"tags": [
"Vision",
"AprilTags"
],
"foldername": "AprilTagsVision",
"gradlebase": "cpp",
"commandversion": 2
},
{
"name": "I2C Communication",
"description": "An example program that communicates with external devices (such as an Arduino) using the roboRIO's I2C port",
1 change: 0 additions & 1 deletion wpilibcExamples/src/main/cpp/templates/timed/cpp/Robot.cpp
@@ -5,7 +5,6 @@
#include "Robot.h"

#include <fmt/core.h>

#include <frc/smartdashboard/SmartDashboard.h>

void Robot::RobotInit() {
@@ -2,11 +2,11 @@
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

#include <fmt/format.h>
#include <gtest/gtest.h>

#include <array>

#include <fmt/format.h>
#include <frc/AddressableLED.h>
#include <frc/simulation/AddressableLEDSim.h>
#include <frc/util/Color.h>
@@ -0,0 +1,25 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.wpilibj.examples.apriltagsvision;

import edu.wpi.first.wpilibj.RobotBase;

/**
* Do NOT add any static variables to this class, or any initialization at all. Unless you know what
* you are doing, do not modify this file except to change the parameter class to the startRobot
* call.
*/
public final class Main {
private Main() {}

/**
* Main initialization function. Do not perform any initialization here.
*
* <p>If you change your main robot class, change the parameter type.
*/
public static void main(String... args) {
RobotBase.startRobot(Robot::new);
}
}
@@ -0,0 +1,147 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.wpilibj.examples.apriltagsvision;

import edu.wpi.first.apriltag.AprilTagDetection;
import edu.wpi.first.apriltag.AprilTagDetector;
import edu.wpi.first.apriltag.AprilTagPoseEstimator;
import edu.wpi.first.cameraserver.CameraServer;
import edu.wpi.first.cscore.CvSink;
import edu.wpi.first.cscore.CvSource;
import edu.wpi.first.cscore.UsbCamera;
import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.networktables.IntegerArrayPublisher;
import edu.wpi.first.networktables.NetworkTable;
import edu.wpi.first.networktables.NetworkTableInstance;
import edu.wpi.first.wpilibj.TimedRobot;
import java.util.ArrayList;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

/**
* This is a demo program showing the detection of AprilTags. The image is acquired from the USB
* camera, then any detected AprilTags are marked up on the image and sent to the dashboard.
*
* <p>Be aware that the performance of this approach is much worse than a coprocessor solution!
*/
public class Robot extends TimedRobot {
@Override
public void robotInit() {
var visionThread = new Thread(() -> apriltagVisionThreadProc());
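// Mark the thread as a daemon so it does not keep the JVM alive on shutdown.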
visionThread.setDaemon(true);
visionThread.start();
}

void apriltagVisionThreadProc() {
var detector = new AprilTagDetector();
// look for tag16h5, don't correct any error bits
detector.addFamily("tag16h5", 0);

// Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000
// (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21)
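// The first argument is the tag size in meters (6 in = 0.1524 m); the rest are
// fx, fy (focal length) and cx, cy (focal center), in pixels.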
var poseEstConfig =
new AprilTagPoseEstimator.Config(
0.1524, 699.3778103158814, 677.7161226393544, 345.6059345433618, 207.12741326228522);
var estimator = new AprilTagPoseEstimator(poseEstConfig);

// Get the UsbCamera from CameraServer
UsbCamera camera = CameraServer.startAutomaticCapture();
// Set the resolution
camera.setResolution(640, 480);

// Get a CvSink. This will capture Mats from the camera
CvSink cvSink = CameraServer.getVideo();
// Set up a CvSource. This will send images back to the Dashboard
CvSource outputStream = CameraServer.putVideo("Detected", 640, 480);

// Mats are very memory expensive. Let's reuse these.
var mat = new Mat();
var grayMat = new Mat();

// Instantiate once
ArrayList<Long> tags = new ArrayList<>();
var outlineColor = new Scalar(0, 255, 0);
var crossColor = new Scalar(0, 0, 255);

// We'll output to NT
NetworkTable tagsTable = NetworkTableInstance.getDefault().getTable("apriltags");
IntegerArrayPublisher pubTags = tagsTable.getIntegerArrayTopic("tags").publish();

// This loop cannot use 'while (true)'; the program would never exit if it
// did. Checking Thread.interrupted() lets the robot stop this thread when
// restarting robot code or deploying.
while (!Thread.interrupted()) {
// Tell the CvSink to grab a frame from the camera and put it
// in the source mat. If there is an error notify the output.
if (cvSink.grabFrame(mat) == 0) {
// Send the error to the output.
outputStream.notifyError(cvSink.getError());
// skip the rest of the current iteration
continue;
}

Imgproc.cvtColor(mat, grayMat, Imgproc.COLOR_RGB2GRAY);
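// The detector works on a single-channel grayscale image.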

AprilTagDetection[] detections = detector.detect(grayMat);

// have not seen any tags yet
tags.clear();

for (AprilTagDetection detection : detections) {
// remember we saw this tag
tags.add((long) detection.getId());

// draw lines around the tag
for (var i = 0; i <= 3; i++) {
var j = (i + 1) % 4;
var pt1 = new Point(detection.getCornerX(i), detection.getCornerY(i));
var pt2 = new Point(detection.getCornerX(j), detection.getCornerY(j));
Imgproc.line(mat, pt1, pt2, outlineColor, 2);
}

// mark the center of the tag
var cx = detection.getCenterX();
var cy = detection.getCenterY();
var ll = 10;
Imgproc.line(mat, new Point(cx - ll, cy), new Point(cx + ll, cy), crossColor, 2);
Imgproc.line(mat, new Point(cx, cy - ll), new Point(cx, cy + ll), crossColor, 2);

// identify the tag
Imgproc.putText(
mat,
Integer.toString(detection.getId()),
new Point(cx + ll, cy),
Imgproc.FONT_HERSHEY_SIMPLEX,
1,
crossColor,
3);

// determine pose
Transform3d pose = estimator.estimate(detection);

// put pose into dashboard
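// (translation in meters, rotation in radians)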
Rotation3d rot = pose.getRotation();
tagsTable
.getEntry("pose_" + detection.getId())
.setDoubleArray(
new double[] {
pose.getX(), pose.getY(), pose.getZ(), rot.getX(), rot.getY(), rot.getZ()
});
}

// put list of tags onto dashboard
pubTags.set(tags.stream().mapToLong(Long::longValue).toArray());

// Give the output stream a new image to display
outputStream.putFrame(mat);
}

pubTags.close();
detector.close();
}
}
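
For reference, the example above publishes the IDs of the currently visible tags as an integer array at "apriltags/tags", plus one "apriltags/pose_<id>" double-array entry per tag (x, y, z in meters, then roll, pitch, yaw in radians). The following sketch is not part of this commit; it shows one way another NetworkTables client could read those topics. The class name and the commented-out client setup (identity string, team number) are illustrative assumptions.

// Minimal sketch (not part of this commit): consuming the topics published by
// the AprilTagsVision example from another NetworkTables client.
import edu.wpi.first.networktables.IntegerArraySubscriber;
import edu.wpi.first.networktables.NetworkTable;
import edu.wpi.first.networktables.NetworkTableInstance;

public final class AprilTagsReader {
  private AprilTagsReader() {}

  public static void main(String... args) {
    NetworkTableInstance inst = NetworkTableInstance.getDefault();
    // When running off-robot, connect as an NT4 client first (identity and team
    // number below are placeholders):
    // inst.startClient4("apriltags-reader");
    // inst.setServerTeam(9999);

    NetworkTable tagsTable = inst.getTable("apriltags");
    IntegerArraySubscriber tagsSub =
        tagsTable.getIntegerArrayTopic("tags").subscribe(new long[] {});

    // Read the currently visible tag IDs, then look up the pose entry for each.
    for (long id : tagsSub.get()) {
      double[] pose = tagsTable.getEntry("pose_" + id).getDoubleArray(new double[] {});
      if (pose.length == 6) {
        System.out.printf("tag %d: x=%.2f y=%.2f z=%.2f (m)%n", id, pose[0], pose[1], pose[2]);
      }
    }
  }
}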