Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[examples] Add example programs for AprilTags detection #4932

Merged
merged 19 commits into from
Jan 14, 2023
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
174 changes: 174 additions & 0 deletions wpilibcExamples/src/main/cpp/examples/AprilTagsVision/cpp/Robot.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

#include <algorithm>
#include <cstdio>
#include <iterator>
#include <span>
#include <sstream>
#include <string>
#include <thread>
#include <vector>

#include <cameraserver/CameraServer.h>
#include <frc/TimedRobot.h>
#include <frc/apriltag/AprilTagDetection.h>
#include <frc/apriltag/AprilTagDetector.h>
#include <frc/apriltag/AprilTagPoseEstimator.h>
#include <frc/geometry/Rotation3d.h>
#include <frc/geometry/Transform3d.h>
#include <frc/smartdashboard/SmartDashboard.h>
#include <opencv2/core/core.hpp>
#include <opencv2/core/types.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <units/angle.h>
#include <units/length.h>

/**
 * This is a demo program showing the detection of AprilTags.
 * The image is acquired from the USB camera, then any detected AprilTags
 * are marked up on the image and sent to the dashboard.
 *
 * Be aware that the performance on this is much worse than a coprocessor
 * solution!
 */
class Robot : public frc::TimedRobot {
#if defined(__linux__) || defined(_WIN32)

 private:
  /**
   * Vision pipeline, run on its own thread (see RobotInit): grabs frames
   * from the USB camera, detects tag16h5 AprilTags, annotates each frame,
   * estimates every tag's 3D pose, and publishes tag IDs and poses to
   * SmartDashboard. Never returns.
   */
  static void VisionThread() {
    frc::AprilTagDetector detector;
    // look for tag16h5, don't correct any error bits
    detector.AddFamily("tag16h5", 0);

    // Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000
    // (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21)
    frc::AprilTagPoseEstimator::Config poseEstConfig = {
        .tagSize = units::length::inch_t(6.0),
        .fx = 699.3778103158814,
        .fy = 677.7161226393544,
        .cx = 345.6059345433618,
        .cy = 207.12741326228522};
    frc::AprilTagPoseEstimator estimator(poseEstConfig);

    // Get the USB camera from CameraServer
    cs::UsbCamera camera = frc::CameraServer::StartAutomaticCapture();
    // Set the resolution
    camera.SetResolution(640, 480);

    // Get a CvSink. This will capture Mats from the Camera
    cs::CvSink cvSink = frc::CameraServer::GetVideo();
    // Setup a CvSource. This will send images back to the Dashboard
    cs::CvSource outputStream =
        frc::CameraServer::PutVideo("Detected", 640, 480);

    // Mats are very memory expensive. Lets reuse this Mat.
    cv::Mat mat;
    cv::Mat grayMat;

    // Instantiate once
    std::vector<int> tags;
    cv::Scalar outlineColor{0, 255, 0};
    cv::Scalar crossColor{0, 0, 255};

    while (true) {
      // Tell the CvSink to grab a frame from the camera and
      // put it in the source mat. If there is an error notify the
      // output.
      if (cvSink.GrabFrame(mat) == 0) {
        // Send the output the error.
        outputStream.NotifyError(cvSink.GetError());
        // skip the rest of the current iteration
        continue;
      }

      // Detection runs on a single-channel image; camera frames are BGR.
      cv::cvtColor(mat, grayMat, cv::COLOR_BGR2GRAY);

      cv::Size g_size = grayMat.size();
      frc::AprilTagDetector::Results detections =
          detector.Detect(g_size.width, g_size.height, grayMat.data);

      // have not seen any tags yet
      tags.clear();

      for (const frc::AprilTagDetection* detection : detections) {
        // remember we saw this tag
        tags.push_back(detection->GetId());

        // draw lines around the tag
        for (int i = 0; i <= 3; i++) {
          int j = (i + 1) % 4;
          const frc::AprilTagDetection::Point pti = detection->GetCorner(i);
          const frc::AprilTagDetection::Point ptj = detection->GetCorner(j);
          line(mat, cv::Point(pti.x, pti.y), cv::Point(ptj.x, ptj.y),
               outlineColor, 2);
        }

        // mark the center of the tag
        const frc::AprilTagDetection::Point c = detection->GetCenter();
        int ll = 10;
        line(mat, cv::Point(c.x - ll, c.y), cv::Point(c.x + ll, c.y),
             crossColor, 2);
        line(mat, cv::Point(c.x, c.y - ll), cv::Point(c.x, c.y + ll),
             crossColor, 2);

        // identify the tag
        putText(mat, std::to_string(detection->GetId()),
                cv::Point(c.x + ll, c.y), cv::FONT_HERSHEY_SIMPLEX, 1,
                crossColor, 3);

        // determine pose
        frc::Transform3d pose = estimator.Estimate(*detection);

        // put pose into dashboard
        std::stringstream dashboardString;
        dashboardString << "Translation: ";
        dashboardString << units::length::to_string(pose.X());
        dashboardString << ", ";
        dashboardString << units::length::to_string(pose.Y());
        dashboardString << ", ";
        dashboardString << units::length::to_string(pose.Z());
        frc::Rotation3d rotation = pose.Rotation();
        dashboardString << "; Rotation: ";
        dashboardString << units::angle::to_string(rotation.X());
        dashboardString << ", ";
        dashboardString << units::angle::to_string(rotation.Y());
        dashboardString << ", ";
        dashboardString << units::angle::to_string(rotation.Z());
        frc::SmartDashboard::PutString(
            "pose_" + std::to_string(detection->GetId()),
            dashboardString.str());
      }

      // put list of tags onto dashboard, comma-separated (no trailing comma)
      std::stringstream tags_s;
      if (tags.size() > 0) {
        if (tags.size() > 1) {
          std::copy(tags.begin(), tags.end() - 1,
                    std::ostream_iterator<int>(tags_s, ","));
        }
        tags_s << tags.back();
      }
      frc::SmartDashboard::PutString("tags", tags_s.str());

      // Give the output stream a new image to display
      outputStream.PutFrame(mat);
    }
  }
#endif

  void RobotInit() override {
    // We need to run our vision program in a separate thread. If not, our robot
    // program will not run.
#if defined(__linux__) || defined(_WIN32)
    std::thread visionThread(VisionThread);
    visionThread.detach();
#else
    std::fputs("Vision only available on Linux or Windows.\n", stderr);
    std::fflush(stderr);
#endif
  }
};

#ifndef RUNNING_FRC_TESTS
// Program entry point: hands control to the WPILib framework, which constructs
// Robot and runs its lifecycle. Excluded from test builds, which provide their
// own main().
int main() {
return frc::StartRobot<Robot>();
}
#endif
11 changes: 11 additions & 0 deletions wpilibcExamples/src/main/cpp/examples/examples.json
Original file line number Diff line number Diff line change
Expand Up @@ -259,6 +259,17 @@
"gradlebase": "cpp",
"commandversion": 2
},
{
"name": "AprilTags Vision",
"description": "on-roboRIO detection of AprilTags using an attached USB camera.",
"tags": [
"Vision",
"AprilTags"
],
"foldername": "AprilTagsVision",
"gradlebase": "cpp",
"commandversion": 2
},
{
"name": "I2C Communication",
"description": "An example program that communicates with external devices (such as an Arduino) using the roboRIO's I2C port",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.wpilibj.examples.apriltagsvision;

import edu.wpi.first.wpilibj.RobotBase;

/**
 * Robot program entry point. Do NOT add static variables or any other initialization to this
 * class; unless you know exactly what you are doing, the only change you should ever make here is
 * the robot class passed to the {@code startRobot} call.
 */
public final class Main {
  /** Utility class: never instantiated. */
  private Main() {}

  /**
   * Main initialization function. Perform no initialization here.
   *
   * <p>If you rename or replace your main robot class, update the supplier below.
   */
  public static void main(String... args) {
    RobotBase.startRobot(() -> new Robot());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
// Copyright (c) FIRST and other WPILib contributors.
// Open Source Software; you can modify and/or share it under the terms of
// the WPILib BSD license file in the root directory of this project.

package edu.wpi.first.wpilibj.examples.apriltagsvision;

import edu.wpi.first.apriltag.AprilTagDetection;
import edu.wpi.first.apriltag.AprilTagDetector;
import edu.wpi.first.apriltag.AprilTagPoseEstimator;
import edu.wpi.first.cameraserver.CameraServer;
import edu.wpi.first.cscore.CvSink;
import edu.wpi.first.cscore.CvSource;
import edu.wpi.first.cscore.UsbCamera;
import edu.wpi.first.math.geometry.Transform3d;
import edu.wpi.first.wpilibj.TimedRobot;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import java.util.ArrayList;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

/**
 * This is a demo program showing the detection of AprilTags. The image is acquired from the USB
 * camera, then any detected AprilTags are marked up on the image and sent to the dashboard.
 *
 * <p>Be aware that the performance on this is much worse than a coprocessor solution!
 */
public class Robot extends TimedRobot {
  @Override
  public void robotInit() {
    // Vision runs on its own daemon thread so it never blocks the main robot loop.
    var visionThread = new Thread(() -> apriltagVisionThreadProc());
    visionThread.setDaemon(true);
    visionThread.start();
  }

  /**
   * Vision pipeline: grabs frames from the USB camera, detects tag16h5 AprilTags, annotates each
   * frame, estimates every tag's 3D pose, and publishes tag IDs and poses to SmartDashboard. Runs
   * until the thread is interrupted.
   */
  void apriltagVisionThreadProc() {
    var detector = new AprilTagDetector();
    try {
      // look for tag16h5, don't correct any error bits
      detector.addFamily("tag16h5", 0);

      // Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000
      // (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21)
      var poseEstConfig =
          new AprilTagPoseEstimator.Config(
              0.1524, 699.3778103158814, 677.7161226393544, 345.6059345433618, 207.12741326228522);
      var estimator = new AprilTagPoseEstimator(poseEstConfig);

      // Get the UsbCamera from CameraServer
      UsbCamera camera = CameraServer.startAutomaticCapture();
      // Set the resolution
      camera.setResolution(640, 480);

      // Get a CvSink. This will capture Mats from the camera
      CvSink cvSink = CameraServer.getVideo();
      // Setup a CvSource. This will send images back to the Dashboard
      CvSource outputStream = CameraServer.putVideo("Detected", 640, 480);

      // Mats are very memory expensive. Lets reuse these.
      var mat = new Mat();
      var grayMat = new Mat();

      // Instantiate once
      ArrayList<Integer> tags = new ArrayList<>();
      var outlineColor = new Scalar(0, 255, 0);
      var crossColor = new Scalar(0, 0, 255);

      // This cannot be 'true'. The program will never exit if it is. This
      // lets the robot stop this thread when restarting robot code or
      // deploying.
      while (!Thread.interrupted()) {
        // Tell the CvSink to grab a frame from the camera and put it
        // in the source mat. If there is an error notify the output.
        if (cvSink.grabFrame(mat) == 0) {
          // Send the output the error.
          outputStream.notifyError(cvSink.getError());
          // skip the rest of the current iteration
          continue;
        }

        // Camera frames are BGR-ordered; convert to grayscale for detection
        // (matches the C++ example).
        Imgproc.cvtColor(mat, grayMat, Imgproc.COLOR_BGR2GRAY);

        AprilTagDetection[] detections = detector.detect(grayMat);

        // have not seen any tags yet
        tags.clear();

        for (AprilTagDetection detection : detections) {
          // remember we saw this tag
          tags.add(detection.getId());

          // draw lines around the tag
          for (var i = 0; i <= 3; i++) {
            var j = (i + 1) % 4;
            var pt1 = new Point(detection.getCornerX(i), detection.getCornerY(i));
            var pt2 = new Point(detection.getCornerX(j), detection.getCornerY(j));
            Imgproc.line(mat, pt1, pt2, outlineColor, 2);
          }

          // mark the center of the tag
          var cx = detection.getCenterX();
          var cy = detection.getCenterY();
          var ll = 10;
          Imgproc.line(mat, new Point(cx - ll, cy), new Point(cx + ll, cy), crossColor, 2);
          Imgproc.line(mat, new Point(cx, cy - ll), new Point(cx, cy + ll), crossColor, 2);

          // identify the tag
          Imgproc.putText(
              mat,
              Integer.toString(detection.getId()),
              new Point(cx + ll, cy),
              Imgproc.FONT_HERSHEY_SIMPLEX,
              1,
              crossColor,
              3);

          // determine pose
          Transform3d pose = estimator.estimate(detection);

          // put pose into dashboard
          var dashboardString = pose.toString();
          SmartDashboard.putString("pose_" + detection.getId(), dashboardString);
        }

        // put list of tags onto dashboard
        SmartDashboard.putString("tags", tags.toString());

        // Give the output stream a new image to display
        outputStream.putFrame(mat);
      }
    } finally {
      // The detector wraps native resources; release them even if the loop throws.
      detector.close();
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -295,6 +295,18 @@
"mainclass": "Main",
"commandversion": 2
},
{
"name": "AprilTags Vision",
"description": "on-roboRIO detection of AprilTags using an attached USB camera.",
"tags": [
"Vision",
"AprilTags"
],
"foldername": "apriltagsvision",
"gradlebase": "java",
"mainclass": "Main",
"commandversion": 2
},
{
"name": "Shuffleboard Sample",
"description": "An example program that adds data to various Shuffleboard tabs that demonstrates the Shuffleboard API",
Expand Down