Mirror of https://github.com/TimeCrafters/CenterStage (synced 2025-12-15 08:42:35 +00:00)
RedCrab: Refactor to allow the use of multiple different vision processors, and update the vision processors to expose their results for telemetry.
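As context for the diff below: RedCrabMinibot drops its final tfod/visionPortal pair in favor of nullable fields for several processors (tfPixel, teamProp, spikeMark, visionPortal) that an autonomous state wires up on demand, and each processor gains getters so its latest result can be reported over telemetry. A rough wiring sketch, not code from this commit, assuming the SDK's VisionPortal.Builder, no-arg constructors for the two custom processors, and an OpMode context where hardwareMap is in scope:

// Sketch only: one VisionPortal hosting every processor RedCrabMinibot now exposes.
TfodProcessor tfPixel = TfodProcessor.easyCreateWithDefaults();
TeamPropVisionProcessor teamProp = new TeamPropVisionProcessor();
SpikeMarkDetectorVisionProcessor spikeMark = new SpikeMarkDetectorVisionProcessor();

VisionPortal visionPortal = new VisionPortal.Builder()
        .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
        .addProcessors(tfPixel, teamProp, spikeMark)
        .build();

The commit itself still attaches only tfPixel (via VisionPortal.easyCreateWithDefaults in PathSelector), but the new fields leave room for the other processors to share the same portal.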
@@ -11,18 +11,14 @@ import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.IMU;
import com.qualcomm.robotcore.hardware.Servo;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.CurrentUnit;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
import org.timecrafters.TimeCraftersConfigurationTool.library.TimeCraftersConfiguration;
import org.timecrafters.TimeCraftersConfigurationTool.library.backend.config.Action;
import org.timecrafters.TimeCraftersConfigurationTool.library.backend.config.Variable;

import java.sql.Time;

import dev.cyberarm.engine.V2.CyberarmEngine;
import dev.cyberarm.engine.V2.Utilities;

@@ -80,6 +76,7 @@ public class RedCrabMinibot {

public TimeCraftersConfiguration config;
private final PIDController clawArmPIDController;
public final String webcamName = "Webcam 1";

public enum Path {
LEFT,
@@ -87,8 +84,16 @@ public class RedCrabMinibot {
RIGHT
}

public final TfodProcessor tfod;
public final VisionPortal visionPortal;
/* --- VisionProcessors --- */
/// Tensorflow Lite Pixel detector.
/// NOTE: detects april tags as pixels, use with caution!
public TfodProcessor tfPixel = null;
/// TeamProp detector: using OpenCV for subframe saturation threshold detection.
public TeamPropVisionProcessor teamProp = null;
/// Spike Mark detector: using OpenCV for full frame saturation threshold detection.
public SpikeMarkDetectorVisionProcessor spikeMark = null;
/// VisionPortal that hosts the vision processors above.
public VisionPortal visionPortal = null;

public RedCrabMinibot(boolean autonomous) {
engine = CyberarmEngine.instance;
@@ -96,15 +101,6 @@ public class RedCrabMinibot {
config = new TimeCraftersConfiguration("cyberarm_RedCrab");
loadConstants();

if (autonomous) {
tfod = TfodProcessor.easyCreateWithDefaults();
visionPortal = VisionPortal.easyCreateWithDefaults(
engine.hardwareMap.get(WebcamName.class, "Webcam 1"), tfod);
} else {
tfod = null;
visionPortal = null;
}

/// IMU ///
/// ------------------------------------------------------------------------------------ ///
imu = engine.hardwareMap.get(IMU.class, "imu"); // | Ctrl Hub, I2C Port: 0

@@ -27,22 +27,24 @@ public class SpikeMarkDetectorVisionProcessor implements VisionProcessor {

private Selection selection = Selection.NONE;

/// NOTE: Rect defined with a 480x640 (WxH) frame size assumed
private Rect rect = new Rect(1, 1, 479, 639);
/// NOTE: Rect defined with a 640x480 (WxH) frame size assumed
private final Rect rect = new Rect(1, 1, 639, 479);
private Mat subMat = new Mat();
private Mat rotatedMat = new Mat();
private Mat hsvMat = new Mat();

private double saturation;

@Override
public void init(int width, int height, CameraCalibration calibration) {
}

@Override
public Object processFrame(Mat frame, long captureTimeNanos) {
Core.rotate(frame, rotatedMat,Core.ROTATE_90_CLOCKWISE);
Core.rotate(frame, rotatedMat,Core.ROTATE_180);
Imgproc.cvtColor(rotatedMat, hsvMat, Imgproc.COLOR_RGB2HSV);

double saturation = averageSaturation(hsvMat, rect);
saturation = averageSaturation(hsvMat, rect);

// TODO: Tune this value or do more processing
if (saturation > 0.5) {
@@ -100,6 +102,10 @@ public class SpikeMarkDetectorVisionProcessor implements VisionProcessor {
return selection;
}

public double getSaturation() {
return saturation;
}

private android.graphics.Rect makeDrawableRect(Rect rect, float scaleBmpPxToCanvasPx) {
int left = Math.round(rect.x * scaleBmpPxToCanvasPx);
int top = Math.round(rect.y * scaleBmpPxToCanvasPx);

@@ -1,5 +1,6 @@
# Red Crab / Copy Bot
## TODO LIST
- [ ] Test CyberarmEngine threadless mode
- [ ] Verify that the drivetrain motor gearbox modules are assembled with the highest gear reduction closest to the motor.
- [ ] Tune ClawArm PIDF
- [ ] Test and tune the TeamProp and SpikeMark VisionProcessors

@@ -31,14 +31,17 @@ public class TeamPropVisionProcessor implements VisionProcessor {

private Location location = Location.NONE;

/// NOTE: Rects are defined with a 480x640 (WxH) frame size assumed
/// NOTE: Rects are defined with a 640x480 (WxH) frame size assumed
// 640 / 3 = ~212
private Rect rectLeft = new Rect(1, 1, 160, 639);
private Rect rectCenter = new Rect(rectLeft.x + rectLeft.width, 1, 160, 639);
private Rect rectRight = new Rect(rectCenter.x + rectCenter.width, 1, 160, 639);
private final Rect rectLeft = new Rect(1, 1, 212, 479);
private final Rect rectCenter = new Rect(rectLeft.x + rectLeft.width, 1, 212, 479);
private final Rect rectRight = new Rect(rectCenter.x + rectCenter.width, 1, 212, 479);
private Mat subMat = new Mat();
private Mat rotatedMat = new Mat();
private Mat hsvMat = new Mat();
private double saturationLeft;
private double saturationCenter;
private double saturationRight;

@Override
public void init(int width, int height, CameraCalibration calibration) {
@@ -46,12 +49,12 @@ public class TeamPropVisionProcessor implements VisionProcessor {

@Override
public Object processFrame(Mat frame, long captureTimeNanos) {
Core.rotate(frame, rotatedMat,Core.ROTATE_90_CLOCKWISE);
Core.rotate(frame, rotatedMat,Core.ROTATE_180);
Imgproc.cvtColor(rotatedMat, hsvMat, Imgproc.COLOR_RGB2HSV);

double saturationLeft = averageSaturation(hsvMat, rectLeft);
double saturationCenter = averageSaturation(hsvMat, rectLeft);
double saturationRight = averageSaturation(hsvMat, rectLeft);
saturationLeft = averageSaturation(hsvMat, rectLeft);
saturationCenter = averageSaturation(hsvMat, rectCenter);
saturationRight = averageSaturation(hsvMat, rectRight);

if (saturationLeft > saturationCenter && saturationLeft > saturationRight) {
location = Location.LEFT;
@@ -88,8 +91,7 @@ public class TeamPropVisionProcessor implements VisionProcessor {

float textYOffset = selectedPaint.getTextSize() + selectedPaint.getStrokeWidth();

Location loc = (Location) userContext;
switch (loc) {
switch ((Location) userContext) {
case LEFT:
canvas.drawRect(drawRectLeft, selectedPaint);
canvas.drawRect(drawRectCenter, notSelectedPaint);
@@ -131,6 +133,18 @@ public class TeamPropVisionProcessor implements VisionProcessor {
return location;
}

public double getSaturationLeft() {
return saturationLeft;
}

public double getSaturationCenter() {
return saturationCenter;
}

public double getSaturationRight() {
return saturationRight;
}

private android.graphics.Rect makeDrawableRect(Rect rect, float scaleBmpPxToCanvasPx) {
int left = Math.round(rect.x * scaleBmpPxToCanvasPx);
int top = Math.round(rect.y * scaleBmpPxToCanvasPx);

@@ -1,6 +1,9 @@
package dev.cyberarm.minibots.red_crab.states;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

import java.util.ArrayList;
import java.util.List;
@@ -25,8 +28,12 @@ public class PathSelector extends CyberarmState {

@Override
public void init() {
robot.tfod.setClippingMargins(0, 0, 0, 0);
robot.tfod.setMinResultConfidence((float) minConfidence);
robot.tfPixel = TfodProcessor.easyCreateWithDefaults();
robot.visionPortal = VisionPortal.easyCreateWithDefaults(
engine.hardwareMap.get(WebcamName.class, robot.webcamName), robot.tfPixel);

robot.tfPixel.setClippingMargins(0, 0, 0, 0);
robot.tfPixel.setMinResultConfidence((float) minConfidence);

engine.blackboardSet("autonomousPath", 0);
}
@@ -39,7 +46,7 @@ public class PathSelector extends CyberarmState {
return;
}

recognitions = robot.tfod.getRecognitions();
recognitions = robot.tfPixel.getRecognitions();
for (Recognition recognition : recognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2;
double y = (recognition.getTop() + recognition.getBottom()) / 2;
@@ -58,7 +65,7 @@ public class PathSelector extends CyberarmState {

private void stopVision() {
robot.visionPortal.close();
robot.tfod.shutdown();
robot.tfPixel.shutdown();
}

@Override
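Per the commit message, the new getters exist so processor results can be surfaced in telemetry. A minimal usage sketch, not part of this commit, assuming a CyberarmState where the SDK Telemetry object is reachable through the engine and the processors are attached to a running VisionPortal:

// Sketch only: reporting the processors' latest measurements to the driver station.
engine.telemetry.addData("SpikeMark saturation", robot.spikeMark.getSaturation());
engine.telemetry.addData("TeamProp saturation L/C/R", "%.2f / %.2f / %.2f",
        robot.teamProp.getSaturationLeft(),
        robot.teamProp.getSaturationCenter(),
        robot.teamProp.getSaturationRight());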