diff --git a/FtcRobotController/src/main/AndroidManifest.xml b/FtcRobotController/src/main/AndroidManifest.xml
index 1ce6a3e161..2e747e96f9 100644
--- a/FtcRobotController/src/main/AndroidManifest.xml
+++ b/FtcRobotController/src/main/AndroidManifest.xml
@@ -1,9 +1,11 @@
-            List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();
-
-            /*
-             * The list of Blobs can be filtered to remove unwanted Blobs.
-             *   Note: All contours will be still displayed on the Stream Preview, but only those that satisfy the filter
-             *         conditions will remain in the current list of "blobs". Multiple filters may be used.
-             *
-             * Use any of the following filters.
-             *
-             * ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs);
-             *   A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small.
-             *   Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder.
-             *
-             * ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs);
-             *   A blob's density is an indication of how "full" the contour is.
-             *   If you put a rubber band around the contour you would get the "Convex Hull" of the contour.
-             *   The density is the ratio of Contour-area to Convex Hull-area.
-             *
-             * ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs);
-             *   A blob's Aspect ratio is the ratio of boxFit long side to short side.
-             *   A perfect Square has an aspect ratio of 1. All others are > 1
-             */
-            ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs);  // filter out very small blobs.
-
-            /*
-             * The list of Blobs can be sorted using the same Blob attributes as listed above.
-             * No more than one sort call should be made. Sorting can use ascending or descending order.
-             *     ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs);      // Default
-             *     ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs);
-             *     ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs);
-             */
-
-            telemetry.addLine(" Area Density Aspect  Center");
-
-            // Display the size (area) and center location for each Blob.
-            for(ColorBlobLocatorProcessor.Blob b : blobs)
-            {
-                RotatedRect boxFit = b.getBoxFit();
-                telemetry.addLine(String.format("%5d  %4.2f   %5.2f  (%3d,%3d)",
-                        b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) boxFit.center.x, (int) boxFit.center.y));
-            }
-
-            telemetry.update();
-            sleep(50);
-        }
-    }
-}
diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java
deleted file mode 100644
index 6be2bc4e62..0000000000
--- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2024 Phil Malone
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in all
- * copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- * SOFTWARE.
- */
-
-package org.firstinspires.ftc.robotcontroller.external.samples;
-
-import android.graphics.Color;
-import android.util.Size;
-
-import com.qualcomm.robotcore.eventloop.opmode.Disabled;
-import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
-import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
-
-import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
-import org.firstinspires.ftc.vision.VisionPortal;
-import org.firstinspires.ftc.vision.opencv.ImageRegion;
-import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;
-
-/*
- * This OpMode illustrates how to use a video source (camera) as a color sensor
- *
- * A "color sensor" will typically determine the color of the object that it is pointed at.
- *
- * This sample performs the same function, except it uses a video camera to inspect an object or scene.
- * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
- * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
- *
- * To perform this function, a VisionPortal runs a PredominantColorProcessor process.
- * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
- * The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters.
- * The largest of these clusters is then considered to be the "Predominant Color"
- * The process then matches the Predominant Color with the closest Swatch and returns that match.
- *
- * To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest,
- * The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
- *
- * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
- */
-
-@Disabled
-@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept")
-public class ConceptVisionColorSensor extends LinearOpMode
-{
-    @Override
-    public void runOpMode()
-    {
-        /* Build a "Color Sensor" vision processor based on the PredominantColorProcessor class.
-         *
-         * - Focus the color sensor by defining a RegionOfInterest (ROI) which you want to inspect.
-         *     This can be the entire frame, or a sub-region defined using:
-         *     1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
-         *     Use one form of the ImageRegion class to define the ROI.
-         *         ImageRegion.entireFrame()
-         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)  100x100 pixel square near the upper left corner
-         *         ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)  10% width/height square centered on screen
-         *
-         * - Set the list of "acceptable" color swatches (matches).
-         *     Only colors that you assign here will be returned.
-         *     If you know the sensor will be pointing to one of a few specific colors, enter them here.
-         *     Or, if the sensor may be pointed randomly, provide some additional colors that may match the surrounding.
-         *     Possible choices are:
-         *         RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE;
-         *
-         *     Note that in the example shown below, only some of the available colors are included.
-         *     This will force any other colored region into one of these colors.
-         *     eg: Green may be reported as YELLOW, as this may be the "closest" match.
-         */
-        PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder()
-                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
-                .setSwatches(
-                        PredominantColorProcessor.Swatch.RED,
-                        PredominantColorProcessor.Swatch.BLUE,
-                        PredominantColorProcessor.Swatch.YELLOW,
-                        PredominantColorProcessor.Swatch.BLACK,
-                        PredominantColorProcessor.Swatch.WHITE)
-                .build();
-
-        /*
-         * Build a vision portal to run the Color Sensor process.
-         *
-         * - Add the colorSensor process created above.
-         * - Set the desired video resolution.
-         *     Since a high resolution will not improve this process, choose a lower resolution that is
-         *     supported by your camera. This will improve overall performance and reduce latency.
-         * - Choose your video source. This may be
-         *     .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam
-         *   or
-         *     .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera
-         */
-        VisionPortal portal = new VisionPortal.Builder()
-                .addProcessor(colorSensor)
-                .setCameraResolution(new Size(320, 240))
-                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
-                .build();
-
-        telemetry.setMsTransmissionInterval(50);  // Speed up telemetry updates, Just use for debugging.
-
-        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
-        while (opModeIsActive() || opModeInInit())
-        {
-            telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n");
-
-            // Request the most recent color analysis.
-            // This will return the closest matching colorSwatch and the predominant RGB color.
-            // Note: to take actions based on the detected color, simply use the colorSwatch in a comparison or switch.
-            //  eg:
-            //      if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {... some code ...}
-            PredominantColorProcessor.Result result = colorSensor.getAnalysis();
-
-            // Display the Color Sensor result.
- telemetry.addData("Best Match:", result.closestSwatch); - telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb))); - telemetry.update(); - - sleep(20); - } - } -} diff --git a/MeepMeepTesting/.gitignore b/MeepMeepTesting/.gitignore new file mode 100644 index 0000000000..42afabfd2a --- /dev/null +++ b/MeepMeepTesting/.gitignore @@ -0,0 +1 @@ +/build \ No newline at end of file diff --git a/MeepMeepTesting/build.gradle b/MeepMeepTesting/build.gradle new file mode 100644 index 0000000000..081ef7820d --- /dev/null +++ b/MeepMeepTesting/build.gradle @@ -0,0 +1,16 @@ +plugins { + id 'java-library' +} + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 +} + +repositories { + maven { url = 'https://maven.brott.dev/' } +} + +dependencies { + implementation 'com.acmerobotics.roadrunner:MeepMeep:0.1.6' +} \ No newline at end of file diff --git a/MeepMeepTesting/src/main/java/com/example/meepmeeptesting/MeepMeepTesting.java b/MeepMeepTesting/src/main/java/com/example/meepmeeptesting/MeepMeepTesting.java new file mode 100644 index 0000000000..58b9faffbc --- /dev/null +++ b/MeepMeepTesting/src/main/java/com/example/meepmeeptesting/MeepMeepTesting.java @@ -0,0 +1,54 @@ +package com.example.meepmeeptesting; + +import com.acmerobotics.roadrunner.Pose2d; +import com.acmerobotics.roadrunner.Vector2d; +import com.noahbres.meepmeep.MeepMeep; +import com.noahbres.meepmeep.roadrunner.DefaultBotBuilder; +import com.noahbres.meepmeep.roadrunner.entity.RoadRunnerBotEntity; + +public class MeepMeepTesting { + public static void main(String[] args) { + MeepMeep meepMeep = new MeepMeep(800); + + RoadRunnerBotEntity myBot = new DefaultBotBuilder(meepMeep) + // Set bot constraints: maxVel, maxAccel, maxAngVel, maxAngAccel, track width + .setConstraints(60, 60, Math.toRadians(180), Math.toRadians(180), 15) + .setDimensions(14, 16) + .build(); + + myBot.runAction(myBot.getDrive().actionBuilder(new Pose2d(42, 64, 3*Math.PI/2)) + .lineToY(60) + .turnTo(-0.2783) + .strafeTo(new Vector2d(56, 56)) + .turnTo(Math.toRadians(225)) + + .turnTo(-1.95) + .strafeTo(new Vector2d(48, 36)) + .turnTo(Math.toRadians((270))) + + .turnTo(-1.95) + .strafeTo(new Vector2d(56, 56)) + .turnTo(Math.toRadians(225)) + + .turnTo(-1.471) + .strafeTo(new Vector2d(58, 36)) + .turnTo(Math.toRadians((270))) + + .turnTo(-1.471) + .strafeTo(new Vector2d(56, 56)) + .turnTo(Math.toRadians(225)) + + .turnTo(-1.234) + .strafeTo(new Vector2d(63, 36)) + + .strafeTo(new Vector2d(56, 56)) + .turnTo(Math.toRadians(225)) + .build()); + + meepMeep.setBackground(MeepMeep.Background.FIELD_INTO_THE_DEEP_JUICE_DARK) + .setDarkMode(true) + .setBackgroundAlpha(0.95f) + .addEntity(myBot) + .start(); + } +} \ No newline at end of file diff --git a/TeamCode/build.gradle b/TeamCode/build.gradle index 2ff491a8c8..ebd012eec7 100644 --- a/TeamCode/build.gradle +++ b/TeamCode/build.gradle @@ -38,5 +38,5 @@ dependencies { implementation "com.acmerobotics.roadrunner:core:1.0.0" implementation "com.acmerobotics.roadrunner:actions:1.0.0" implementation "com.acmerobotics.dashboard:dashboard:0.4.16" - implementation "com.github.jdhs-ftc:road-runner-ftc-otos:a425f97d46" + implementation "com.github.jdhs-ftc:road-runner-ftc-otos:a02ba3fe51" } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/MecanumDrive.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/MecanumDrive.java index c7fb5b91f2..b3a2d19be2 100644 --- 
a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/MecanumDrive.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/MecanumDrive.java @@ -64,31 +64,31 @@ public static class Params { // drive model parameters public double inPerTick = 1; // SparkFun OTOS Note: you can probably leave this at 1 - public double lateralInPerTick = inPerTick; - public double trackWidthTicks = 0; + public double lateralInPerTick = 0.6735446406471194; + public double trackWidthTicks = 0.8915769517728793; // feedforward parameters (in tick units) - public double kS = 0; - public double kV = 0; - public double kA = 0; + public double kS =1.329784940726059; + public double kV = 0.11982742930621032; + public double kA = 0.01; // path profile parameters (in inches) public double maxWheelVel = 50; - public double minProfileAccel = -30; - public double maxProfileAccel = 50; + public double minProfileAccel = -15; + public double maxProfileAccel = 15; // turn profile parameters (in radians) - public double maxAngVel = Math.PI; // shared with path - public double maxAngAccel = Math.PI; + public double maxAngVel =2* Math.PI; // shared with path + public double maxAngAccel =2* Math.PI; // path controller gains - public double axialGain = 0.0; - public double lateralGain = 0.0; - public double headingGain = 0.0; // shared with turn + public double axialGain = 8; + public double lateralGain = 8; + public double headingGain = 8; // shared with turn - public double axialVelGain = 0.0; - public double lateralVelGain = 0.0; - public double headingVelGain = 0.0; // shared with turn + public double axialVelGain = 1; + public double lateralVelGain = 1; + public double headingVelGain = 1; // shared with turn } public static Params PARAMS = new Params(); @@ -209,7 +209,7 @@ public Twist2dDual