FtcRobotController v8.0

This commit is contained in:
Cal Kestis 2022-09-07 13:59:24 -07:00
parent aba72e566c
commit e0282fcbd3
24 changed files with 819 additions and 423 deletions

View File

@ -1,15 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="45"
android:versionName="7.2">
android:versionCode="47"
android:versionName="8.0">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<application
android:allowBackup="true"
android:largeHeap="true"
android:extractNativeLibs="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppThemeRedRC"

View File

@ -0,0 +1,142 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;
/**
* This OpMode Sample illustrates how to use an external "hardware" class to modularize all the robot's sensors and actuators.
* This approach is very efficient because the same hardware class can be used by all of your teleop and autonomous OpModes
* without requiring many copy & paste operations. Once you have defined and tested the hardware class with one OpMode,
* it is instantly available to other OpModes.
*
* The real benefit of this approach is that as you tweak your robot hardware, you only need to make changes in ONE place (the Hardware Class).
* So, to be effective you should put as much of your hardware setup and access code as possible in the hardware class.
* Essentially anything you do with hardware in BOTH Teleop and Auto should likely go in the hardware class.
*
* The Hardware Class is created in a separate file, and then an "instance" of this class is created in each OpMode.
* In order for the class to do typical OpMode things (like send telemetry data) it must be passed a reference to the
* OpMode object when it's created, so it can access all core OpMode functions. This is illustrated below.
*
* In this concept sample, the hardware class file is called RobotHardware.java and it must accompany this sample OpMode.
* So, if you copy ConceptExternalHardwareClass.java into TeamCode (using Android Studio or OnBotJava) then RobotHardware.java
* must also be copied to the same location (maintaining its name).
*
* For comparison purposes, this sample and its accompanying hardware class duplicates the functionality of the
* RobotTelopPOV_Linear opmode. It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand)
*
* View the RobotHardware.java class file for more details
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*
* In OnBot Java, add a new OpMode, drawing from this Sample; select TeleOp.
* Also add another new file named RobotHardware.java, drawing from the Sample with that name; select Not an OpMode.
*/
@TeleOp(name="Concept: Robot Hardware Class", group="Robot")
@Disabled
public class ConceptExternalHardwareClass extends LinearOpMode {
// Create a RobotHardware object to be used to access robot hardware.
// Prefix any hardware functions with "robot." to access this class.
RobotHardware robot = new RobotHardware(this);
@Override
public void runOpMode() {
double drive = 0;
double turn = 0;
double arm = 0;
double handOffset = 0;
// initialize all the hardware, using the hardware class. See how clean and simple this is?
robot.init();
// Send telemetry message to signify robot waiting;
// Wait for the game to start (driver presses PLAY)
waitForStart();
// run until the end of the match (driver presses STOP)
while (opModeIsActive()) {
// Run wheels in POV mode (note: The joystick goes negative when pushed forward, so negate it)
// In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right.
// This way it's also easy to just drive straight, or just turn.
drive = -gamepad1.left_stick_y;
turn = gamepad1.right_stick_x;
// Combine drive and turn for blended motion. Use RobotHardware class
robot.driveRobot(drive, turn);
// Use gamepad left & right Bumpers to open and close the claw
// Use the SERVO constants defined in RobotHardware class.
// Each time around the loop, the servos will move by a small amount.
// Limit the total offset to half of the full travel range
if (gamepad1.right_bumper)
handOffset += robot.HAND_SPEED;
else if (gamepad1.left_bumper)
handOffset -= robot.HAND_SPEED;
handOffset = Range.clip(handOffset, -0.5, 0.5);
// Move both servos to new position. Use RobotHardware class
robot.setHandPositions(handOffset);
// Use gamepad buttons to move arm up (Y) and down (A)
// Use the MOTOR constants defined in RobotHardware class.
if (gamepad1.y)
arm = robot.ARM_UP_POWER;
else if (gamepad1.a)
arm = robot.ARM_DOWN_POWER;
else
arm = 0;
robot.setArmPower(arm);
// Send telemetry messages to explain controls and show robot status
telemetry.addData("Drive", "Left Stick");
telemetry.addData("Turn", "Right Stick");
telemetry.addData("Arm Up/Down", "Y & A Buttons");
telemetry.addData("Hand Open/Closed", "Left and Right Bumpers");
telemetry.addData("-", "-------");
telemetry.addData("Drive Power", "%.2f", drive);
telemetry.addData("Turn Power", "%.2f", turn);
telemetry.addData("Arm Power", "%.2f", arm);
telemetry.addData("Hand Position", "Offset = %.2f", handOffset);
telemetry.update();
// Pace this loop so hands move at a reasonable speed.
sleep(50);
}
}
}
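For reference, here is a minimal sketch of what the companion hardware class might look like. This is not the actual RobotHardware.java sample (which is more complete); the constants, device names, and method bodies below are illustrative assumptions that simply match the calls made by the OpMode above.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;

public class RobotHardware {
    // Constants the OpMode reads via "robot." (values here are illustrative).
    public static final double MID_SERVO      = 0.5;
    public static final double HAND_SPEED     = 0.02;
    public static final double ARM_UP_POWER   = 0.45;
    public static final double ARM_DOWN_POWER = -0.45;

    private final LinearOpMode myOpMode;   // grants access to hardwareMap and telemetry
    private DcMotor leftDrive, rightDrive, armMotor;
    private Servo leftHand, rightHand;

    public RobotHardware(LinearOpMode opMode) {
        myOpMode = opMode;
    }

    // Map and configure ALL hardware in one place.
    public void init() {
        leftDrive  = myOpMode.hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = myOpMode.hardwareMap.get(DcMotor.class, "right_drive");
        armMotor   = myOpMode.hardwareMap.get(DcMotor.class, "arm");
        leftHand   = myOpMode.hardwareMap.get(Servo.class, "left_hand");
        rightHand  = myOpMode.hardwareMap.get(Servo.class, "right_hand");
        leftDrive.setDirection(DcMotor.Direction.REVERSE);
        rightDrive.setDirection(DcMotor.Direction.FORWARD);
        myOpMode.telemetry.addData(">", "Hardware Initialized");
        myOpMode.telemetry.update();
    }

    // Blend drive and turn requests into left/right wheel powers.
    public void driveRobot(double drive, double turn) {
        leftDrive.setPower(Range.clip(drive + turn, -1.0, 1.0));
        rightDrive.setPower(Range.clip(drive - turn, -1.0, 1.0));
    }

    public void setArmPower(double power) {
        armMotor.setPower(power);
    }

    // Move both hand servos symmetrically around the mid position.
    public void setHandPositions(double offset) {
        leftHand.setPosition(MID_SERVO + offset);
        rightHand.setPosition(MID_SERVO - offset);
    }
}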

View File

@ -46,21 +46,28 @@ import java.util.List;
Three scenarios are tested:
Cache Mode = OFF This is the normal default, where no cache is used, and every read produces a discrete transaction with
an expansion hub, which is the slowest approach.
an expansion hub, which is the slowest approach, but guarantees that the value is as fresh (recent) as possible.
Cache Mode = AUTO This mode will attempt to minimize the number of discrete read commands, by performing bulk-reads
and then returning values that have been cached. The cache is updated automatically whenever a specific read operation is repeated.
This mode will always return fresh data, but it may perform more bulk-reads than absolutely required.
Extra reads will be performed if multiple identical encoder/velocity reads are performed in one control cycle.
and then returning values that have been cached. The cache is updated automatically whenever any specific encoder is re-read.
This mode will always return new data, but it may perform more bulk-reads than absolutely required.
Extra reads will be performed if multiple encoder/velocity reads are performed on the same encoder in one control cycle.
This mode is a good compromise between the OFF and MANUAL modes.
Cache Mode = MANUAL This mode enables the user's code to determine the best time to refresh the cached bulk-read data.
Well organized code can place all the sensor reads in one location, and then just reset the cache once per control cycle.
The approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
Note: If there are significant user-program delays between encoder reads, the cached value may not be fresh (recent).
You can issue a clearBulkCache() call at any time to force a fresh bulk-read on the next encoder read.
Cache Mode = MANUAL This mode requires the user's code to determine the best time to clear the cached bulk-read data.
Well organized code will reset the cache once at the beginning of the control cycle, and then immediately read and store all the encoder values.
This approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
Since NO automatic Bulk-Reads are performed, neglecting to clear the bulk cache will result in the same values being returned
each time an encoder read is performed.
-------------------------------------
General tip to speed up your control cycles:
No matter what method you use to read encoders and other inputs, you should try to
avoid reading the same input multiple times around a control loop.
avoid reading the same encoder input multiple times around a control loop.
Under normal conditions, this will slow down the control loop.
The preferred method is to read all the required inputs ONCE at the beginning of the loop,
and save the values in variables that can be used by other parts of the control code.
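As a concrete sketch of the MANUAL cache mode described above: the hub is switched to MANUAL once, the cache is cleared exactly once per control cycle, and all inputs are read immediately afterward. The OpMode wrapper and motor names here are illustrative assumptions; the LynxModule bulk-caching calls are the SDK API under discussion.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import java.util.List;

@TeleOp(name = "Sketch: Manual Bulk Caching", group = "Concept")
@Disabled
public class SketchManualBulkCache extends LinearOpMode {
    @Override
    public void runOpMode() {
        DcMotor leftDrive  = hardwareMap.get(DcMotor.class, "left_drive");
        DcMotor rightDrive = hardwareMap.get(DcMotor.class, "right_drive");

        // Put every hub into MANUAL mode: no bulk read happens until the cache is cleared.
        List<LynxModule> allHubs = hardwareMap.getAll(LynxModule.class);
        for (LynxModule hub : allHubs) {
            hub.setBulkCachingMode(LynxModule.BulkCachingMode.MANUAL);
        }

        waitForStart();
        while (opModeIsActive()) {
            // Clear the cache ONCE at the top of each cycle; forgetting this returns stale values.
            for (LynxModule hub : allHubs) {
                hub.clearBulkCache();
            }
            // Read all required inputs immediately, and save them for the rest of the loop.
            int leftPos  = leftDrive.getCurrentPosition();   // triggers one fresh bulk read
            int rightPos = rightDrive.getCurrentPosition();  // served from the same cache
            telemetry.addData("Encoders L:R", "%d : %d", leftPos, rightPos);
            telemetry.update();
        }
    }
}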

View File

@ -40,8 +40,8 @@ import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine the position of the Freight Frenzy game elements.
* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine which image is being presented to the robot.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@ -52,23 +52,21 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
/* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
* the following 4 detectable objects
* 0: Ball,
* 1: Cube,
* 2: Duck,
* 3: Marker (duck location tape marker)
*
* Two additional model assets are available which only contain a subset of the objects:
* FreightFrenzy_BC.tflite 0: Ball, 1: Cube
* FreightFrenzy_DM.tflite 0: Duck, 1: Marker
*/
private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
/*
* Specify the source for the Tensor Flow Model.
* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
* the OpMode must load it using loadModelFromAsset(). However, if a team-generated model
* has been downloaded to the Robot Controller's SD FLASH memory, it must be loaded using loadModelFromFile().
* Here we assume it's an Asset. Also see method initTfod() below.
*/
private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
private static final String[] LABELS = {
"Ball",
"Cube",
"Duck",
"Marker"
"1 Bolt",
"2 Bulb",
"3 Panel"
};
/*
@ -114,11 +112,11 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
// The TensorFlow software will scale the input images from the camera to a lower resolution.
// This can result in lower detection accuracy at longer distances (> 55cm or 22").
// If your target is at distance greater than 50 cm (20") you can adjust the magnification value
// If your target is at distance greater than 50 cm (20") you can increase the magnification value
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
// should be set to the value of the images used to create the TensorFlow Object Detection model
// (typically 16/9).
tfod.setZoom(2.5, 16.0/9.0);
tfod.setZoom(1.0, 16.0/9.0);
}
/** Wait for the game to begin */
@ -133,19 +131,22 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
// the last time that call was made.
List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
if (updatedRecognitions != null) {
telemetry.addData("# Object Detected", updatedRecognitions.size());
telemetry.addData("# Objects Detected", updatedRecognitions.size());
// step through the list of recognitions and display boundary info.
int i = 0;
for (Recognition recognition : updatedRecognitions) {
telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f",
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.update();
// step through the list of recognitions and display image position/size information for each one
// Note: "Image number" refers to the randomized image orientation/number
for (Recognition recognition : updatedRecognitions) {
double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
}
telemetry.update();
}
}
}
@ -166,8 +167,6 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Loading trackables is not necessary for the TensorFlow Object Detection engine.
}
/**
@ -177,10 +176,14 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
tfodParameters.minResultConfidence = 0.75f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 320;
tfodParameters.inputSize = 300;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
}
}

View File

@ -41,8 +41,8 @@ import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine the position of the Freight Frenzy game elements.
* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine which image is being presented to the robot.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
@ -53,23 +53,21 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
/* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
* the following 4 detectable objects
* 0: Ball,
* 1: Cube,
* 2: Duck,
* 3: Marker (duck location tape marker)
*
* Two additional model assets are available which only contain a subset of the objects:
* FreightFrenzy_BC.tflite 0: Ball, 1: Cube
* FreightFrenzy_DM.tflite 0: Duck, 1: Marker
*/
private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
/*
* Specify the source for the Tensor Flow Model.
* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
* the OpMode must load it using loadModelFromAsset(). However, if a team-generated model
* has been downloaded to the Robot Controller's SD FLASH memory, it must be loaded using loadModelFromFile().
* Here we assume it's an Asset. Also see method initTfod() below.
*/
private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
private static final String[] LABELS = {
"Ball",
"Cube",
"Duck",
"Marker"
"1 Bolt",
"2 Bulb",
"3 Panel"
};
/*
@ -123,11 +121,11 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
// The TensorFlow software will scale the input images from the camera to a lower resolution.
// This can result in lower detection accuracy at longer distances (> 55cm or 22").
// If your target is at distance greater than 50 cm (20") you can adjust the magnification value
// If your target is at distance greater than 50 cm (20") you can increase the magnification value
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
// should be set to the value of the images used to create the TensorFlow Object Detection model
// (typically 16/9).
tfod.setZoom(2.5, 16.0/9.0);
tfod.setZoom(1.0, 16.0/9.0);
}
/** Wait for the game to begin */
@ -140,16 +138,19 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
if (tfod != null) {
doCameraSwitching();
List<Recognition> recognitions = tfod.getRecognitions();
telemetry.addData("# Object Detected", recognitions.size());
// step through the list of recognitions and display boundary info.
int i = 0;
telemetry.addData("# Objects Detected", recognitions.size());
// step through the list of recognitions and display image size and position
// Note: "Image number" refers to the randomized image orientation/number
for (Recognition recognition : recognitions) {
telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f",
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
}
telemetry.update();
}
@ -179,8 +180,6 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
// Set the active camera to Webcam 1.
switchableCamera = (SwitchableCamera) vuforia.getCamera();
switchableCamera.setActiveCamera(webcam1);
// Loading trackables is not necessary for the TensorFlow Object Detection engine.
}
/**
@ -190,11 +189,15 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
tfodParameters.minResultConfidence = 0.75f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 320;
tfodParameters.inputSize = 300;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
}
private void doCameraSwitching() {

View File

@ -40,8 +40,8 @@ import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine the position of the Freight Frenzy game elements.
* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine which image is being presented to the robot.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
@ -52,23 +52,22 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
/* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
* the following 4 detectable objects
* 0: Ball,
* 1: Cube,
* 2: Duck,
* 3: Marker (duck location tape marker)
*
* Two additional model assets are available which only contain a subset of the objects:
* FreightFrenzy_BC.tflite 0: Ball, 1: Cube
* FreightFrenzy_DM.tflite 0: Duck, 1: Marker
*/
private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
/*
* Specify the source for the Tensor Flow Model.
* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
* the OpMode must load it using loadModelFromAsset(). However, if a team-generated model
* has been downloaded to the Robot Controller's SD FLASH memory, it must be loaded using loadModelFromFile().
* Here we assume it's an Asset. Also see method initTfod() below.
*/
private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
private static final String[] LABELS = {
"Ball",
"Cube",
"Duck",
"Marker"
"1 Bolt",
"2 Bulb",
"3 Panel"
};
/*
@ -114,11 +113,11 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
// The TensorFlow software will scale the input images from the camera to a lower resolution.
// This can result in lower detection accuracy at longer distances (> 55cm or 22").
// If your target is at distance greater than 50 cm (20") you can adjust the magnification value
// If your target is at distance greater than 50 cm (20") you can increase the magnification value
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
// should be set to the value of the images used to create the TensorFlow Object Detection model
// (typically 16/9).
tfod.setZoom(2.5, 16.0/9.0);
tfod.setZoom(1.0, 16.0/9.0);
}
/** Wait for the game to begin */
@ -133,18 +132,22 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
// the last time that call was made.
List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
if (updatedRecognitions != null) {
telemetry.addData("# Object Detected", updatedRecognitions.size());
// step through the list of recognitions and display boundary info.
int i = 0;
for (Recognition recognition : updatedRecognitions) {
telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f",
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.update();
telemetry.addData("# Objects Detected", updatedRecognitions.size());
// step through the list of recognitions and display image position/size information for each one
// Note: "Image number" refers to the randomized image orientation/number
for (Recognition recognition : updatedRecognitions) {
double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
}
telemetry.update();
}
}
}
@ -165,8 +168,6 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Loading trackables is not necessary for the TensorFlow Object Detection engine.
}
/**
@ -176,10 +177,14 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 320;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
tfodParameters.minResultConfidence = 0.75f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 300;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
}
}

View File

@ -49,11 +49,11 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates the basics of using the Vuforia engine to determine
* the identity of Vuforia VuMarks encountered on the field. The code is structured as
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaNavigation}; we do not here
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigation}; we do not here
* duplicate the core Vuforia documentation found there, but rather instead focus on the
* differences between the use of Vuforia for navigation vs VuMark identification.
*
* @see ConceptVuforiaNavigation
* @see ConceptVuforiaFieldNavigation
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
@ -62,7 +62,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained in {@link ConceptVuforiaNavigation}.
* is explained below.
*/
@TeleOp(name="Concept: VuMark Id", group ="Concept")

View File

@ -50,11 +50,11 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates the basics of using the Vuforia engine to determine
* the identity of Vuforia VuMarks encountered on the field. The code is structured as
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaNavigationWebcam}; we do not here
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigationWebcam}; we do not here
* duplicate the core Vuforia documentation found there, but rather instead focus on the
* differences between the use of Vuforia for navigation vs VuMark identification.
*
* @see ConceptVuforiaNavigationWebcam
* @see ConceptVuforiaFieldNavigationWebcam
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
@ -63,7 +63,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained in {@link ConceptVuforiaNavigationWebcam}.
* is explained below.
*/
@TeleOp(name="Concept: VuMark Id Webcam", group ="Concept")

View File

@ -96,14 +96,14 @@ public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
this.vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the trackable objects from the Assets file, and give them meaningful names
VuforiaTrackables targetsFreightFrenzy = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
targetsFreightFrenzy.get(0).setName("Blue Storage");
targetsFreightFrenzy.get(1).setName("Blue Alliance Wall");
targetsFreightFrenzy.get(2).setName("Red Storage");
targetsFreightFrenzy.get(3).setName("Red Alliance Wall");
VuforiaTrackables targetsPowerPlay = this.vuforia.loadTrackablesFromAsset("PowerPlay");
targetsPowerPlay.get(0).setName("Red Audience Wall");
targetsPowerPlay.get(1).setName("Red Rear Wall");
targetsPowerPlay.get(2).setName("Blue Audience Wall");
targetsPowerPlay.get(3).setName("Blue Rear Wall");
// Start tracking targets in the background
targetsFreightFrenzy.activate();
targetsPowerPlay.activate();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must correspond to the names assigned during the robot configuration
@ -132,7 +132,7 @@ public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
{
// Look for first visible target, and save its pose.
targetFound = false;
for (VuforiaTrackable trackable : targetsFreightFrenzy)
for (VuforiaTrackable trackable : targetsPowerPlay)
{
if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible())
{

View File

@ -100,7 +100,7 @@ public class ConceptVuforiaFieldNavigation extends LinearOpMode {
" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
// Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
// We will define some constants and conversions here. These are useful for the Freight Frenzy field.
// We will define some constants and conversions here. These are useful for the FTC competition field.
private static final float mmPerInch = 25.4f;
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
private static final float halfField = 72 * mmPerInch;
@ -136,9 +136,8 @@ public class ConceptVuforiaFieldNavigation extends LinearOpMode {
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
// Load the trackable assets.
targets = this.vuforia.loadTrackablesFromAsset("PowerPlay");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
@ -163,10 +162,10 @@ public class ConceptVuforiaFieldNavigation extends LinearOpMode {
*/
// Name and locate each trackable object
identifyTarget(0, "Blue Storage", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Blue Alliance Wall", halfTile, halfField, mmTargetHeight, 90, 0, 0);
identifyTarget(2, "Red Storage", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Red Alliance Wall", halfTile, -halfField, mmTargetHeight, 90, 0, 180);
identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90);
identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90);
/*
* Create a transformation matrix describing where the phone is on the robot.

View File

@ -137,7 +137,7 @@ public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {
// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
targets = this.vuforia.loadTrackablesFromAsset("PowerPlay");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
@ -162,10 +162,10 @@ public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {
*/
// Name and locate each trackable object
identifyTarget(0, "Blue Storage", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Blue Alliance Wall", halfTile, halfField, mmTargetHeight, 90, 0, 0);
identifyTarget(2, "Red Storage", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Red Alliance Wall", halfTile, -halfField, mmTargetHeight, 90, 0, 180);
identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90);
identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90);
/*
* Create a transformation matrix describing where the camera is on the robot.

View File

@ -1,4 +1,4 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
@ -29,7 +29,7 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cGyro;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
@ -37,36 +37,56 @@ import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
/**
* This file illustrates the concept of driving a path based on Gyro heading and encoder counts.
* The code is structured as a LinearOpMode
* This file illustrates the concept of driving an autonomous path based on Gyro heading and encoder counts.
* The code is structured as a LinearOpMode
*
* The code REQUIRES that you DO have encoders on the wheels,
* otherwise you would use: RobotAutoDriveByTime;
* The path to be followed by the robot is built from a series of drive, turn or pause steps.
* Each step on the path is defined by a single function call, and these can be strung together in any order.
*
* This code ALSO requires that you have a Modern Robotics I2C gyro with the name "gyro"
* otherwise you would use: RobotAutoDriveByEncoder;
* The code REQUIRES that you have encoders on the drive motors, otherwise you should use: RobotAutoDriveByTime;
*
* This code requires that the drive Motors have been configured such that a positive
* power command moves them forward, and causes the encoders to count UP.
* This code ALSO requires that you have a BOSCH BNO055 IMU, otherwise you would use: RobotAutoDriveByEncoder;
* This IMU is found in REV Control/Expansion Hubs shipped prior to July 2022, and possibly also on later models.
* To run as written, the Control/Expansion hub should be mounted horizontally on a flat part of the robot chassis.
*
* This code uses the RUN_TO_POSITION mode to enable the Motor controllers to generate the run profile
* This sample requires that the drive Motors have been configured with names : left_drive and right_drive.
* It also requires that a positive power command moves both motors forward, and causes the encoders to count UP.
* So please verify that both of your motors move the robot forward on the first move. If not, make the required correction.
* See the beginning of runOpMode() to set the FORWARD/REVERSE option for each motor.
*
* In order to calibrate the Gyro correctly, the robot must remain stationary during calibration.
* This is performed when the INIT button is pressed on the Driver Station.
* This code assumes that the robot is stationary when the INIT button is pressed.
* If this is not the case, then the INIT should be performed again.
* This code uses RUN_TO_POSITION mode for driving straight, and RUN_USING_ENCODER mode for turning and holding.
* Note: You must call setTargetPosition() at least once before switching to RUN_TO_POSITION mode.
*
* Note: in this example, all angles are referenced to the initial coordinate frame set during
* the Gyro Calibration process, or whenever the program issues a resetZAxisIntegrator() call on the Gyro.
* Notes:
*
* All angles are referenced to the coordinate-frame that is set whenever resetHeading() is called.
* In this sample, the heading is reset when the Start button is touched on the Driver station.
* Note: It would be possible to reset the heading after each move, but this would accumulate steering errors.
*
* The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis,
* which means that a Positive rotation is Counter Clockwise, looking down on the field.
* This is consistent with the FTC field coordinate conventions set out in the document:
* ftc_app\doc\tutorial\FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Control Approach.
*
* To reach, or maintain a required heading, this code implements a basic Proportional Controller where:
*
* Steering power = Heading Error * Proportional Gain.
*
* "Heading Error" is calculated by taking the difference between the desired heading and the actual heading,
* and then "normalizing" it by converting it to a value in the +/- 180 degree range.
*
* "Proportional Gain" is a constant that YOU choose to set the "strength" of the steering response.
*
* Use Android Studio to Copy this Class, and Paste it into your "TeamCode" folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
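// A quick worked example of the proportional rule above, as a sketch reusing this
// sample's own P_TURN_GAIN value (no additional API is involved):
//     double headingError  = 25.0;                // degrees, already wrapped into +/- 180
//     double steeringPower = headingError * 0.02; // = 0.50 of full turning power
// getSteeringCorrection() below computes exactly this, then clips the result to +/- 1.0.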
@Autonomous(name="Robot: Auto Drive By Gyro", group="Robot")
@ -76,9 +96,21 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
/* Declare OpMode members. */
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
ModernRoboticsI2cGyro gyro = null; // Additional Gyro device
private BNO055IMU imu = null; // Control/Expansion Hub IMU
private double robotHeading = 0;
private double robotHeading = 0;
private double headingOffset = 0;
private double headingError = 0;
// These variables are declared here (as class members) so they can be updated in various methods,
// but still be displayed by sendTelemetry()
private double targetHeading = 0;
private double driveSpeed = 0;
private double turnSpeed = 0;
private double leftSpeed = 0;
private double rightSpeed = 0;
private int leftTarget = 0;
private int rightTarget = 0;
// Calculate the COUNTS_PER_INCH for your specific drive train.
// Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV
@ -86,20 +118,24 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
// For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear.
// This is gearing DOWN for less speed and more torque.
// For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation.
static final double COUNTS_PER_MOTOR_REV = 1440 ; // eg: TETRIX Motor Encoder
static final double COUNTS_PER_MOTOR_REV = 537.7 ; // eg: GoBILDA 312 RPM Yellow Jacket
static final double DRIVE_GEAR_REDUCTION = 1.0 ; // No External Gearing.
static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference
static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) /
(WHEEL_DIAMETER_INCHES * 3.1415);
// These constants define the desired driving/control characteristics
// The can/should be tweaked to suite the specific robot drive train.
static final double DRIVE_SPEED = 0.7; // Nominal speed for better accuracy.
static final double TURN_SPEED = 0.5; // Nominal half speed for better accuracy.
static final double HEADING_THRESHOLD = 1 ; // As tight as we can make it with an integer gyro
static final double P_TURN_COEFF = 0.1; // Larger is more responsive, but also less stable
static final double P_DRIVE_COEFF = 0.15; // Larger is more responsive, but also less stable
// They can/should be tweaked to suit the specific robot drive train.
static final double DRIVE_SPEED = 0.4; // Max driving speed for better distance accuracy.
static final double TURN_SPEED = 0.2; // Max Turn speed to limit turn rate
static final double HEADING_THRESHOLD = 1.0 ; // How close must the heading get to the target before moving to the next step.
// Requiring more accuracy (a smaller number) will often make the turn take longer to get into the final position.
// Define the Proportional control coefficient (or GAIN) for "heading control".
// We define one value when Turning (larger errors), and the other is used when Driving straight (smaller errors).
// Increase these numbers if the heading does not correct strongly enough (eg: a heavy robot or using tracks)
// Decrease these numbers if the heading does not settle on the correct value (eg: very agile robot with omni wheels)
static final double P_TURN_GAIN = 0.02; // Larger is more responsive, but also less stable
static final double P_DRIVE_GAIN = 0.03; // Larger is more responsive, but also less stable
@Override
@ -115,145 +151,118 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
gyro = (ModernRoboticsI2cGyro)hardwareMap.gyroSensor.get("gyro");
// define initialization values for IMU, and then initialize it.
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES;
imu = hardwareMap.get(BNO055IMU.class, "imu");
imu.initialize(parameters);
// Ensure the robot it stationary, then reset the encoders and calibrate the gyro.
// Ensure the robot is stationary. Reset the encoders and set the motors to BRAKE mode
leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
rightDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// Send telemetry message to alert driver that we are calibrating;
telemetry.addData(">", "Calibrating Gyro"); //
telemetry.update();
gyro.calibrate();
// make sure the gyro is calibrated before continuing
while (!isStopRequested() && gyro.isCalibrating()) {
sleep(50);
idle();
}
telemetry.addData(">", "Robot Ready."); //
telemetry.update();
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Wait for the game to start (Display Gyro value)
// Wait for the game to start (Display Gyro value while waiting)
while (opModeInInit()) {
telemetry.addData(">", "Robot Heading = %4.0f", getHeading());
telemetry.addData(">", "Robot Heading = %4.0f", getRawHeading());
telemetry.update();
}
// Reset gyro before we move..
gyro.resetZAxisIntegrator();
getHeading();
// Set the encoders for closed loop speed control, and reset the heading.
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
resetHeading();
// Step through each leg of the path,
// Note: Reverse movement is obtained by setting a negative distance (not speed)
// Put a hold after each turn
gyroDrive(DRIVE_SPEED, 48.0, 0.0); // Drive FWD 48 inches
gyroTurn( TURN_SPEED, -45.0); // Turn CW to -45 Degrees
gyroHold( TURN_SPEED, -45.0, 0.5); // Hold -45 Deg heading for a 1/2 second
gyroDrive(DRIVE_SPEED, 12.0, -45.0); // Drive FWD 12 inches at 45 degrees
gyroTurn( TURN_SPEED, 45.0); // Turn CCW to 45 Degrees
gyroHold( TURN_SPEED, 45.0, 0.5); // Hold 45 Deg heading for a 1/2 second
gyroTurn( TURN_SPEED, 0.0); // Turn CW to 0 Degrees
gyroHold( TURN_SPEED, 0.0, 1.0); // Hold 0 Deg heading for a 1 second
gyroDrive(DRIVE_SPEED,-48.0, 0.0); // Drive REV 48 inches
// Notes: Reverse movement is obtained by setting a negative distance (not speed)
// holdHeading() is used after turns to let the heading stabilize
// Add a sleep(2000) after any step to keep the telemetry data visible for review
driveStraight(DRIVE_SPEED, 24.0, 0.0); // Drive Forward 24"
turnToHeading( TURN_SPEED, -45.0); // Turn CW to -45 Degrees
holdHeading( TURN_SPEED, -45.0, 0.5); // Hold -45 Deg heading for a 1/2 second
driveStraight(DRIVE_SPEED, 17.0, -45.0); // Drive Forward 17" at -45 degrees (12"x and 12"y)
turnToHeading( TURN_SPEED, 45.0); // Turn CCW to 45 Degrees
holdHeading( TURN_SPEED, 45.0, 0.5); // Hold 45 Deg heading for a 1/2 second
driveStraight(DRIVE_SPEED, 17.0, 45.0); // Drive Forward 17" at 45 degrees (-12"x and 12"y)
turnToHeading( TURN_SPEED, 0.0); // Turn CW to 0 Degrees
holdHeading( TURN_SPEED, 0.0, 1.0); // Hold 0 Deg heading for 1 second
driveStraight(DRIVE_SPEED,-48.0, 0.0); // Drive in Reverse 48" (should return to approx. starting position)
telemetry.addData("Path", "Complete");
telemetry.addData("Final Heading", "%5.0f", getHeading());
telemetry.update();
sleep(1000); // Pause to display last telemetry message.
}
/**
* Method to drive on a fixed compass bearing (angle), based on encoder counts.
/*
* ====================================================================================================
* Driving "Helper" functions are below this line.
* These provide the high and low level methods that handle driving straight and turning.
* ====================================================================================================
*/
// ********** HIGH Level driving functions. ********************
/**
* Method to drive in a straight line, on a fixed compass heading (angle), based on encoder counts.
* Move will stop if either of these conditions occur:
* 1) Move gets to the desired position
* 2) Driver stops the opmode running.
*
* @param speed Target speed for forward motion. Should allow for +/- variance for adjusting heading
* @param maxDriveSpeed MAX Speed for forward/rev motion (range 0 to +1.0) .
* @param distance Distance (in inches) to move from current position. Negative distance means move backward.
* @param angle Absolute Angle (in Degrees) relative to last gyro reset.
* @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
* If a relative angle is required, add/subtract from the current robotHeading.
*/
public void gyroDrive ( double speed,
double distance,
double angle) {
int newLeftTarget;
int newRightTarget;
int moveCounts;
double max;
double error;
double steer;
double leftSpeed;
double rightSpeed;
public void driveStraight(double maxDriveSpeed,
double distance,
double heading) {
// Ensure that the opmode is still active
if (opModeIsActive()) {
// Determine new target position, and pass to motor controller
moveCounts = (int)(distance * COUNTS_PER_INCH);
newLeftTarget = leftDrive.getCurrentPosition() + moveCounts;
newRightTarget = rightDrive.getCurrentPosition() + moveCounts;
int moveCounts = (int)(distance * COUNTS_PER_INCH);
leftTarget = leftDrive.getCurrentPosition() + moveCounts;
rightTarget = rightDrive.getCurrentPosition() + moveCounts;
// Set Target and Turn On RUN_TO_POSITION
leftDrive.setTargetPosition(newLeftTarget);
rightDrive.setTargetPosition(newRightTarget);
// Set Target FIRST, then turn on RUN_TO_POSITION
leftDrive.setTargetPosition(leftTarget);
rightDrive.setTargetPosition(rightTarget);
leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
// start motion.
speed = Range.clip(Math.abs(speed), 0.0, 1.0);
leftDrive.setPower(speed);
rightDrive.setPower(speed);
// Set the required driving speed (must be positive for RUN_TO_POSITION)
// Start driving straight, and then enter the control loop
maxDriveSpeed = Math.abs(maxDriveSpeed);
moveRobot(maxDriveSpeed, 0);
// keep looping while we are still active, and BOTH motors are running.
while (opModeIsActive() &&
(leftDrive.isBusy() && rightDrive.isBusy())) {
// adjust relative speed based on heading error.
error = getError(angle);
steer = getSteer(error, P_DRIVE_COEFF);
// Determine required steering to keep on heading
turnSpeed = getSteeringCorrection(heading, P_DRIVE_GAIN);
// if driving in reverse, the motor correction also needs to be reversed
if (distance < 0)
steer *= -1.0;
turnSpeed *= -1.0;
leftSpeed = speed - steer;
rightSpeed = speed + steer;
// Normalize speeds if either one exceeds +/- 1.0;
max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed));
if (max > 1.0)
{
leftSpeed /= max;
rightSpeed /= max;
}
leftDrive.setPower(leftSpeed);
rightDrive.setPower(rightSpeed);
// Apply the turning correction to the current driving speed.
moveRobot(driveSpeed, turnSpeed);
// Display drive status for the driver.
telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", angle, robotHeading);
telemetry.addData("Error:Steer", "%5.1f:%5.1f", error, steer);
telemetry.addData("Target L:R", "%7d:%7d", newLeftTarget, newRightTarget);
telemetry.addData("Actual L:R", "%7d:%7d", leftDrive.getCurrentPosition(),
rightDrive.getCurrentPosition());
telemetry.addData("Speed L:R", "%5.2f:%5.2f", leftSpeed, rightSpeed);
telemetry.update();
sendTelemetry(true);
}
// Stop all motion;
leftDrive.setPower(0);
rightDrive.setPower(0);
// Turn off RUN_TO_POSITION
// Stop all motion & Turn off RUN_TO_POSITION
moveRobot(0, 0);
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
}
@ -265,124 +274,158 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
* 1) Move gets to the heading (angle)
* 2) Driver stops the opmode running.
*
* @param speed Desired speed of turn.
* @param angle Absolute Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
* @param maxTurnSpeed Desired MAX speed of turn. (range 0 to +1.0)
* @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
*/
public void gyroTurn ( double speed, double angle) {
public void turnToHeading(double maxTurnSpeed, double heading) {
// Run getSteeringCorrection() once to pre-calculate the current error
getSteeringCorrection(heading, P_DRIVE_GAIN);
// keep looping while we are still active, and not on heading.
while (opModeIsActive() && !onHeading(speed, angle, P_TURN_COEFF)) {
// Update telemetry & Allow time for other processes to run.
telemetry.update();
while (opModeIsActive() && (Math.abs(headingError) > HEADING_THRESHOLD)) {
// Determine required steering to keep on heading
turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN);
// Clip the speed to the maximum permitted value.
turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed);
// Pivot in place by applying the turning correction
moveRobot(0, turnSpeed);
// Display drive status for the driver.
sendTelemetry(false);
}
// Stop all motion;
moveRobot(0, 0);
}
/**
* Method to obtain & hold a heading for a finite amount of time
* Move will stop once the requested time has elapsed
* This function is useful for giving the robot a moment to stabilize its heading between movements.
*
* @param speed Desired speed of turn.
* @param angle Absolute Angle (in Degrees) relative to last gyro reset.
* @param maxTurnSpeed Maximum differential turn speed (range 0 to +1.0)
* @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
* @param holdTime Length of time (in seconds) to hold the specified heading.
*/
public void gyroHold( double speed, double angle, double holdTime) {
public void holdHeading(double maxTurnSpeed, double heading, double holdTime) {
ElapsedTime holdTimer = new ElapsedTime();
holdTimer.reset();
// keep looping while we have time remaining.
holdTimer.reset();
while (opModeIsActive() && (holdTimer.time() < holdTime)) {
// Update telemetry & Allow time for other processes to run.
onHeading(speed, angle, P_TURN_COEFF);
telemetry.update();
// Determine required steering to keep on heading
turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN);
// Clip the speed to the maximum permitted value.
turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed);
// Pivot in place by applying the turning correction
moveRobot(0, turnSpeed);
// Display drive status for the driver.
sendTelemetry(false);
}
// Stop all motion;
leftDrive.setPower(0);
rightDrive.setPower(0);
moveRobot(0, 0);
}
// ********** LOW Level driving functions. ********************
/**
* This method uses a Proportional Controller to determine how much steering correction is required.
*
* @param desiredHeading The desired absolute heading (relative to last heading reset)
* @param proportionalGain Gain factor applied to heading error to obtain turning power.
* @return Turning power needed to get to required heading.
*/
public double getSteeringCorrection(double desiredHeading, double proportionalGain) {
targetHeading = desiredHeading; // Save for telemetry
// Get the robot heading by applying an offset to the IMU heading
robotHeading = getRawHeading() - headingOffset;
// Determine the current heading error
headingError = targetHeading - robotHeading;
// Normalize the error to be within +/- 180 degrees
while (headingError > 180) headingError -= 360;
while (headingError <= -180) headingError += 360;
// Multiply the error by the gain to determine the required steering correction. Limit the result to +/- 1.0
return Range.clip(headingError * proportionalGain, -1, 1);
}
/**
* Perform one cycle of closed loop heading control.
*
* @param speed Desired speed of turn.
* @param angle Absolute Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
* @param PCoeff Proportional Gain coefficient
* @return true if the heading error is within HEADING_THRESHOLD
*/
boolean onHeading(double speed, double angle, double PCoeff) {
double error;
double steer;
boolean onTarget = false;
double leftSpeed;
double rightSpeed;
// determine turn power based on +/- error
error = getError(angle);
if (Math.abs(error) <= HEADING_THRESHOLD) {
steer = 0.0;
leftSpeed = 0.0;
rightSpeed = 0.0;
onTarget = true;
}
else {
steer = getSteer(error, PCoeff);
rightSpeed = speed * steer;
leftSpeed = -rightSpeed;
}
// Send desired speeds to motors.
leftDrive.setPower(leftSpeed);
rightDrive.setPower(rightSpeed);
// Display it for the driver.
telemetry.addData("Target/Current", "%5.2f / %5.0f", angle, robotHeading);
telemetry.addData("Error/Steer", "%5.2f / %5.2f", error, steer);
telemetry.addData("Speed.", "%5.2f : %5.2f", leftSpeed, rightSpeed);
return onTarget;
}
/**
* This method takes separate drive (fwd/rev) and turn (right/left) requests,
* combines them, and applies the appropriate speed commands to the left and right wheel motors.
*
* @param drive forward motor speed
* @param turn clockwise turning motor speed.
*/
public void moveRobot(double drive, double turn) {
driveSpeed = drive; // save this value as a class member so it can be used by telemetry.
turnSpeed = turn; // save this value as a class member so it can be used by telemetry.
leftSpeed = drive - turn;
rightSpeed = drive + turn;
// Scale speeds down if either one exceeds +/- 1.0;
double max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed));
if (max > 1.0)
{
leftSpeed /= max;
rightSpeed /= max;
}
// Send desired speeds to motors.
leftDrive.setPower(leftSpeed);
rightDrive.setPower(rightSpeed);
}
/**
* getError determines the error between the target angle and the robot's current heading
* @param targetAngle Desired angle (relative to global reference established at last Gyro Reset).
* @return error angle: Degrees in the range +/- 180. Centered on the robot's frame of reference
* +ve error means the robot should turn LEFT (CCW) to reduce error.
*/
public double getError(double targetAngle) {
double robotError;
// calculate error in -179 to +180 range
robotError = targetAngle - getHeading();
while (robotError > 180) robotError -= 360;
while (robotError <= -180) robotError += 360;
return robotError;
}
/**
* Display the various control parameters while driving
*
* @param straight Set to true if we are driving straight, and the encoder positions should be included in the telemetry.
*/
private void sendTelemetry(boolean straight) {
if (straight) {
telemetry.addData("Motion", "Drive Straight");
telemetry.addData("Target Pos L:R", "%7d:%7d", leftTarget, rightTarget);
telemetry.addData("Actual Pos L:R", "%7d:%7d", leftDrive.getCurrentPosition(),
rightDrive.getCurrentPosition());
} else {
telemetry.addData("Motion", "Turning");
}
telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", targetHeading, robotHeading);
telemetry.addData("Error:Steer", "%5.1f:%5.1f", headingError, turnSpeed);
telemetry.addData("Wheel Speeds L:R.", "%5.2f : %5.2f", leftSpeed, rightSpeed);
telemetry.update();
}
/**
* read and save the current robot heading
*/
public double getHeading() {
robotHeading = (double)gyro.getIntegratedZValue();
return robotHeading;
}
/**
* read the raw (un-offset Gyro heading) directly from the IMU
*/
public double getRawHeading() {
Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
return angles.firstAngle;
}
/**
* returns desired steering force. +/- 1 range. +ve = steer left
* @param error Error angle in robot relative degrees
* @param PCoeff Proportional Gain Coefficient
* @return steering force, clipped to the +/- 1 range
*/
public double getSteer(double error, double PCoeff) {
return Range.clip(error * PCoeff, -1, 1);
}
/**
* Reset the "offset" heading back to zero
*/
public void resetHeading() {
// Save a new heading offset equal to the current raw heading.
headingOffset = getRawHeading();
robotHeading = 0;
}
}
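For context, an autonomous sequence built on these helpers might look like the following sketch. Only holdHeading(), moveRobot(), getSteeringCorrection() and resetHeading() appear verbatim above; turnToHeading() and driveStraight() are assumed wrappers around the turn loop and the encoder-drive code referenced by sendTelemetry(), and their signatures are illustrative.

// Hypothetical autonomous path using the helper methods above.
@Override
public void runOpMode() {
    // ... hardware mapping and IMU setup elided ...
    resetHeading();                  // establish the zero-degree reference
    waitForStart();

    driveStraight(0.5, 24.0, 0.0);   // drive 24 inches while holding 0 degrees
    turnToHeading(0.3, -90.0);       // pivot to -90 degrees (CW, per the sign convention above)
    holdHeading(0.3, -90.0, 0.5);    // give the heading half a second to stabilize

    moveRobot(0, 0);                 // ensure the robot is stopped
    telemetry.addData("Path", "Complete");
    telemetry.update();
}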

View File

@ -90,8 +90,8 @@ public class RobotAutoDriveToLine_Linear extends LinearOpMode {
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Get a reference to our sensor object. It's recommended to use NormalizedColorSensor over
// ColorSensor, because NormalizedColorSensor consistently gives values between 0 and 1, while

View File

@ -0,0 +1,167 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;
/**
* This file works in conjunction with the External Hardware Class sample called: ConceptExternalHardwareClass.java
* Please read the explanations in that Sample about how to use this class definition.
*
* This file defines a Java Class that performs all the setup and configuration for a sample robot's hardware (motors and sensors).
* It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand)
*
* This one file/class can be used by ALL of your OpModes without having to cut & paste the code each time.
*
* Where possible, the actual hardware objects are "abstracted" (or hidden) so the OpMode code just makes calls into the class,
* rather than accessing the internal hardware directly. This is why the objects are declared "private".
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with *exactly the same name*.
*
* Or, in OnBot Java, add a new file named RobotHardware.java, drawing from this Sample; select Not an OpMode.
* Also add a new OpMode, drawing from the Sample ConceptExternalHardwareClass.java; select TeleOp.
*
*/
public class RobotHardware {
/* Declare OpMode members. */
private LinearOpMode myOpMode = null; // gain access to methods in the calling OpMode.
// Define Motor and Servo objects (Make them private so they can't be accessed externally)
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
private DcMotor armMotor = null;
private Servo leftHand = null;
private Servo rightHand = null;
// Define Drive constants. Make them public so they CAN be used by the calling OpMode
public static final double MID_SERVO = 0.5 ;
public static final double HAND_SPEED = 0.02 ; // sets rate to move servo
public static final double ARM_UP_POWER = 0.45 ;
public static final double ARM_DOWN_POWER = -0.45 ;
// Define a constructor that allows the OpMode to pass a reference to itself.
public RobotHardware (LinearOpMode opmode) {
myOpMode = opmode;
}
/**
* Initialize all the robot's hardware.
* This method must be called ONCE when the OpMode is initialized.
*
* All of the hardware devices are accessed via the hardware map, and initialized.
*/
public void init() {
// Define and Initialize Motors (note: need to use reference to actual OpMode).
leftDrive = myOpMode.hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = myOpMode.hardwareMap.get(DcMotor.class, "right_drive");
armMotor = myOpMode.hardwareMap.get(DcMotor.class, "arm");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// Pushing the left stick forward MUST make the robot go forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Define and initialize ALL installed servos.
leftHand = myOpMode.hardwareMap.get(Servo.class, "left_hand");
rightHand = myOpMode.hardwareMap.get(Servo.class, "right_hand");
leftHand.setPosition(MID_SERVO);
rightHand.setPosition(MID_SERVO);
myOpMode.telemetry.addData(">", "Hardware Initialized");
myOpMode.telemetry.update();
}
/**
* Calculates the left/right motor powers required to achieve the requested
* robot motions: Drive (Axial motion) and Turn (Yaw motion).
* Then sends these power levels to the motors.
*
* @param Drive Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
* @param Turn Right/Left turning power (-1.0 to 1.0) +ve is CW
*/
public void driveRobot(double Drive, double Turn) {
// Combine drive and turn for blended motion.
double left = Drive + Turn;
double right = Drive - Turn;
// Scale the values so neither exceed +/- 1.0
double max = Math.max(Math.abs(left), Math.abs(right));
if (max > 1.0)
{
left /= max;
right /= max;
}
// Use existing function to drive both wheels.
setDrivePower(left, right);
}
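As a worked example: a Drive request of 0.8 combined with a Turn request of 0.6 initially yields left = 1.4 and right = 0.2; since max = 1.4 exceeds 1.0, both values are divided by 1.4, giving left = 1.0 and right of roughly 0.14. This preserves the steering ratio while keeping both motor powers legal.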
/**
* Pass the requested wheel motor powers to the appropriate hardware drive motors.
*
* @param leftWheel Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
* @param rightWheel Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
*/
public void setDrivePower(double leftWheel, double rightWheel) {
// Output the values to the motor drives.
leftDrive.setPower(leftWheel);
rightDrive.setPower(rightWheel);
}
/**
* Pass the requested arm power to the appropriate hardware drive motor
*
* @param power driving power (-1.0 to 1.0)
*/
public void setArmPower(double power) {
armMotor.setPower(power);
}
/**
* Send the two hand-servos to opposing (mirrored) positions, based on the passed offset.
*
* @param offset The amount to offset both servos from MID_SERVO, in opposite directions (clipped to +/- 0.5)
*/
public void setHandPositions(double offset) {
offset = Range.clip(offset, -0.5, 0.5);
leftHand.setPosition(MID_SERVO + offset);
rightHand.setPosition(MID_SERVO - offset);
}
}
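A minimal TeleOp consuming this class might look like the sketch below. It is a trimmed-down variant of the ConceptExternalHardwareClass sample, and it assumes RobotHardware.java has been copied into the teamcode package; the OpMode name and gamepad mapping are illustrative.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

@TeleOp(name = "Minimal RobotHardware Demo")
public class MinimalHardwareDemo extends LinearOpMode {
    // Pass this OpMode to the hardware class so it can reach hardwareMap and telemetry.
    RobotHardware robot = new RobotHardware(this);

    @Override
    public void runOpMode() {
        robot.init();      // one-time hardware setup, during the Init phase
        waitForStart();

        while (opModeIsActive()) {
            // POV-style mix: left stick drives, right stick turns.
            double drive = -gamepad1.left_stick_y;
            double turn  =  gamepad1.right_stick_x;
            robot.driveRobot(drive, turn);
        }
    }
}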

View File

@ -86,8 +86,8 @@ public class RobotTeleopPOV_Linear extends LinearOpMode {
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Define and initialize ALL installed servos.
leftClaw = hardwareMap.get(Servo.class, "left_hand");
@ -96,7 +96,7 @@ public class RobotTeleopPOV_Linear extends LinearOpMode {
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData("Say", "Hello Driver"); //
telemetry.addData(">", "Robot Ready. Press Play."); //
telemetry.update();
// Wait for the game to start (driver presses PLAY)

View File

@ -84,8 +84,8 @@ public class RobotTeleopTank_Iterative extends OpMode{
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Define and initialize ALL installed servos.
leftClaw = hardwareMap.get(Servo.class, "left_hand");
@ -94,7 +94,7 @@ public class RobotTeleopTank_Iterative extends OpMode{
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData("Say", "Hello Driver"); //
telemetry.addData(">", "Robot Ready. Press Play."); //
}
/*

View File

@ -37,14 +37,9 @@ Concept: This is a sample OpMode that illustrates performing a specific function
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name. These OpModes may not produce a drivable robot.
Library: This is a class, or set of classes used to implement some strategy.
These will typically NOT implement a full OpMode. Instead they will be included
by an OpMode to provide some stand-alone capability.
After the prefix, other conventions will apply:
* Sensor class names are constructed as: Sensor - Company - Type
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype
* Library class names are constructed as: Library - Topic - OpModetype

View File

@ -5,45 +5,35 @@ This document defines the FTC Sample OpMode and Class conventions.
### OpMode Name
A range of different samples classes will reside in the java/external/samples folder.
To gain a better understanding of how the samples are organized, and how to interpret the
naming system, it will help to understand the conventions that were used during their creation.
For ease of understanding, the class names will follow a naming convention which indicates
the purpose of each class. The prefix of the name will be one of the following:
To summarize: A range of different samples classes will reside in the java/external/samples.
The class names will follow a naming convention which indicates the purpose of each class.
The prefix of the name will be one of the following:
Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones Tank Drive examples.
Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones examples.
Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended to drive a functioning robot, it is simply showing the minimal code
required to read and display the sensor values.
Hardware: This is not an actual OpMode, but a helper class that is used to describe
one particular robot's hardware configuration: eg: For the K9 or Robot.
Look at any Robot sample to see how this can be used in an OpMode.
Teams can copy one of these to create their own robot definition.
Robot: This is a Sample OpMode that uses the Robot robot hardware as a base.
It may be used to provide some standard baseline Robot opmodes, or
to demonstrate how a particular sensor or concept can be used directly on the
Robot chassis.
Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
It may be used to provide a common baseline driving OpMode, or
to demonstrate how a particular sensor or concept can be used to navigate.
Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
or the comments should reference an external doc, guide or tutorial.
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name.
Library: This is a class, or set of classes used to implement some strategy.
These will typically NOT implement a full opmode. Instead they will be included
by an OpMode to provide some stand-alone capability.
locate based on their name. These OpModes may not produce a drivable robot.
After the prefix, other conventions will apply:
* Sensor class names should be constructed as: Sensor - Company - Type
* Hardware class names should be constructed as: Hardware - Robot type
* Robot class names should be constructed as: Robot - Mode - Action - OpModetype
* Concept class names should be constructed as: Concept - Topic - OpModetype
* Library class names should be constructed as: Library - Topic - OpModetype
### Sample OpMode Content/Style

View File

@ -306,9 +306,9 @@ public class FtcRobotControllerActivity extends Activity
preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true);
preferencesHelper.getSharedPreferences().registerOnSharedPreferenceChangeListener(sharedPreferencesListener);
// Check if this RC app is from a later FTC season that what was installed previously
// Check if this RC app is from a later FTC season than what was installed previously
int ftcSeasonYearOfPreviouslyInstalledRc = preferencesHelper.readInt(getString(R.string.pref_ftc_season_year_of_current_rc), 0);
int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(YearMonth.now()).getValue();
int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(AppUtil.getInstance().getLocalSdkBuildMonth()).getValue();
if (ftcSeasonYearOfCurrentlyInstalledRc > ftcSeasonYearOfPreviouslyInstalledRc) {
preferencesHelper.writeIntPrefIfDifferent(getString(R.string.pref_ftc_season_year_of_current_rc), ftcSeasonYearOfCurrentlyInstalledRc);
// Since it's a new FTC season, we should reset certain settings back to their default values.
@ -395,10 +395,9 @@ public class FtcRobotControllerActivity extends Activity
readNetworkType();
ServiceController.startService(FtcRobotControllerWatchdogService.class);
bindToService();
logPackageVersions();
logDeviceSerialNumber();
AndroidBoard.getInstance().logAndroidBoardInfo();
RobotLog.logAppInfo();
RobotLog.logDeviceInfo();
AndroidBoard.getInstance().logAndroidBoardInfo();
if (preferencesHelper.readBoolean(getString(R.string.pref_wifi_automute), false)) {
initWifiMute(true);
@ -499,19 +498,6 @@ public class FtcRobotControllerActivity extends Activity
}
}
protected void logPackageVersions() {
RobotLog.logBuildConfig(com.qualcomm.ftcrobotcontroller.BuildConfig.class);
RobotLog.logBuildConfig(com.qualcomm.robotcore.BuildConfig.class);
RobotLog.logBuildConfig(com.qualcomm.hardware.BuildConfig.class);
RobotLog.logBuildConfig(com.qualcomm.ftccommon.BuildConfig.class);
RobotLog.logBuildConfig(com.google.blocks.BuildConfig.class);
RobotLog.logBuildConfig(org.firstinspires.inspection.BuildConfig.class);
}
protected void logDeviceSerialNumber() {
RobotLog.ii(TAG, "Android device serial number: " + Device.getSerialNumberOrUnknown());
}
protected void readNetworkType() {
// Control hubs are always running the access point model. Everything else, for the time
// being always runs the Wi-Fi Direct model.

View File

@ -65,7 +65,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<item>@style/AppThemeTealRC</item>
</integer-array>
<string name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc</string>
<string translatable="false" name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc_new</string>
<string name="packageName">@string/packageNameRobotController</string>

View File

@ -54,10 +54,57 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
# Release Information
## Version 8.0 (20220907-131644)
### Breaking Changes
* Increases the Robocol version.
* This means an 8.0 or later Robot Controller or Driver Station will not be able to communicate with a 7.2 or earlier Driver Station or Robot Controller.
* If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated.
* Initializing I2C devices now happens when you retrieve them from the `HardwareMap` for the first time.
* Previously, all I2C devices would be initialized before the Op Mode even began executing,
whether you were actually going to use them or not. This could result in reduced performance and
unnecessary warnings.
* With this change, it is very important for Java users to retrieve all needed devices from the
`HardwareMap` **during the Init phase of the Op Mode**. Namely, declare a variable for each hardware
device the Op Mode will use, and assign a value to each, as shown in the sketch after this list. Do not do this during the Run phase, or your
Op Mode may briefly hang while the devices you are retrieving get initialized.
* Op Modes that do not use all of the I2C devices specified in the configuration file should take
less time to initialize. Op Modes that do use all of the specified I2C devices should take the
same amount of time as previously.
* Fixes [issue #251](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/251) by changing the order in which axis rotation rates are read from the angular velocity vector in the BNO055 IMU driver.
* Deprecates `pitchMode` in `BNO055IMU.Parameters`.
* Setting `pitchMode` to `PitchMode.WINDOWS` would break the coordinate conventions used by the driver.
* Moves `OpModeManagerImpl` to the `com.qualcomm.robotcore.eventloop.opmode` package.
* This breaks third party libraries EasyOpenCV (version 1.5.1 and earlier) and FTC Dashboard (version 0.4.4 and earlier).
* Deletes the deprecated `OpMode` method `resetStartTime()` (use `resetRuntime()` instead).
* Deletes the protected `LinearOpMode.LinearOpModeHelper` class (which was not meant for use by Op Modes).
* Removes I2C Device (Synchronous) config type (deprecated since 2018)
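As an illustration of the Init-phase retrieval described in the list above, a sketch follows; the class name, configuration names, and choice of a ColorSensor as the I2C device are placeholders, not part of these release notes.

import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;

@Autonomous(name = "Init Phase Example")
public class InitPhaseExample extends LinearOpMode {
    private DcMotor leftDrive;
    private ColorSensor colorSensor;   // an I2C device: its first retrieval now triggers initialization

    @Override
    public void runOpMode() {
        // Init phase: retrieve EVERY device here, before waitForStart().
        leftDrive   = hardwareMap.get(DcMotor.class, "left_drive");
        colorSensor = hardwareMap.get(ColorSensor.class, "sensor_color");

        waitForStart();

        // Run phase: only use the already-retrieved objects.
        while (opModeIsActive()) {
            leftDrive.setPower(0.25);
            telemetry.addData("Red", colorSensor.red());
            telemetry.update();
        }
    }
}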
### Enhancements
* Uncaught exceptions in Op Modes no longer require a Restart Robot
* A blue screen popping up with a stacktrace is not an SDK error; this replaces the red text in the telemetry area.
* Since the very first SDK release, Op Mode crashes have put the robot into "EMERGENCY STOP" state, only showing the first line of the exception, and requiring the user to press "Restart Robot" to continue
* Exceptions during an Op Mode now open a popup window with the same color scheme as the log viewer, containing 15 lines of the exception stacktrace to allow easily tracing down the offending line without needing to connect to view logs over ADB or scroll through large amounts of logs in the log viewer.
* The exception text in the popup window is both zoomable and scrollable just like a webpage.
* Pressing the "OK" button in the popup window will return to the main screen of the Driver Station and allow an Op Mode to be run again immediately, without the need to perform a "Restart Robot"
* Adds new Java sample to demonstrate using a hardware class to abstract robot actuators, and share them across multiple Op Modes.
* Sample Op Mode is [ConceptExternalHardwareClass.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java)
* Abstracted hardware class is [RobotHardware.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java)
* Updates RobotAutoDriveByGyro_Linear Java sample to use REV Control/Expansion hub IMU.
* Updates Vuforia samples to reference PowerPlay assets and have correct names and field locations of image targets.
* Updates TensorFlow samples to reference PowerPlay assets.
* Adds opt-in support for Java 8 language features to the OnBotJava editor.
* To opt in, open the OnBotJava Settings, and check `Enable beta Java 8 support`.
* Note that Java 8 code will only compile when the Robot Controller runs Android 7.0 Nougat or later.
* Please report issues [here](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues).
* In OnBotJava, clicking on build errors now correctly jumps to the correct location.
* Improves OnBotJava autocomplete behavior, to provide better completion options in most cases.
* Adds a QR code to the Robot Controller Inspection Report when viewed from the Driver Station for scanning by inspectors at competition.
* Improves I2C performance and reliability in some scenarios.
## Version 7.2 (20220723-130006)
### Breaking Changes
* Updates the build tooling. For Android Studio users, this change requires Android Studio Chipmunk 2021.2.1.
* Removes support for devices that are not competition legal, including Modern Robotics Core Control Modules, the Matrix Controller, and HiTechnic/NXT controllers and sensors. Support remains for Modern Robotics I2C sensors.

View File

@ -14,31 +14,41 @@ Sample opmodes exist in the FtcRobotController module.
To locate these samples, find the FtcRobotController module in the "Project/Android" tab.
Expand the following tree elements:
FtcRobotController / java / org.firstinspires.ftc.robotcontroller / external / samples
FtcRobotController/java/org.firstinspires.ftc.robotcontroller/external/samples
A range of different samples classes can be seen in this folder.
The class names follow a naming convention which indicates the purpose of each class.
The full description of this convention is found in the samples/sample_convention.md file.
### Naming of Samples
A brief synopsis of the naming convention is given here:
To gain a better understanding of how the samples are organized, and how to interpret the
naming system, it will help to understand the conventions that were used during their creation.
These conventions are described (in detail) in the sample_conventions.md file in this folder.
To summarize: A range of different samples classes will reside in the java/external/samples.
The class names will follow a naming convention which indicates the purpose of each class.
The prefix of the name will be one of the following:
* Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones examples.
* Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended as a functioning robot, it is simply showing the minimal code
Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended to drive a functioning robot, it is simply showing the minimal code
required to read and display the sensor values.
* Hardware: This is not an actual OpMode, but a helper class that is used to describe
one particular robot's hardware devices: eg: for a Pushbot. Look at any
Pushbot sample to see how this can be used in an OpMode.
Teams can copy one of these to create their own robot definition.
* Pushbot: This is a Sample OpMode that uses the Pushbot robot structure as a base.
* Concept: This is a sample OpMode that illustrates performing a specific function or concept.
Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
It may be used to provide a common baseline driving OpMode, or
to demonstrate how a particular sensor or concept can be used to navigate.
Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
or the header should reference an external doc, guide or tutorial.
* Library: This is a class, or set of classes used to implement some strategy.
These will typically NOT implement a full OpMode. Instead they will be included
by an OpMode to provide some stand-alone capability.
or the comments should reference an external doc, guide or tutorial.
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name. These OpModes may not produce a drivable robot.
After the prefix, other conventions will apply:
* Sensor class names are constructed as: Sensor - Company - Type
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype
Once you are familiar with the range of samples available, you can choose one to be the
basis for your own robot. In all cases, the desired sample(s) needs to be copied into
@ -50,7 +60,7 @@ This is done inside Android Studio directly, using the following steps:
2) Right click on the sample class and select "Copy"
3) Expand the TeamCode / java folder
3) Expand the TeamCode/java folder
4) Right click on the org.firstinspires.ftc.teamcode folder and select "Paste"

View File

@ -7,16 +7,16 @@ repositories {
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:7.2.0'
implementation 'org.firstinspires.ftc:Blocks:7.2.0'
implementation 'org.firstinspires.ftc:Tfod:7.2.0'
implementation 'org.firstinspires.ftc:RobotCore:7.2.0'
implementation 'org.firstinspires.ftc:RobotServer:7.2.0'
implementation 'org.firstinspires.ftc:OnBotJava:7.2.0'
implementation 'org.firstinspires.ftc:Hardware:7.2.0'
implementation 'org.firstinspires.ftc:FtcCommon:7.2.0'
implementation 'org.firstinspires.ftc:Inspection:8.0.0'
implementation 'org.firstinspires.ftc:Blocks:8.0.0'
implementation 'org.firstinspires.ftc:Tfod:8.0.0'
implementation 'org.firstinspires.ftc:RobotCore:8.0.0'
implementation 'org.firstinspires.ftc:RobotServer:8.0.0'
implementation 'org.firstinspires.ftc:OnBotJava:8.0.0'
implementation 'org.firstinspires.ftc:Hardware:8.0.0'
implementation 'org.firstinspires.ftc:FtcCommon:8.0.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.2.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'org.firstinspires.ftc:gameAssets-FreightFrenzy:1.0.0'
implementation 'org.firstinspires.ftc:gameAssets-PowerPlay:1.0.0'
}