diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 01b8bf6..f3d88b1 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 7f1910a..aa991fc 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Fri Jul 24 14:30:03 PDT 2020 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip diff --git a/src/main/AndroidManifest.xml b/src/main/AndroidManifest.xml new file mode 100644 index 0000000..7ec8ab3 --- /dev/null +++ b/src/main/AndroidManifest.xml @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/assets/FTC_2016-17.dat b/src/main/assets/FTC_2016-17.dat new file mode 100644 index 0000000..d15d80a Binary files /dev/null and b/src/main/assets/FTC_2016-17.dat differ diff --git a/src/main/assets/FTC_2016-17.xml b/src/main/assets/FTC_2016-17.xml new file mode 100644 index 0000000..f75b930 --- /dev/null +++ b/src/main/assets/FTC_2016-17.xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/src/main/assets/RelicVuMark.dat b/src/main/assets/RelicVuMark.dat new file mode 100644 index 0000000..e873bcf Binary files /dev/null and b/src/main/assets/RelicVuMark.dat differ diff --git a/src/main/assets/RelicVuMark.xml b/src/main/assets/RelicVuMark.xml new file mode 100644 index 0000000..b644913 --- /dev/null +++ b/src/main/assets/RelicVuMark.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/src/main/assets/StonesAndChips.dat b/src/main/assets/StonesAndChips.dat new file mode 100644 index 0000000..8ab481a Binary files /dev/null and b/src/main/assets/StonesAndChips.dat differ diff --git a/src/main/assets/StonesAndChips.xml b/src/main/assets/StonesAndChips.xml new file mode 100644 index 0000000..2c6da5e --- /dev/null +++ b/src/main/assets/StonesAndChips.xml @@ -0,0 +1,7 @@ + + + + + + + diff --git a/src/main/assets/UltimateGoal.dat b/src/main/assets/UltimateGoal.dat new file mode 100644 index 0000000..ff47f62 Binary files /dev/null and b/src/main/assets/UltimateGoal.dat differ diff --git a/src/main/assets/UltimateGoal.tflite b/src/main/assets/UltimateGoal.tflite new file mode 100644 index 0000000..489a96a Binary files /dev/null and b/src/main/assets/UltimateGoal.tflite differ diff --git a/src/main/assets/UltimateGoal.xml b/src/main/assets/UltimateGoal.xml new file mode 100644 index 0000000..bb743b0 --- /dev/null +++ b/src/main/assets/UltimateGoal.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/main/assets/qcar_config.xsd b/src/main/assets/qcar_config.xsd new file mode 100644 index 0000000..e69de29 diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java new file mode 100644 index 0000000..2790db4 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOmniOpMode_Linear.java @@ -0,0 +1,167 @@ +/* Copyright (c) 2021 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; + +/** + * This file contains an example of a Linear "OpMode". + * An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match. + * The names of OpModes appear on the menu of the FTC Driver Station. + * When a selection is made from the menu, the corresponding OpMode is executed. + * + * This particular OpMode illustrates driving a 4-motor Omni-Directional (or Holonomic) robot. + * This code will work with either a Mecanum-Drive or an X-Drive train. + * Both of these drives are illustrated at https://gm0.org/en/latest/docs/robot-design/drivetrains/holonomic.html + * Note that a Mecanum drive must display an X roller-pattern when viewed from above. + * + * Also note that it is critical to set the correct rotation direction for each motor. See details below. + * + * Holonomic drives provide the ability for the robot to move in three axes (directions) simultaneously. + * Each motion axis is controlled by one Joystick axis. + * + * 1) Axial: Driving forward and backward Left-joystick Forward/Backward + * 2) Lateral: Strafing right and left Left-joystick Right and Left + * 3) Yaw: Rotating Clockwise and counter clockwise Right-joystick Right and Left + * + * This code is written assuming that the right-side motors need to be reversed for the robot to drive forward. + * When you first test your robot, if it moves backward when you push the left stick forward, then you must flip + * the direction of all 4 motors (see code below). 
+ * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@TeleOp(name="Basic: Omni Linear OpMode", group="Linear Opmode") +@Disabled +public class BasicOmniOpMode_Linear extends LinearOpMode { + + // Declare OpMode members for each of the 4 motors. + private ElapsedTime runtime = new ElapsedTime(); + private DcMotor leftFrontDrive = null; + private DcMotor leftBackDrive = null; + private DcMotor rightFrontDrive = null; + private DcMotor rightBackDrive = null; + + @Override + public void runOpMode() { + + // Initialize the hardware variables. Note that the strings used here must correspond + // to the names assigned during the robot configuration step on the DS or RC devices. + leftFrontDrive = hardwareMap.get(DcMotor.class, "left_front_drive"); + leftBackDrive = hardwareMap.get(DcMotor.class, "left_back_drive"); + rightFrontDrive = hardwareMap.get(DcMotor.class, "right_front_drive"); + rightBackDrive = hardwareMap.get(DcMotor.class, "right_back_drive"); + + // ######################################################################################## + // !!! IMPORTANT Drive Information. Test your motor directions. !!!!! + // ######################################################################################## + // Most robots need the motors on one side to be reversed to drive forward. + // The motor reversals shown here are for a "direct drive" robot (the wheels turn the same direction as the motor shaft) + // If your robot has additional gear reductions or uses a right-angled drive, it's important to ensure + // that your motors are turning in the correct direction. So, start out with the reversals here, BUT + // when you first test your robot, push the left joystick forward and observe the direction the wheels turn. + // Reverse the direction (flip FORWARD <-> REVERSE ) of any wheel that runs backward + // Keep testing until ALL the wheels move the robot forward when you push the left joystick forward. + leftFrontDrive.setDirection(DcMotor.Direction.REVERSE); + leftBackDrive.setDirection(DcMotor.Direction.REVERSE); + rightFrontDrive.setDirection(DcMotor.Direction.FORWARD); + rightBackDrive.setDirection(DcMotor.Direction.FORWARD); + + // Wait for the game to start (driver presses PLAY) + telemetry.addData("Status", "Initialized"); + telemetry.update(); + + waitForStart(); + runtime.reset(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + double max; + + // POV Mode uses left joystick to go forward & strafe, and right joystick to rotate. + double axial = -gamepad1.left_stick_y; // Note: pushing stick forward gives negative value + double lateral = gamepad1.left_stick_x; + double yaw = gamepad1.right_stick_x; + + // Combine the joystick requests for each axis-motion to determine each wheel's power. + // Set up a variable for each drive wheel to save the power level for telemetry. + double leftFrontPower = axial + lateral + yaw; + double rightFrontPower = axial - lateral - yaw; + double leftBackPower = axial - lateral + yaw; + double rightBackPower = axial + lateral - yaw; + + // Normalize the values so no wheel power exceeds 100% + // This ensures that the robot maintains the desired motion. 
+ max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower)); + max = Math.max(max, Math.abs(leftBackPower)); + max = Math.max(max, Math.abs(rightBackPower)); + + if (max > 1.0) { + leftFrontPower /= max; + rightFrontPower /= max; + leftBackPower /= max; + rightBackPower /= max; + } + + // This is test code: + // + // Uncomment the following code to test your motor directions. + // Each button should make the corresponding motor run FORWARD. + // 1) First get all the motors to take to correct positions on the robot + // by adjusting your Robot Configuration if necessary. + // 2) Then make sure they run in the correct direction by modifying the + // the setDirection() calls above. + // Once the correct motors move in the correct direction re-comment this code. + + /* + leftFrontPower = gamepad1.x ? 1.0 : 0.0; // X gamepad + leftBackPower = gamepad1.a ? 1.0 : 0.0; // A gamepad + rightFrontPower = gamepad1.y ? 1.0 : 0.0; // Y gamepad + rightBackPower = gamepad1.b ? 1.0 : 0.0; // B gamepad + */ + + // Send calculated power to wheels + leftFrontDrive.setPower(leftFrontPower); + rightFrontDrive.setPower(rightFrontPower); + leftBackDrive.setPower(leftBackPower); + rightBackDrive.setPower(rightBackPower); + + // Show the elapsed game time and wheel power. + telemetry.addData("Status", "Run Time: " + runtime.toString()); + telemetry.addData("Front left/Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower); + telemetry.addData("Back left/Right", "%4.2f, %4.2f", leftBackPower, rightBackPower); + telemetry.update(); + } + }} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Iterative.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Iterative.java new file mode 100644 index 0000000..e2cf965 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Iterative.java @@ -0,0 +1,140 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; +import com.qualcomm.robotcore.util.Range; + +/** + * This file contains an example of an iterative (Non-Linear) "OpMode". + * An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match. + * The names of OpModes appear on the menu of the FTC Driver Station. + * When a selection is made from the menu, the corresponding OpMode + * class is instantiated on the Robot Controller and executed. + * + * This particular OpMode just executes a basic Tank Drive Teleop for a two wheeled robot + * It includes all the skeletal structure that all iterative OpModes contain. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@TeleOp(name="Basic: Iterative OpMode", group="Iterative Opmode") +@Disabled +public class BasicOpMode_Iterative extends OpMode +{ + // Declare OpMode members. + private ElapsedTime runtime = new ElapsedTime(); + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + /* + * Code to run ONCE when the driver hits INIT + */ + @Override + public void init() { + telemetry.addData("Status", "Initialized"); + + // Initialize the hardware variables. Note that the strings used here as parameters + // to 'get' must correspond to the names assigned during the robot configuration + // step (using the FTC Robot Controller app on the phone). + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // Tell the driver that initialization is complete. 
+ telemetry.addData("Status", "Initialized"); + } + + /* + * Code to run REPEATEDLY after the driver hits INIT, but before they hit PLAY + */ + @Override + public void init_loop() { + } + + /* + * Code to run ONCE when the driver hits PLAY + */ + @Override + public void start() { + runtime.reset(); + } + + /* + * Code to run REPEATEDLY after the driver hits PLAY but before they hit STOP + */ + @Override + public void loop() { + // Setup a variable for each drive wheel to save power level for telemetry + double leftPower; + double rightPower; + + // Choose to drive using either Tank Mode, or POV Mode + // Comment out the method that's not used. The default below is POV. + + // POV Mode uses left stick to go forward, and right stick to turn. + // - This uses basic math to combine motions and is easier to drive straight. + double drive = -gamepad1.left_stick_y; + double turn = gamepad1.right_stick_x; + leftPower = Range.clip(drive + turn, -1.0, 1.0) ; + rightPower = Range.clip(drive - turn, -1.0, 1.0) ; + + // Tank Mode uses one stick to control each wheel. + // - This requires no math, but it is hard to drive forward slowly and keep straight. + // leftPower = -gamepad1.left_stick_y ; + // rightPower = -gamepad1.right_stick_y ; + + // Send calculated power to wheels + leftDrive.setPower(leftPower); + rightDrive.setPower(rightPower); + + // Show the elapsed game time and wheel power. + telemetry.addData("Status", "Run Time: " + runtime.toString()); + telemetry.addData("Motors", "left (%.2f), right (%.2f)", leftPower, rightPower); + } + + /* + * Code to run ONCE after the driver hits STOP + */ + @Override + public void stop() { + } + +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Linear.java new file mode 100644 index 0000000..277be2c --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/BasicOpMode_Linear.java @@ -0,0 +1,115 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; +import com.qualcomm.robotcore.util.Range; + + +/** + * This file contains an minimal example of a Linear "OpMode". An OpMode is a 'program' that runs in either + * the autonomous or the teleop period of an FTC match. The names of OpModes appear on the menu + * of the FTC Driver Station. When a selection is made from the menu, the corresponding OpMode + * class is instantiated on the Robot Controller and executed. + * + * This particular OpMode just executes a basic Tank Drive Teleop for a two wheeled robot + * It includes all the skeletal structure that all linear OpModes contain. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@TeleOp(name="Basic: Linear OpMode", group="Linear Opmode") +@Disabled +public class BasicOpMode_Linear extends LinearOpMode { + + // Declare OpMode members. + private ElapsedTime runtime = new ElapsedTime(); + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + @Override + public void runOpMode() { + telemetry.addData("Status", "Initialized"); + telemetry.update(); + + // Initialize the hardware variables. Note that the strings used here as parameters + // to 'get' must correspond to the names assigned during the robot configuration + // step (using the FTC Robot Controller app on the phone). + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + runtime.reset(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // Setup a variable for each drive wheel to save power level for telemetry + double leftPower; + double rightPower; + + // Choose to drive using either Tank Mode, or POV Mode + // Comment out the method that's not used. The default below is POV. + + // POV Mode uses left stick to go forward, and right stick to turn. + // - This uses basic math to combine motions and is easier to drive straight. 
+ double drive = -gamepad1.left_stick_y; + double turn = gamepad1.right_stick_x; + leftPower = Range.clip(drive + turn, -1.0, 1.0) ; + rightPower = Range.clip(drive - turn, -1.0, 1.0) ; + + // Tank Mode uses one stick to control each wheel. + // - This requires no math, but it is hard to drive forward slowly and keep straight. + // leftPower = -gamepad1.left_stick_y ; + // rightPower = -gamepad1.right_stick_y ; + + // Send calculated power to wheels + leftDrive.setPower(leftPower); + rightDrive.setPower(rightPower); + + // Show the elapsed game time and wheel power. + telemetry.addData("Status", "Run Time: " + runtime.toString()); + telemetry.addData("Motors", "left (%.2f), right (%.2f)", leftPower, rightPower); + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptCompassCalibration.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptCompassCalibration.java new file mode 100644 index 0000000..45dac81 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptCompassCalibration.java @@ -0,0 +1,125 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.CompassSensor; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; + +/** + * This file illustrates the concept of calibrating a MR Compass + * This code assumes there is a compass configured with the name "compass" + * + * This code will put the compass into calibration mode, wait three seconds and then attempt + * to rotate two full turns clockwise. 
This will allow the compass to do a magnetic calibration. + * + * Once compete, the program will put the compass back into measurement mode and check to see if the + * calibration was successful. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@TeleOp(name="Concept: Compass Calibration", group="Concept") +@Disabled +public class ConceptCompassCalibration extends LinearOpMode { + + /* Declare OpMode members. */ + public DcMotor leftDrive = null; + public DcMotor rightDrive = null; + private ElapsedTime runtime = new ElapsedTime(); + CompassSensor compass; + + final static double MOTOR_POWER = 0.2; // scale from 0 to 1 + static final long HOLD_TIME_MS = 3000; + static final double CAL_TIME_SEC = 20; + + @Override + public void runOpMode() { + + // Initialize the drive system variables. + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // get a reference to our Compass Sensor object. + compass = hardwareMap.get(CompassSensor.class, "compass"); + + // Send telemetry message to signify robot waiting; + telemetry.addData("Status", "Ready to cal"); // + telemetry.update(); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + + // Set the compass to calibration mode + compass.setMode(CompassSensor.CompassMode.CALIBRATION_MODE); + telemetry.addData("Compass", "Compass in calibration mode"); + telemetry.update(); + + sleep(HOLD_TIME_MS); // Just do a sleep while we switch modes + + // Start the robot rotating clockwise + telemetry.addData("Compass", "Calibration mode. Turning the robot..."); + telemetry.update(); + leftDrive.setPower(MOTOR_POWER); + rightDrive.setPower(-MOTOR_POWER); + + // run until time expires OR the driver presses STOP; + runtime.reset(); + while (opModeIsActive() && (runtime.time() < CAL_TIME_SEC)) { + idle(); + } + + // Stop all motors and turn off claibration + leftDrive.setPower(0); + rightDrive.setPower(0); + compass.setMode(CompassSensor.CompassMode.MEASUREMENT_MODE); + telemetry.addData("Compass", "Returning to measurement mode"); + telemetry.update(); + + sleep(HOLD_TIME_MS); // Just do a sleep while we switch modes + + // Report whether the Calibration was successful or not. + if (compass.calibrationFailed()) + telemetry.addData("Compass", "Calibrate Failed. Try Again!"); + else + telemetry.addData("Compass", "Calibrate Passed."); + telemetry.update(); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java new file mode 100644 index 0000000..6607fb6 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java @@ -0,0 +1,142 @@ +/* Copyright (c) 2022 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.util.Range; + +/** + * This OpMode Sample illustrates how to use an external "hardware" class to modularize all the robot's sensors and actuators. + * This approach is very efficient because the same hardware class can be used by all of your teleop and autonomous OpModes + * without requiring many copy & paste operations. Once you have defined and tested the hardware class with one OpMode, + * it is instantly available to other OpModes. + * + * The real benefit of this approach is that as you tweak your robot hardware, you only need to make changes in ONE place (the Hardware Class). + * So, to be effective you should put as much of your hardware setup and access code as possible in the hardware class. + * Essentially anything you do with hardware in BOTH Teleop and Auto should likely go in the hardware class. + * + * The Hardware Class is created in a separate file, and then an "instance" of this class is created in each OpMode. + * In order for the class to do typical OpMode things (like send telemetry data) it must be passed a reference to the + * OpMode object when it's created, so it can access all core OpMode functions. This is illustrated below. + * + * In this concept sample, the hardware class file is called RobotHardware.java and it must accompany this sample OpMode. + * So, if you copy ConceptExternalHardwareClass.java into TeamCode (using Android Studio or OnBotJava) then RobotHardware.java + * must also be copied to the same location (maintaining its name).
+ * + * For comparison purposes, this sample and its accompanying hardware class duplicates the functionality of the + * RobotTelopPOV_Linear opmode. It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand) + * + * View the RobotHardware.java class file for more details + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * In OnBot Java, add a new OpMode, drawing from this Sample; select TeleOp. + * Also add another new file named RobotHardware.java, drawing from the Sample with that name; select Not an OpMode. + */ + +@TeleOp(name="Concept: Robot Hardware Class", group="Robot") +@Disabled +public class ConceptExternalHardwareClass extends LinearOpMode { + + // Create a RobotHardware object to be used to access robot hardware. + // Prefix any hardware functions with "robot." to access this class. + RobotHardware robot = new RobotHardware(this); + + @Override + public void runOpMode() { + double drive = 0; + double turn = 0; + double arm = 0; + double handOffset = 0; + + // initialize all the hardware, using the hardware class. See how clean and simple this is? + robot.init(); + + // Send telemetry message to signify robot waiting; + // Wait for the game to start (driver presses PLAY) + waitForStart(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // Run wheels in POV mode (note: The joystick goes negative when pushed forward, so negate it) + // In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right. + // This way it's also easy to just drive straight, or just turn. + drive = -gamepad1.left_stick_y; + turn = gamepad1.right_stick_x; + + // Combine drive and turn for blended motion. Use RobotHardware class + robot.driveRobot(drive, turn); + + // Use gamepad left & right Bumpers to open and close the claw + // Use the SERVO constants defined in RobotHardware class. + // Each time around the loop, the servos will move by a small amount. + // Limit the total offset to half of the full travel range + if (gamepad1.right_bumper) + handOffset += robot.HAND_SPEED; + else if (gamepad1.left_bumper) + handOffset -= robot.HAND_SPEED; + handOffset = Range.clip(handOffset, -0.5, 0.5); + + // Move both servos to new position. Use RobotHardware class + robot.setHandPositions(handOffset); + + // Use gamepad buttons to move arm up (Y) and down (A) + // Use the MOTOR constants defined in RobotHardware class. + if (gamepad1.y) + arm = robot.ARM_UP_POWER; + else if (gamepad1.a) + arm = robot.ARM_DOWN_POWER; + else + arm = 0; + + robot.setArmPower(arm); + + // Send telemetry messages to explain controls and show robot status + telemetry.addData("Drive", "Left Stick"); + telemetry.addData("Turn", "Right Stick"); + telemetry.addData("Arm Up/Down", "Y & A Buttons"); + telemetry.addData("Hand Open/Closed", "Left and Right Bumpers"); + telemetry.addData("-", "-------"); + + telemetry.addData("Drive Power", "%.2f", drive); + telemetry.addData("Turn Power", "%.2f", turn); + telemetry.addData("Arm Power", "%.2f", arm); + telemetry.addData("Hand Position", "Offset = %.2f", handOffset); + telemetry.update(); + + // Pace this loop so hands move at a reasonable speed. 
+ sleep(50); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadRumble.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadRumble.java new file mode 100644 index 0000000..929fdaa --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadRumble.java @@ -0,0 +1,200 @@ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.Gamepad; +import com.qualcomm.robotcore.util.ElapsedTime; + +/** + * This sample illustrates using the rumble feature of many gamepads. + * + * Note: Some gamepads "rumble" better than others. + * The Xbox & PS4 have a left (rumble1) and right (rumble2) rumble motor. + * These two gamepads have two distinct rumble modes: Large on the left, and small on the right + * The ETpark gamepad may only respond to rumble1, and may only run at full power. + * The Logitech F310 gamepad does not have *any* rumble ability. + * + * Moral: You should use this sample to experiment with your specific gamepads to explore their rumble features. + * + * The rumble motors are accessed through the standard gamepad1 and gamepad2 objects. + * Several new methods were added to the Gamepad class in FTC SDK Rev 7 + * The key methods are as follows: + * + * .rumble(double rumble1, double rumble2, int durationMs) + * This method sets the rumble power of both motors for a specific time duration. + * Both rumble arguments are motor-power levels in the 0.0 to 1.0 range. + * durationMs is the desired length of the rumble action in milliseconds. + * This method returns immediately. + * Note: + * Use a durationMs of Gamepad.RUMBLE_DURATION_CONTINUOUS to provide a continuous rumble + * Use a power of 0, or duration of 0 to stop a rumble. + * + * .rumbleBlips(int count) allows an easy way to signal the driver with a series of rumble blips. + * Just specify how many blips you want. + * This method returns immediately. + * + * .runRumbleEffect(customRumbleEffect) allows you to run a custom rumble sequence that you have + * built using the Gamepad.RumbleEffect.Builder() + * A "custom effect" is a sequence of steps, where each step can rumble any of the + * rumble motors for a specific period at a specific power level. + * The Custom Effect will play as the (un-blocked) OpMode continues to run + * + * .isRumbling() returns true if there is a rumble effect in progress. + * Use this call to prevent stepping on a current rumble. + * + * .stopRumble() Stop any ongoing rumble or custom rumble effect. + * + * .rumble(int durationMs) Full power rumble for fixed duration. + * + * Note: Whenever a new Rumble command is issued, any currently executing rumble action will + * be truncated, and the new action started immediately. Take these precautions: + * 1) Do Not SPAM the rumble motors by issuing rapid fire commands + * 2) Multiple sources for rumble commands must coordinate to avoid tromping on each other. + * + * This can be achieved several possible ways: + * 1) Only having one source for rumble actions + * 2) Issuing rumble commands on transitions, rather than states. + * e.g. The moment a touch sensor is pressed, rather than the entire time it is being pressed. + * 3) Scheduling rumble commands based on timed events. e.g. 
10 seconds prior to endgame + * 4) Rumble on non-overlapping mechanical actions. e.g. arm fully-extended or fully-retracted. + * 5) Use isRumbling() to hold off on a new rumble if one is already in progress. + * + * The examples shown here are representative of how to invoke a gamepad rumble. + * It is assumed that these will be modified to suit the specific robot and team strategy needs. + * + * ######## Read the telemetry display on the Driver Station Screen for instructions. ###### + * + * Ex 1) This example shows a) how to create a custom rumble effect, and then b) how to trigger it based + * on game time. One use for this might be to alert the driver that half-time or End-game is approaching. + * + * Ex 2) This example shows tying the rumble power to a changing sensor value. + * In this case it is the Gamepad trigger, but it could be any sensor output scaled to the 0-1 range. + * Since it takes over the rumble motors, it is only performed when the Left Bumper is pressed. + * Note that this approach MUST include a way to turn OFF the rumble when the button is released. + * + * Ex 3) This example shows a simple way to trigger a 3-blip sequence. In this case it is + * triggered by the gamepad A (Cross) button, but it could be any sensor, like a touch or light sensor. + * Note that this code ensures that it only rumbles once when the input goes true. + * + * Ex 4) This example shows how to trigger a single rumble when an input value gets over a certain value. + * In this case it is reading the Right Trigger, but it could be any variable sensor, like a + * range sensor, or position sensor. The code needs to ensure that it is only triggered once, so + * it waits till the sensor drops back below the threshold before it can trigger again. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + */ + +@Disabled +@TeleOp(name="Concept: Gamepad Rumble", group ="Concept") +public class ConceptGamepadRumble extends LinearOpMode +{ + boolean lastA = false; // Use to track the prior button state. + boolean lastLB = false; // Use to track the prior button state. + boolean highLevel = false; // used to prevent multiple level-based rumbles. + boolean secondHalf = false; // Use to prevent multiple half-time warning rumbles. + + Gamepad.RumbleEffect customRumbleEffect; // Use to build a custom rumble sequence. + ElapsedTime runtime = new ElapsedTime(); // Use to determine when end game is starting. + + final double HALF_TIME = 60.0; // Wait this many seconds before rumble-alert for half-time. + final double TRIGGER_THRESHOLD = 0.75; // Squeeze more than 3/4 to get rumble. + + @Override + public void runOpMode() + { + // Example 1. a) start by creating a three-pulse rumble sequence: right, LEFT, LEFT + customRumbleEffect = new Gamepad.RumbleEffect.Builder() + .addStep(0.0, 1.0, 500) // Rumble right motor 100% for 500 mSec + .addStep(0.0, 0.0, 300) // Pause for 300 mSec + .addStep(1.0, 0.0, 250) // Rumble left motor 100% for 250 mSec + .addStep(0.0, 0.0, 250) // Pause for 250 mSec + .addStep(1.0, 0.0, 250) // Rumble left motor 100% for 250 mSec + .build(); + + telemetry.addData(">", "Press Start"); + telemetry.update(); + + waitForStart(); + runtime.reset(); // Start game timer. + + // Loop while monitoring buttons for rumble triggers + while (opModeIsActive()) + { + // Read and save the current gamepad button states.
+ boolean currentA = gamepad1.a ; + boolean currentLB = gamepad1.left_bumper ; + + // Display the current Rumble status. Just for interest. + telemetry.addData(">", "Are we RUMBLING? %s\n", gamepad1.isRumbling() ? "YES" : "no" ); + + // ---------------------------------------------------------------------------------------- + // Example 1. b) Watch the runtime timer, and run the custom rumble when we hit half-time. + // Make sure we only signal once by setting "secondHalf" flag to prevent further rumbles. + // ---------------------------------------------------------------------------------------- + if ((runtime.seconds() > HALF_TIME) && !secondHalf) { + gamepad1.runRumbleEffect(customRumbleEffect); + secondHalf =true; + } + + // Display the time remaining while we are still counting down. + if (!secondHalf) { + telemetry.addData(">", "Halftime Alert Countdown: %3.0f Sec \n", (HALF_TIME - runtime.seconds()) ); + } + + + // ---------------------------------------------------------------------------------------- + // Example 2. If Left Bumper is being pressed, power the rumble motors based on the two trigger depressions. + // This is useful to see how the rumble feels at various power levels. + // ---------------------------------------------------------------------------------------- + if (currentLB) { + // Left Bumper is being pressed, so send left and right "trigger" values to left and right rumble motors. + gamepad1.rumble(gamepad1.left_trigger, gamepad1.right_trigger, Gamepad.RUMBLE_DURATION_CONTINUOUS); + + // Show what is being sent to rumbles + telemetry.addData(">", "Squeeze triggers to control rumbles"); + telemetry.addData("> : Rumble", "Left: %.0f%% Right: %.0f%%", gamepad1.left_trigger * 100, gamepad1.right_trigger * 100); + } else { + // Make sure rumble is turned off when Left Bumper is released (only one time each press) + if (lastLB) { + gamepad1.stopRumble(); + } + + // Prompt for manual rumble action + telemetry.addData(">", "Hold Left-Bumper to test Manual Rumble"); + telemetry.addData(">", "Press A (Cross) for three blips"); + telemetry.addData(">", "Squeeze right trigger slowly for 1 blip"); + } + lastLB = currentLB; // remember the current button state for next time around the loop + + + // ---------------------------------------------------------------------------------------- + // Example 3. Blip 3 times at the moment that A (Cross) is pressed. (look for pressed transition) + // BUT !!! Skip it altogether if the Gamepad is already rumbling. + // ---------------------------------------------------------------------------------------- + if (currentA && !lastA) { + if (!gamepad1.isRumbling()) // Check for possible overlap of rumbles. + gamepad1.rumbleBlips(3); + } + lastA = currentA; // remember the current button state for next time around the loop + + + // ---------------------------------------------------------------------------------------- + // Example 4. Rumble once when gamepad right trigger goes above the THRESHOLD. + // ---------------------------------------------------------------------------------------- + if (gamepad1.right_trigger > TRIGGER_THRESHOLD) { + if (!highLevel) { + gamepad1.rumble(0.9, 0, 200); // 200 mSec burst on left motor. + highLevel = true; // Hold off any more triggers + } + } else { + highLevel = false; // We can trigger again now. + } + + // Send the telemetry data to the Driver Station, and then pause to pace the program. 
+ telemetry.update(); + sleep(10); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadTouchpad.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadTouchpad.java new file mode 100644 index 0000000..dc1795a --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptGamepadTouchpad.java @@ -0,0 +1,78 @@ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; + +import org.firstinspires.ftc.robotcore.external.Telemetry; + +/** + * This sample illustrates using the touchpad feature found on some gamepads. + * + * The Sony PS4 gamepad can detect two distinct touches on the central touchpad. + * Other gamepads with different touchpads may provide mixed results. + * + * The touchpads are accessed through the standard gamepad1 and gamepad2 objects. + * Several new members were added to the Gamepad class in FTC SDK Rev 7 + * + * .touchpad_finger_1 returns true if at least one finger is detected. + * .touchpad_finger_1_x finger 1 X coordinate. Valid if touchpad_finger_1 is true + * .touchpad_finger_1_y finger 1 Y coordinate. Valid if touchpad_finger_1 is true + * + * .touchpad_finger_2 returns true if a second finger is detected + * .touchpad_finger_2_x finger 2 X coordinate. Valid if touchpad_finger_2 is true + * .touchpad_finger_2_y finger 2 Y coordinate. Valid if touchpad_finger_2 is true + * + * Finger touches are reported with an X and Y coordinate in following coordinate system. + * + * 1) X is the Horizontal axis, and Y is the vertical axis + * 2) The 0,0 origin is at the center of the touchpad. + * 3) 1.0, 1.0 is at the top right corner of the touchpad. + * 4) -1.0,-1.0 is at the bottom left corner of the touchpad. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. 
+ */ + +@Disabled +@TeleOp(name="Concept: Gamepad Touchpad", group ="Concept") +public class ConceptGamepadTouchpad extends LinearOpMode +{ + @Override + public void runOpMode() + { + telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); + + telemetry.addData(">", "Press Start"); + telemetry.update(); + + waitForStart(); + + while (opModeIsActive()) + { + boolean finger = false; + + // Display finger 1 x & y position if finger detected + if(gamepad1.touchpad_finger_1) + { + finger = true; + telemetry.addLine(String.format("Finger 1: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_1_x, gamepad1.touchpad_finger_1_y)); + } + + // Display finger 2 x & y position if finger detected + if(gamepad1.touchpad_finger_2) + { + finger = true; + telemetry.addLine(String.format("Finger 2: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_2_x, gamepad1.touchpad_finger_2_y)); + } + + if(!finger) + { + telemetry.addLine("No fingers"); + } + + telemetry.update(); + sleep(10); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptMotorBulkRead.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptMotorBulkRead.java new file mode 100644 index 0000000..e0c5a75 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptMotorBulkRead.java @@ -0,0 +1,229 @@ +/* Copyright (c) 2019 Phil Malone. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.lynx.LynxModule; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotorEx; +import com.qualcomm.robotcore.util.ElapsedTime; + +import java.util.Iterator; +import java.util.List; + + /* + This sample illustrates how to use the Expansion Hub's Bulk-Read feature to speed up control cycle times. + In this example there are 4 motors that need their encoder positions and velocities read. + The sample is written to work with one or two expansion hubs, with no assumption as to where the motors are located. + + Three scenarios are tested: + Cache Mode = OFF This is the normal default, where no cache is used, and every read produces a discrete transaction with + an expansion hub, which is the slowest approach, but guarantees that the value is as fresh (recent) as possible. + + Cache Mode = AUTO This mode will attempt to minimize the number of discrete read commands, by performing bulk-reads + and then returning values that have been cached. The cache is updated automatically whenever any specific encoder is re-read. + This mode will always return new data, but it may perform more bulk-reads than absolutely required. + Extra reads will be performed if multiple encoder/velocity reads are performed on the same encoder in one control cycle. + This mode is a good compromise between the OFF and MANUAL modes. + Note: If there are significant user-program delays between encoder reads, the cached value may not be fresh (recent). + You can issue a clearBulkCache() call at any time to force a fresh bulk-read on the next encoder read. + + Cache Mode = MANUAL This mode requires the user's code to determine the best time to clear the cached bulk-read data. + Well organized code will reset the cache once at the beginning of the control cycle, and then immediately read and store all the encoder values. + This approach will produce the shortest cycle times, but it does require the user to manually clear the cache. + Since NO automatic Bulk-Reads are performed, neglecting to clear the bulk cache will result in the same values being returned + each time an encoder read is performed. + + ------------------------------------- + + General tip to speed up your control cycles: + + No matter what method you use to read encoders and other inputs, you should try to + avoid reading the same encoder input multiple times around a control loop. + Under normal conditions, this will slow down the control loop. + The preferred method is to read all the required inputs ONCE at the beginning of the loop, + and save the values in variables that can be used by other parts of the control code. + + eg: if you are sending encoder positions to your telemetry display, putting a getCurrentPosition() + call in the telemetry statement will force the code to go and get another copy which will take time. + It's much better to read the position into a variable once, and use that variable for control AND display. + Reading saved variables takes no time at all. + + Once you put all your sensor reads at the beginning of the control cycle, it's very easy to use + the bulk-read AUTO mode to streamline your cycle timing.
+ */ +@TeleOp (name = "Motor Bulk Reads", group = "Tests") +@Disabled +public class ConceptMotorBulkRead extends LinearOpMode { + + final int TEST_CYCLES = 500; // Number of control cycles to run to determine cycle times. + + private DcMotorEx m1, m2, m3, m4; // Motor Objects + private long e1, e2, e3, e4; // Encoder Values + private double v1, v2, v3, v4; // Velocities + + // Cycle Times + double t1 = 0; + double t2 = 0; + double t3 = 0; + + @Override + public void runOpMode() { + + int cycles; + + // Important Step 1: Make sure you use DcMotorEx when you instantiate your motors. + m1 = hardwareMap.get(DcMotorEx.class, "m1"); // Configure the robot to use these 4 motor names, + m2 = hardwareMap.get(DcMotorEx.class, "m2"); // or change these strings to match your existing Robot Configuration. + m3 = hardwareMap.get(DcMotorEx.class, "m3"); + m4 = hardwareMap.get(DcMotorEx.class, "m4"); + + // Important Step 2: Get access to a list of Expansion Hub Modules to enable changing caching methods. + List<LynxModule> allHubs = hardwareMap.getAll(LynxModule.class); + + ElapsedTime timer = new ElapsedTime(); + + telemetry.addData(">", "Press play to start tests"); + telemetry.addData(">", "Test results will update for each access method."); + telemetry.update(); + waitForStart(); + + // -------------------------------------------------------------------------------------- + // Run control loop using legacy encoder reads + // In this mode, a single read is done for each encoder position, and a bulk read is done for each velocity read. + // This is the worst case scenario. + // This is the same as using LynxModule.BulkCachingMode.OFF + // -------------------------------------------------------------------------------------- + + displayCycleTimes("Test 1 of 3 (Wait for completion)"); + + timer.reset(); + cycles = 0; + while (opModeIsActive() && (cycles++ < TEST_CYCLES)) { + e1 = m1.getCurrentPosition(); + e2 = m2.getCurrentPosition(); + e3 = m3.getCurrentPosition(); + e4 = m4.getCurrentPosition(); + + v1 = m1.getVelocity(); + v2 = m2.getVelocity(); + v3 = m3.getVelocity(); + v4 = m4.getVelocity(); + + // Put Control loop action code here. + + } + // calculate the average cycle time. + t1 = timer.milliseconds() / cycles; + displayCycleTimes("Test 2 of 3 (Wait for completion)"); + + // -------------------------------------------------------------------------------------- + // Run test cycles using AUTO cache mode + // In this mode, only one bulk read is done per cycle, UNLESS you read a specific encoder/velocity item AGAIN in that cycle. + // -------------------------------------------------------------------------------------- + + // Important Step 3: Option A. Set all Expansion hubs to use the AUTO Bulk Caching mode + for (LynxModule module : allHubs) { + module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO); + } + + timer.reset(); + cycles = 0; + while (opModeIsActive() && (cycles++ < TEST_CYCLES)) { + e1 = m1.getCurrentPosition(); // Uses 1 bulk-read for all 4 encoder/velocity reads, + e2 = m2.getCurrentPosition(); // but don't do any `get` operations more than once per cycle. + e3 = m3.getCurrentPosition(); + e4 = m4.getCurrentPosition(); + + v1 = m1.getVelocity(); + v2 = m2.getVelocity(); + v3 = m3.getVelocity(); + v4 = m4.getVelocity(); + + // Put Control loop action code here. + + } + // calculate the average cycle time.
+ t2 = timer.milliseconds() / cycles; + displayCycleTimes("Test 3 of 3 (Wait for completion)"); + + // -------------------------------------------------------------------------------------- + // Run test cycles using MANUAL cache mode + // In this mode, only one block read is done each control cycle. + // This is the MOST efficient method, but it does require that the cache is cleared manually each control cycle. + // -------------------------------------------------------------------------------------- + + // Important Step 3: Option B. Set all Expansion hubs to use the MANUAL Bulk Caching mode + for (LynxModule module : allHubs) { + module.setBulkCachingMode(LynxModule.BulkCachingMode.MANUAL); + } + + timer.reset(); + cycles = 0; + while (opModeIsActive() && (cycles++ < TEST_CYCLES)) { + + // Important Step 4: If you are using MANUAL mode, you must clear the BulkCache once per control cycle + for (LynxModule module : allHubs) { + module.clearBulkCache(); + } + + e1 = m1.getCurrentPosition(); // Uses 1 bulk-read to obtain ALL the motor data + e2 = m2.getCurrentPosition(); // There is no penalty for doing more `get` operations in this cycle, + e3 = m3.getCurrentPosition(); // but they will return the same data. + e4 = m4.getCurrentPosition(); + + v1 = m1.getVelocity(); + v2 = m2.getVelocity(); + v3 = m3.getVelocity(); + v4 = m4.getVelocity(); + + // Put Control loop action code here. + + } + // calculate the average cycle time. + t3 = timer.milliseconds() / cycles; + displayCycleTimes("Complete"); + + // wait until op-mode is stopped by user, before clearing display. + while (opModeIsActive()) ; + } + + // Display three comparison times. + void displayCycleTimes(String status) { + telemetry.addData("Testing", status); + telemetry.addData("Cache = OFF", "%5.1f mS/cycle", t1); + telemetry.addData("Cache = AUTO", "%5.1f mS/cycle", t2); + telemetry.addData("Cache = MANUAL", "%5.1f mS/cycle", t3); + telemetry.update(); + } +} + diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptNullOp.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptNullOp.java new file mode 100644 index 0000000..2f84695 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptNullOp.java @@ -0,0 +1,79 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.util.ElapsedTime; + +import java.text.SimpleDateFormat; +import java.util.Date; + +/** + * Demonstrates empty OpMode + */ +@TeleOp(name = "Concept: NullOp", group = "Concept") +@Disabled +public class ConceptNullOp extends OpMode { + + private ElapsedTime runtime = new ElapsedTime(); + + @Override + public void init() { + telemetry.addData("Status", "Initialized"); + } + + /* + * Code to run when the op mode is first enabled goes here + * @see com.qualcomm.robotcore.eventloop.opmode.OpMode#start() + */ + @Override + public void init_loop() { + } + + /* + * This method will be called ONCE when start is pressed + * @see com.qualcomm.robotcore.eventloop.opmode.OpMode#loop() + */ + @Override + public void start() { + runtime.reset(); + } + + /* + * This method will be called repeatedly in a loop + * @see com.qualcomm.robotcore.eventloop.opmode.OpMode#loop() + */ + @Override + public void loop() { + telemetry.addData("Status", "Run Time: " + runtime.toString()); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRampMotorSpeed.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRampMotorSpeed.java new file mode 100644 index 0000000..f891142 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRampMotorSpeed.java @@ -0,0 +1,114 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; + +/** + * This OpMode ramps a single motor speed up and down repeatedly until Stop is pressed. + * The code is structured as a LinearOpMode + * + * This code assumes a DC motor configured with the name "left_drive" as is found on a Robot. + * + * INCREMENT sets how much to increase/decrease the power each cycle + * CYCLE_MS sets the update period. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ +@TeleOp(name = "Concept: Ramp Motor Speed", group = "Concept") +@Disabled +public class ConceptRampMotorSpeed extends LinearOpMode { + + static final double INCREMENT = 0.01; // amount to ramp motor each CYCLE_MS cycle + static final int CYCLE_MS = 50; // period of each cycle + static final double MAX_FWD = 1.0; // Maximum FWD power applied to motor + static final double MAX_REV = -1.0; // Maximum REV power applied to motor + + // Define class members + DcMotor motor; + double power = 0; + boolean rampUp = true; + + + @Override + public void runOpMode() { + + // Connect to motor (Assume standard left wheel) + // Change the text in quotes to match any motor name on your robot. + motor = hardwareMap.get(DcMotor.class, "left_drive"); + + // Wait for the start button + telemetry.addData(">", "Press Start to run Motors." ); + telemetry.update(); + waitForStart(); + + // Ramp motor speeds till stop pressed. + while(opModeIsActive()) { + + // Ramp the motors, according to the rampUp variable. + if (rampUp) { + // Keep stepping up until we hit the max value. + power += INCREMENT ; + if (power >= MAX_FWD ) { + power = MAX_FWD; + rampUp = !rampUp; // Switch ramp direction + } + } + else { + // Keep stepping down until we hit the min value. + power -= INCREMENT ; + if (power <= MAX_REV ) { + power = MAX_REV; + rampUp = !rampUp; // Switch ramp direction + } + } + + // Display the current value + telemetry.addData("Motor Power", "%5.2f", power); + telemetry.addData(">", "Press Stop to end test." 
); + telemetry.update(); + + // Set the motor to the new power and pause; + motor.setPower(power); + sleep(CYCLE_MS); + idle(); + } + + // Turn off motor and signal done; + motor.setPower(0); + telemetry.addData(">", "Done"); + telemetry.update(); + + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java new file mode 100644 index 0000000..6f2aa70 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java @@ -0,0 +1,112 @@ +/* Copyright (c) 2018 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotorSimple; +import com.qualcomm.robotcore.util.ElapsedTime; +import com.qualcomm.robotcore.util.Range; + + +/** + * + * This OpMode executes a basic Tank Drive Teleop for a two wheeled robot using two REV SPARKminis. + * To use this example, connect two REV SPARKminis into servo ports on the Expansion Hub. On the + * robot configuration, use the drop down list under 'Servos' to select 'REV SPARKmini Controller' + * and name them 'left_drive' and 'right_drive'. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@TeleOp(name="REV SPARKmini Simple Drive Example", group="Concept") +@Disabled +public class ConceptRevSPARKMini extends LinearOpMode { + + // Declare OpMode members. 
+ private ElapsedTime runtime = new ElapsedTime(); + private DcMotorSimple leftDrive = null; + private DcMotorSimple rightDrive = null; + + @Override + public void runOpMode() { + telemetry.addData("Status", "Initialized"); + telemetry.update(); + + // Initialize the hardware variables. Note that the strings used here as parameters + // to 'get' must correspond to the names assigned during the robot configuration + // step (using the FTC Robot Controller app on the phone). + leftDrive = hardwareMap.get(DcMotorSimple.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotorSimple.class, "right_drive"); + + // Most robots need the motor on one side to be reversed to drive forward + // Reverse the motor that runs backward when connected directly to the battery + leftDrive.setDirection(DcMotorSimple.Direction.FORWARD); + rightDrive.setDirection(DcMotorSimple.Direction.REVERSE); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + runtime.reset(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // Setup a variable for each drive wheel to save power level for telemetry + double leftPower; + double rightPower; + + // Choose to drive using either Tank Mode, or POV Mode + // Comment out the method that's not used. The default below is POV. + + // POV Mode uses left stick to go forward, and right stick to turn. + // - This uses basic math to combine motions and is easier to drive straight. + double drive = -gamepad1.left_stick_y; + double turn = gamepad1.right_stick_x; + leftPower = Range.clip(drive + turn, -1.0, 1.0) ; + rightPower = Range.clip(drive - turn, -1.0, 1.0) ; + + // Tank Mode uses one stick to control each wheel. + // - This requires no math, but it is hard to drive forward slowly and keep straight. + // leftPower = -gamepad1.left_stick_y ; + // rightPower = -gamepad1.right_stick_y ; + + // Send calculated power to wheels + leftDrive.setPower(leftPower); + rightDrive.setPower(rightPower); + + // Show the elapsed game time and wheel power. + telemetry.addData("Status", "Run Time: " + runtime.toString()); + telemetry.addData("Motors", "left (%.2f), right (%.2f)", leftPower, rightPower); + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java new file mode 100644 index 0000000..38c7eb0 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java @@ -0,0 +1,115 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.Servo; + +/** + * This OpMode scans a single servo back and forward until Stop is pressed. + * The code is structured as a LinearOpMode + * INCREMENT sets how much to increase/decrease the servo position each cycle + * CYCLE_MS sets the update period. + * + * This code assumes a Servo configured with the name "left_hand" as is found on a Robot. + * + * NOTE: When any servo position is set, ALL attached servos are activated, so ensure that any other + * connected servos are able to move freely before running this test. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ +@TeleOp(name = "Concept: Scan Servo", group = "Concept") +@Disabled +public class ConceptScanServo extends LinearOpMode { + + static final double INCREMENT = 0.01; // amount to slew servo each CYCLE_MS cycle + static final int CYCLE_MS = 50; // period of each cycle + static final double MAX_POS = 1.0; // Maximum rotational position + static final double MIN_POS = 0.0; // Minimum rotational position + + // Define class members + Servo servo; + double position = (MAX_POS - MIN_POS) / 2; // Start at halfway position + boolean rampUp = true; + + + @Override + public void runOpMode() { + + // Connect to servo (Assume Robot Left Hand) + // Change the text in quotes to match any servo name on your robot. + servo = hardwareMap.get(Servo.class, "left_hand"); + + // Wait for the start button + telemetry.addData(">", "Press Start to scan Servo." ); + telemetry.update(); + waitForStart(); + + + // Scan servo till stop pressed. + while(opModeIsActive()){ + + // slew the servo, according to the rampUp (direction) variable. + if (rampUp) { + // Keep stepping up until we hit the max value. + position += INCREMENT ; + if (position >= MAX_POS ) { + position = MAX_POS; + rampUp = !rampUp; // Switch ramp direction + } + } + else { + // Keep stepping down until we hit the min value. + position -= INCREMENT ; + if (position <= MIN_POS ) { + position = MIN_POS; + rampUp = !rampUp; // Switch ramp direction + } + } + + // Display the current value + telemetry.addData("Servo Position", "%5.2f", position); + telemetry.addData(">", "Press Stop to end test." 
); + telemetry.update(); + + // Set the servo to the new position and pause; + servo.setPosition(position); + sleep(CYCLE_MS); + idle(); + } + + // Signal done; + telemetry.addData(">", "Done"); + telemetry.update(); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsASJava.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsASJava.java new file mode 100644 index 0000000..5c961ad --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsASJava.java @@ -0,0 +1,135 @@ +/* Copyright (c) 2018 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.ftccommon.SoundPlayer; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import java.io.File; + +/** + * This file demonstrates how to play simple sounds on both the RC and DS phones. + * It illustrates how to build sounds into your application as a resource. + * This technique is best suited for use with Android Studio since it assumes you will be creating a new application + * + * If you are using OnBotJava, please see the ConceptSoundsOnBotJava sample + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * Operation: + * + * Gamepad X & B buttons are used to trigger sounds in this example, but any event can be used. + * Note: Time should be allowed for sounds to complete before playing other sounds. + * + * For sound files to be used as a compiled-in resource, they need to be located in a folder called "raw" under your "res" (resources) folder. 
+ * You can create your own "raw" folder from scratch, or you can copy the one from the FtcRobotController module. + * + * Android Studio coders will ultimately need a folder in your path as follows: + * /TeamCode/src/main/res/raw + * + * Copy any .wav files you want to play into this folder. + * Make sure that your files ONLY use lower-case characters, and have no spaces or special characters other than underscore. + * + * The name you give your .wav files will become the resource ID for these sounds. + * eg: gold.wav becomes R.raw.gold + * + * If you wish to use the sounds provided for this sample, they are located in: + * /FtcRobotController/src/main/res/raw + * You can copy and paste the entire 'raw' folder using Android Studio. + * + */ + +@TeleOp(name="Concept: Sound Resources", group="Concept") +@Disabled +public class ConceptSoundsASJava extends LinearOpMode { + + // Declare OpMode members. + private boolean goldFound; // Sound file present flags + private boolean silverFound; + + private boolean isX = false; // Gamepad button state variables + private boolean isB = false; + + private boolean wasX = false; // Gamepad button history variables + private boolean WasB = false; + + @Override + public void runOpMode() { + + // Determine Resource IDs for sounds built into the RC application. + int silverSoundID = hardwareMap.appContext.getResources().getIdentifier("silver", "raw", hardwareMap.appContext.getPackageName()); + int goldSoundID = hardwareMap.appContext.getResources().getIdentifier("gold", "raw", hardwareMap.appContext.getPackageName()); + + // Determine if sound resources are found. + // Note: Preloading is NOT required, but it's a good way to verify all your sounds are available before you run. + if (goldSoundID != 0) + goldFound = SoundPlayer.getInstance().preload(hardwareMap.appContext, goldSoundID); + + if (silverSoundID != 0) + silverFound = SoundPlayer.getInstance().preload(hardwareMap.appContext, silverSoundID); + + // Display sound status + telemetry.addData("gold resource", goldFound ? "Found" : "NOT found\n Add gold.wav to /src/main/res/raw" ); + telemetry.addData("silver resource", silverFound ? 
"Found" : "Not found\n Add silver.wav to /src/main/res/raw" ); + + // Wait for the game to start (driver presses PLAY) + telemetry.addData(">", "Press Start to continue"); + telemetry.update(); + waitForStart(); + + telemetry.addData(">", "Press X, B to play sounds."); + telemetry.update(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // say Silver each time gamepad X is pressed (This sound is a resource) + if (silverFound && (isX = gamepad1.x) && !wasX) { + SoundPlayer.getInstance().startPlaying(hardwareMap.appContext, silverSoundID); + telemetry.addData("Playing", "Resource Silver"); + telemetry.update(); + } + + // say Gold each time gamepad B is pressed (This sound is a resource) + if (goldFound && (isB = gamepad1.b) && !WasB) { + SoundPlayer.getInstance().startPlaying(hardwareMap.appContext, goldSoundID); + telemetry.addData("Playing", "Resource Gold"); + telemetry.update(); + } + + // Save last button states + wasX = isX; + WasB = isB; + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsOnBotJava.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsOnBotJava.java new file mode 100644 index 0000000..bacb66e --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsOnBotJava.java @@ -0,0 +1,119 @@ +/* Copyright (c) 2018 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.ftccommon.SoundPlayer; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import java.io.File; + +/** + * This file demonstrates how to play simple sounds on both the RC and DS phones. 
+ * It illustrates how to play sound files that have been copied to the RC Phone + * This technique is best suited for use with OnBotJava since it does not require the app to be modified. + * + * Operation: + * + * Gamepad X & B buttons are used to trigger sounds in this example, but any event can be used. + * Note: Time should be allowed for sounds to complete before playing other sounds. + * + * To play a new sound, you will need to copy the .wav files to the phone, and then provide the full path to them as part of your OpMode. + * This is done in this sample for the two sound files. silver.wav and gold.wav + * + * You can put the files in a variety of soundPaths, but we recommend you put them in the /FIRST/blocks/sounds folder. + * Your OpModes will have guaranteed access to this folder, and you can transfer files into this folder using the BLOCKS web page. + * -- There is a link called "sounds" on the right hand side of the color bar on the BLOCKS page that can be used to send sound files to this folder by default. + * Or you can use Windows File Manager, or ADB to transfer the sound files + * + * To get full use of THIS sample, you will need to copy two sound file called silver.wav and gold.wav to /FIRST/blocks/sounds on the RC phone. + * They can be located here: + * https://github.com/ftctechnh/ftc_app/tree/master/FtcRobotController/src/main/res/raw/gold.wav + * https://github.com/ftctechnh/ftc_app/tree/master/FtcRobotController/src/main/res/raw/silver.wav + */ + +@TeleOp(name="Concept: Sound Files", group="Concept") +@Disabled +public class ConceptSoundsOnBotJava extends LinearOpMode { + + // Point to sound files on the phone's drive + private String soundPath = "/FIRST/blocks/sounds"; + private File goldFile = new File("/sdcard" + soundPath + "/gold.wav"); + private File silverFile = new File("/sdcard" + soundPath + "/silver.wav"); + + // Declare OpMode members. + private boolean isX = false; // Gamepad button state variables + private boolean isB = false; + + private boolean wasX = false; // Gamepad button history variables + private boolean WasB = false; + + @Override + public void runOpMode() { + + // Make sure that the sound files exist on the phone + boolean goldFound = goldFile.exists(); + boolean silverFound = silverFile.exists(); + + // Display sound status + telemetry.addData("gold sound", goldFound ? "Found" : "NOT Found \nCopy gold.wav to " + soundPath ); + telemetry.addData("silver sound", silverFound ? 
"Found" : "NOT Found \nCopy silver.wav to " + soundPath ); + + // Wait for the game to start (driver presses PLAY) + telemetry.addData(">", "Press Start to continue"); + telemetry.update(); + waitForStart(); + + telemetry.addData(">", "Press X or B to play sounds."); + telemetry.update(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // say Silver each time gamepad X is pressed (This sound is a resource) + if (silverFound && (isX = gamepad1.x) && !wasX) { + SoundPlayer.getInstance().startPlaying(hardwareMap.appContext, silverFile); + telemetry.addData("Playing", "Silver File"); + telemetry.update(); + } + + // say Gold each time gamepad B is pressed (This sound is a resource) + if (goldFound && (isB = gamepad1.b) && !WasB) { + SoundPlayer.getInstance().startPlaying(hardwareMap.appContext, goldFile); + telemetry.addData("Playing", "Gold File"); + telemetry.update(); + } + + // Save last button states + wasX = isX; + WasB = isB; + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java new file mode 100644 index 0000000..8832957 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java @@ -0,0 +1,123 @@ +/* Copyright (c) 2018 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.content.Context; + +import com.qualcomm.ftccommon.SoundPlayer; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; + +/** + * This file demonstrates how to play one of the several SKYSTONE/Star Wars sounds loaded into the SDK. 
+ * It does this by creating a simple "chooser" controlled by the gamepad Up Down buttons. + * This code also prevents sounds from stacking up by setting a "playing" flag, which is cleared when the sound finishes playing. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + * + * Operation: + * Use the DPAD to change the selected sound, and the Right Bumper to play it. + */ + +@TeleOp(name="SKYSTONE Sounds", group="Concept") +@Disabled +public class ConceptSoundsSKYSTONE extends LinearOpMode { + + // List of available sound resources + String sounds[] = {"ss_alarm", "ss_bb8_down", "ss_bb8_up", "ss_darth_vader", "ss_fly_by", + "ss_mf_fail", "ss_laser", "ss_laser_burst", "ss_light_saber", "ss_light_saber_long", "ss_light_saber_short", + "ss_light_speed", "ss_mine", "ss_power_up", "ss_r2d2_up", "ss_roger_roger", "ss_siren", "ss_wookie" }; + boolean soundPlaying = false; + + @Override + public void runOpMode() { + + // Variables for choosing from the available sounds + int soundIndex = 0; + int soundID = -1; + boolean was_dpad_up = false; + boolean was_dpad_down = false; + + Context myApp = hardwareMap.appContext; + + // create a sound parameter that holds the desired player parameters. + SoundPlayer.PlaySoundParams params = new SoundPlayer.PlaySoundParams(); + params.loopControl = 0; + params.waitForNonLoopingSoundsToFinish = true; + + // In this sample, we will skip waiting for the user to press play, and start displaying sound choices right away + while (!isStopRequested()) { + + // Look for DPAD presses to change the selection + if (gamepad1.dpad_down && !was_dpad_down) { + // Go to next sound (with list wrap) and display it + soundIndex = (soundIndex + 1) % sounds.length; + } + + if (gamepad1.dpad_up && !was_dpad_up) { + // Go to previous sound (with list wrap) and display it + soundIndex = (soundIndex + sounds.length - 1) % sounds.length; + } + + // Look for trigger to see if we should play sound + // Only start a new sound if we are currently not playing one. + if (gamepad1.right_bumper && !soundPlaying) { + + // Determine Resource IDs for the sounds you want to play, and make sure it's valid. + if ((soundID = myApp.getResources().getIdentifier(sounds[soundIndex], "raw", myApp.getPackageName())) != 0){ + + // Signal that the sound is now playing. + soundPlaying = true; + + // Start playing, and also Create a callback that will clear the playing flag when the sound is complete. + SoundPlayer.getInstance().startPlaying(myApp, soundID, params, null, + new Runnable() { + public void run() { + soundPlaying = false; + }} ); + } + } + + // Remember the last state of the dpad to detect changes. + was_dpad_up = gamepad1.dpad_up; + was_dpad_down = gamepad1.dpad_down; + + // Display the current sound choice, and the playing status. + telemetry.addData("", "Use DPAD up/down to choose sound."); + telemetry.addData("", "Press Right Bumper to play sound."); + telemetry.addData("", ""); + telemetry.addData("Sound >", sounds[soundIndex]); + telemetry.addData("Status >", soundPlaying ? 
"Playing" : "Stopped"); + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTelemetry.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTelemetry.java new file mode 100644 index 0000000..d3b4682 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTelemetry.java @@ -0,0 +1,178 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.VoltageSensor; +import com.qualcomm.robotcore.util.ElapsedTime; + +import org.firstinspires.ftc.robotcore.external.Func; +import org.firstinspires.ftc.robotcore.external.Telemetry; + +/** + * {@link ConceptTelemetry} illustrates various ways in which telemetry can be + * transmitted from the robot controller to the driver station. The sample illustrates + * numeric and text data, formatted output, and optimized evaluation of expensive-to-acquire + * information. The telemetry {@link Telemetry#log() log} is illustrated by scrolling a poem + * to the driver station. 
+ * + * @see Telemetry + */ +@TeleOp(name = "Concept: Telemetry", group = "Concept") +@Disabled +public class ConceptTelemetry extends LinearOpMode { + /** keeps track of the line of the poem which is to be emitted next */ + int poemLine = 0; + + /** keeps track of how long it's been since we last emitted a line of poetry */ + ElapsedTime poemElapsed = new ElapsedTime(); + + static final String[] poem = new String[] { + + "Mary had a little lamb,", + "His fleece was white as snow,", + "And everywhere that Mary went,", + "The lamb was sure to go.", + "", + "He followed her to school one day,", + "Which was against the rule,", + "It made the children laugh and play", + "To see a lamb at school.", + "", + "And so the teacher turned it out,", + "But still it lingered near,", + "And waited patiently about,", + "Till Mary did appear.", + "", + "\"Why does the lamb love Mary so?\"", + "The eager children cry.", + "\"Why, Mary loves the lamb, you know,\"", + "The teacher did reply.", + "", + "" + }; + + @Override public void runOpMode() { + + /* we keep track of how long it's been since the OpMode was started, just + * to have some interesting data to show */ + ElapsedTime opmodeRunTime = new ElapsedTime(); + + // We show the log in oldest-to-newest order, as that's better for poetry + telemetry.log().setDisplayOrder(Telemetry.Log.DisplayOrder.OLDEST_FIRST); + // We can control the number of lines shown in the log + telemetry.log().setCapacity(6); + // The interval between lines of poetry, in seconds + double sPoemInterval = 0.6; + + /** + * Wait until we've been given the ok to go. For something to do, we emit the + * elapsed time as we sit here and wait. If we didn't want to do anything while + * we waited, we would just call {@link #waitForStart()}. + */ + while (!isStarted()) { + telemetry.addData("time", "%.1f seconds", opmodeRunTime.seconds()); + telemetry.update(); + idle(); + } + + // Ok, we've been given the ok to go + + /** + * As an illustration, the first line on our telemetry display will display the battery voltage. + * The idea here is that it's expensive to compute the voltage (at least for purposes of illustration) + * so you don't want to do it unless the data is actually going to make it to the + * driver station (recall that telemetry transmission is throttled to reduce bandwidth use. + * Note that getBatteryVoltage() below returns 'Infinity' if there's no voltage sensor attached. + * + * @see Telemetry#getMsTransmissionInterval() + */ + telemetry.addData("voltage", "%.1f volts", new Func() { + @Override public Double value() { + return getBatteryVoltage(); + } + }); + + // Reset to keep some timing stats for the post-'start' part of the opmode + opmodeRunTime.reset(); + int loopCount = 1; + + // Go go gadget robot! + while (opModeIsActive()) { + + // Emit poetry if it's been a while + if (poemElapsed.seconds() > sPoemInterval) { + emitPoemLine(); + } + + // As an illustration, show some loop timing information + telemetry.addData("loop count", loopCount); + telemetry.addData("ms/loop", "%.3f ms", opmodeRunTime.milliseconds() / loopCount); + + // Show joystick information as some other illustrative data + telemetry.addLine("left joystick | ") + .addData("x", gamepad1.left_stick_x) + .addData("y", gamepad1.left_stick_y); + telemetry.addLine("right joystick | ") + .addData("x", gamepad1.right_stick_x) + .addData("y", gamepad1.right_stick_y); + + /** + * Transmit the telemetry to the driver station, subject to throttling. 
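+ * The throttling interval can be read with telemetry.getMsTransmissionInterval(), and a display that
+ * needs faster updates can shorten it with telemetry.setMsTransmissionInterval(interval), at the cost
+ * of extra bandwidth between the Robot Controller and the Driver Station.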
+ * @see Telemetry#getMsTransmissionInterval() + */ + telemetry.update(); + + /** Update loop info and play nice with the rest of the {@link Thread}s in the system */ + loopCount++; + } + } + + // emits a line of poetry to the telemetry log + void emitPoemLine() { + telemetry.log().add(poem[poemLine]); + poemLine = (poemLine+1) % poem.length; + poemElapsed.reset(); + } + + // Computes the current battery voltage + double getBatteryVoltage() { + double result = Double.POSITIVE_INFINITY; + for (VoltageSensor sensor : hardwareMap.voltageSensor) { + double voltage = sensor.getVoltage(); + if (voltage > 0) { + result = Math.min(result, voltage); + } + } + return result; + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java new file mode 100644 index 0000000..cb52ebd --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java @@ -0,0 +1,189 @@ +/* Copyright (c) 2019 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import java.util.List; +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection; +import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector; +import org.firstinspires.ftc.robotcore.external.tfod.Recognition; + +/** + * This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to + * determine which image is being presented to the robot. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ +@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept") +@Disabled +public class ConceptTensorFlowObjectDetection extends LinearOpMode { + + /* + * Specify the source for the Tensor Flow Model. + * If the TensorFlowLite object model is included in the Robot Controller App as an "asset", + * the OpMode must to load it using loadModelFromAsset(). However, if a team generated model + * has been downloaded to the Robot Controller's SD FLASH memory, it must to be loaded using loadModelFromFile() + * Here we assume it's an Asset. Also see method initTfod() below . + */ + private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite"; + // private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite"; + + private static final String[] LABELS = { + "1 Bolt", + "2 Bulb", + "3 Panel" + }; + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + /** + * {@link #vuforia} is the variable we will use to store our instance of the Vuforia + * localization engine. + */ + private VuforiaLocalizer vuforia; + + /** + * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object + * Detection engine. + */ + private TFObjectDetector tfod; + + @Override + public void runOpMode() { + // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that + // first. + initVuforia(); + initTfod(); + + /** + * Activate TensorFlow Object Detection before we wait for the start command. + * Do it here so that the Camera Stream window will have the TensorFlow annotations visible. 
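+ * (The Driver Station's Camera Stream preview is only available after INIT and before PLAY, which is
+ * another reason to activate the detector before waitForStart().)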
+ **/ + if (tfod != null) { + tfod.activate(); + + // The TensorFlow software will scale the input images from the camera to a lower resolution. + // This can result in lower detection accuracy at longer distances (> 55cm or 22"). + // If your target is at distance greater than 50 cm (20") you can increase the magnification value + // to artificially zoom in to the center of image. For best results, the "aspectRatio" argument + // should be set to the value of the images used to create the TensorFlow Object Detection model + // (typically 16/9). + tfod.setZoom(1.0, 16.0/9.0); + } + + /** Wait for the game to begin */ + telemetry.addData(">", "Press Play to start op mode"); + telemetry.update(); + waitForStart(); + + if (opModeIsActive()) { + while (opModeIsActive()) { + if (tfod != null) { + // getUpdatedRecognitions() will return null if no new information is available since + // the last time that call was made. + List updatedRecognitions = tfod.getUpdatedRecognitions(); + if (updatedRecognitions != null) { + telemetry.addData("# Objects Detected", updatedRecognitions.size()); + + // step through the list of recognitions and display image position/size information for each one + // Note: "Image number" refers to the randomized image orientation/number + for (Recognition recognition : updatedRecognitions) { + double col = (recognition.getLeft() + recognition.getRight()) / 2 ; + double row = (recognition.getTop() + recognition.getBottom()) / 2 ; + double width = Math.abs(recognition.getRight() - recognition.getLeft()) ; + double height = Math.abs(recognition.getTop() - recognition.getBottom()) ; + + telemetry.addData(""," "); + telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 ); + telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col); + telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height); + } + telemetry.update(); + } + } + } + } + } + + /** + * Initialize the Vuforia localization engine. + */ + private void initVuforia() { + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + */ + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + parameters.cameraDirection = CameraDirection.BACK; + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + } + + /** + * Initialize the TensorFlow Object Detection engine. + */ + private void initTfod() { + int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier( + "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId); + tfodParameters.minResultConfidence = 0.75f; + tfodParameters.isModelTensorFlow2 = true; + tfodParameters.inputSize = 300; + tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); + + // Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio + // Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH. 
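+        // (Whichever loader is used, the LABELS array above must match the labels the .tflite model was trained with.)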
+ tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS); + // tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionSwitchableCameras.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionSwitchableCameras.java new file mode 100644 index 0000000..eec021f --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionSwitchableCameras.java @@ -0,0 +1,224 @@ +/* Copyright (c) 2020 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import java.util.List; +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.SwitchableCamera; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector; +import org.firstinspires.ftc.robotcore.external.tfod.Recognition; + +/** + * This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to + * determine which image is being presented to the robot. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. 
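+ * This sample also expects two webcams in the active robot configuration; they are looked up as
+ * "Webcam 1" and "Webcam 2" in initVuforia() below.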
+ */ +@TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept") +@Disabled +public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode { + + /* + * Specify the source for the Tensor Flow Model. + * If the TensorFlowLite object model is included in the Robot Controller App as an "asset", + * the OpMode must to load it using loadModelFromAsset(). However, if a team generated model + * has been downloaded to the Robot Controller's SD FLASH memory, it must to be loaded using loadModelFromFile() + * Here we assume it's an Asset. Also see method initTfod() below . + */ + private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite"; + // private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite"; + + private static final String[] LABELS = { + "1 Bolt", + "2 Bulb", + "3 Panel" + }; + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + /** + * {@link #vuforia} is the variable we will use to store our instance of the Vuforia + * localization engine. + */ + private VuforiaLocalizer vuforia; + + /** + * Variables used for switching cameras. + */ + private WebcamName webcam1, webcam2; + private SwitchableCamera switchableCamera; + private boolean oldLeftBumper; + private boolean oldRightBumper; + + /** + * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object + * Detection engine. + */ + private TFObjectDetector tfod; + + @Override + public void runOpMode() { + // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that + // first. + initVuforia(); + initTfod(); + + /** + * Activate TensorFlow Object Detection before we wait for the start command. + * Do it here so that the Camera Stream window will have the TensorFlow annotations visible. + **/ + if (tfod != null) { + tfod.activate(); + + // The TensorFlow software will scale the input images from the camera to a lower resolution. + // This can result in lower detection accuracy at longer distances (> 55cm or 22"). + // If your target is at distance greater than 50 cm (20") you can increase the magnification value + // to artificially zoom in to the center of image. For best results, the "aspectRatio" argument + // should be set to the value of the images used to create the TensorFlow Object Detection model + // (typically 16/9). 
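+ // For example, a distant target could be magnified with a larger zoom value while keeping
+ // the 16/9 aspect ratio; the 2.5 below is only an illustrative value, not a tuned number:
+ // tfod.setZoom(2.5, 16.0/9.0);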
+ tfod.setZoom(1.0, 16.0/9.0); + } + + /** Wait for the game to begin */ + telemetry.addData(">", "Press Play to start op mode"); + telemetry.update(); + waitForStart(); + + if (opModeIsActive()) { + while (opModeIsActive()) { + if (tfod != null) { + doCameraSwitching(); + List recognitions = tfod.getRecognitions(); + telemetry.addData("# Objects Detected", recognitions.size()); + // step through the list of recognitions and display image size and position + // Note: "Image number" refers to the randomized image orientation/number + for (Recognition recognition : recognitions) { + double col = (recognition.getLeft() + recognition.getRight()) / 2 ; + double row = (recognition.getTop() + recognition.getBottom()) / 2 ; + double width = Math.abs(recognition.getRight() - recognition.getLeft()) ; + double height = Math.abs(recognition.getTop() - recognition.getBottom()) ; + + telemetry.addData(""," "); + telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 ); + telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col); + telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height); + } + telemetry.update(); + } + } + } + } + + /** + * Initialize the Vuforia localization engine. + */ + private void initVuforia() { + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + */ + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + + // Indicate that we wish to be able to switch cameras. + webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1"); + webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2"); + parameters.cameraName = ClassFactory.getInstance().getCameraManager().nameForSwitchableCamera(webcam1, webcam2); + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + + // Set the active camera to Webcam 1. + switchableCamera = (SwitchableCamera) vuforia.getCamera(); + switchableCamera.setActiveCamera(webcam1); + } + + /** + * Initialize the TensorFlow Object Detection engine. + */ + private void initTfod() { + int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier( + "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId); + tfodParameters.minResultConfidence = 0.75f; + tfodParameters.isModelTensorFlow2 = true; + tfodParameters.inputSize = 300; + tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); + + // Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio + // Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH. + tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS); + // tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS); + } + + private void doCameraSwitching() { + // If the left bumper is pressed, use Webcam 1. + // If the right bumper is pressed, use Webcam 2. 
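+ // Switch only on the rising edge of a bumper press (pressed now, not pressed on the previous
+ // loop), so holding a bumper down does not repeatedly re-select the camera.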
+ boolean newLeftBumper = gamepad1.left_bumper; + boolean newRightBumper = gamepad1.right_bumper; + if (newLeftBumper && !oldLeftBumper) { + switchableCamera.setActiveCamera(webcam1); + } else if (newRightBumper && !oldRightBumper) { + switchableCamera.setActiveCamera(webcam2); + } + oldLeftBumper = newLeftBumper; + oldRightBumper = newRightBumper; + + if (switchableCamera.getActiveCamera().equals(webcam1)) { + telemetry.addData("activeCamera", "Webcam 1"); + telemetry.addData("Press RightBumper", "to switch to Webcam 2"); + } else { + telemetry.addData("activeCamera", "Webcam 2"); + telemetry.addData("Press LeftBumper", "to switch to Webcam 1"); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java new file mode 100644 index 0000000..78bfef9 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java @@ -0,0 +1,190 @@ +/* Copyright (c) 2019 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import java.util.List; +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector; +import org.firstinspires.ftc.robotcore.external.tfod.Recognition; + +/** + * This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to + * determine which image is being presented to the robot. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ +@TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept") +@Disabled +public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode { + + /* + * Specify the source for the Tensor Flow Model. + * If the TensorFlowLite object model is included in the Robot Controller App as an "asset", + * the OpMode must to load it using loadModelFromAsset(). However, if a team generated model + * has been downloaded to the Robot Controller's SD FLASH memory, it must to be loaded using loadModelFromFile() + * Here we assume it's an Asset. Also see method initTfod() below . + */ + private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite"; + // private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite"; + + + private static final String[] LABELS = { + "1 Bolt", + "2 Bulb", + "3 Panel" + }; + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + /** + * {@link #vuforia} is the variable we will use to store our instance of the Vuforia + * localization engine. + */ + private VuforiaLocalizer vuforia; + + /** + * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object + * Detection engine. + */ + private TFObjectDetector tfod; + + @Override + public void runOpMode() { + // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that + // first. + initVuforia(); + initTfod(); + + /** + * Activate TensorFlow Object Detection before we wait for the start command. + * Do it here so that the Camera Stream window will have the TensorFlow annotations visible. 
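+ * (After INIT is pressed, the Driver Station's Camera Stream preview, selected from the
+ * options menu, will then typically show these annotations on the webcam image.)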
+ **/ + if (tfod != null) { + tfod.activate(); + + // The TensorFlow software will scale the input images from the camera to a lower resolution. + // This can result in lower detection accuracy at longer distances (> 55cm or 22"). + // If your target is at distance greater than 50 cm (20") you can increase the magnification value + // to artificially zoom in to the center of image. For best results, the "aspectRatio" argument + // should be set to the value of the images used to create the TensorFlow Object Detection model + // (typically 16/9). + tfod.setZoom(1.0, 16.0/9.0); + } + + /** Wait for the game to begin */ + telemetry.addData(">", "Press Play to start op mode"); + telemetry.update(); + waitForStart(); + + if (opModeIsActive()) { + while (opModeIsActive()) { + if (tfod != null) { + // getUpdatedRecognitions() will return null if no new information is available since + // the last time that call was made. + List updatedRecognitions = tfod.getUpdatedRecognitions(); + if (updatedRecognitions != null) { + telemetry.addData("# Objects Detected", updatedRecognitions.size()); + + // step through the list of recognitions and display image position/size information for each one + // Note: "Image number" refers to the randomized image orientation/number + for (Recognition recognition : updatedRecognitions) { + double col = (recognition.getLeft() + recognition.getRight()) / 2 ; + double row = (recognition.getTop() + recognition.getBottom()) / 2 ; + double width = Math.abs(recognition.getRight() - recognition.getLeft()) ; + double height = Math.abs(recognition.getTop() - recognition.getBottom()) ; + + telemetry.addData(""," "); + telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 ); + telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col); + telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height); + } + telemetry.update(); + } + } + } + } + } + + /** + * Initialize the Vuforia localization engine. + */ + private void initVuforia() { + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + */ + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1"); + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + } + + /** + * Initialize the TensorFlow Object Detection engine. + */ + private void initTfod() { + int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier( + "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId); + tfodParameters.minResultConfidence = 0.75f; + tfodParameters.isModelTensorFlow2 = true; + tfodParameters.inputSize = 300; + tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); + + // Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio + // Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH. 
+ tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS); + // tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentification.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentification.java new file mode 100644 index 0000000..3bdb1af --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentification.java @@ -0,0 +1,186 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark; +import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +/** + * This OpMode illustrates the basics of using the Vuforia engine to determine + * the identity of Vuforia VuMarks encountered on the field. The code is structured as + * a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigation}; we do not here + * duplicate the core Vuforia documentation found there, but rather instead focus on the + * differences between the use of Vuforia for navigation vs VuMark identification. + * + * @see ConceptVuforiaFieldNavigation + * @see VuforiaLocalizer + * @see VuforiaTrackableDefaultListener + * see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ + +@TeleOp(name="Concept: VuMark Id", group ="Concept") +@Disabled +public class ConceptVuMarkIdentification extends LinearOpMode { + + public static final String TAG = "Vuforia VuMark Sample"; + + OpenGLMatrix lastLocation = null; + + /** + * {@link #vuforia} is the variable we will use to store our instance of the Vuforia + * localization engine. + */ + VuforiaLocalizer vuforia; + + @Override public void runOpMode() { + + /* + * To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone); + */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + + // OR... Do Not Activate the Camera Monitor View, to save power + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. 
+ * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + /* + * We also indicate which camera on the RC that we wish to use. + * Here we chose the back (HiRes) camera (for greater range), but + * for a competition robot, the front camera might be more convenient. + */ + parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK; + + /** + * Instantiate the Vuforia engine + */ + vuforia = ClassFactory.getInstance().createVuforia(parameters); + + + /** + * Load the data set containing the VuMarks for Relic Recovery. There's only one trackable + * in this data set: all three of the VuMarks in the game were created from this one template, + * but differ in their instance id information. + * @see VuMarkInstanceId + */ + VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark"); + VuforiaTrackable relicTemplate = relicTrackables.get(0); + relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary + + telemetry.addData(">", "Press Play to start"); + telemetry.update(); + waitForStart(); + + relicTrackables.activate(); + + while (opModeIsActive()) { + + /** + * See if any of the instances of {@link relicTemplate} are currently visible. + * {@link RelicRecoveryVuMark} is an enum which can have the following values: + * UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than + * UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}. + */ + RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate); + if (vuMark != RelicRecoveryVuMark.UNKNOWN) { + + /* Found an instance of the template. In the actual game, you will probably + * loop until this condition occurs, then move on to act accordingly depending + * on which VuMark was visible. */ + telemetry.addData("VuMark", "%s visible", vuMark); + + /* For fun, we also exhibit the navigational pose. In the Relic Recovery game, + * it is perhaps unlikely that you will actually need to act on this pose information, but + * we illustrate it nevertheless, for completeness. */ + OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getPose(); + telemetry.addData("Pose", format(pose)); + + /* We further illustrate how to decompose the pose into useful rotational and + * translational components */ + if (pose != null) { + VectorF trans = pose.getTranslation(); + Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES); + + // Extract the X, Y, and Z components of the offset of the target relative to the robot + double tX = trans.get(0); + double tY = trans.get(1); + double tZ = trans.get(2); + + // Extract the rotational components of the target relative to the robot + double rX = rot.firstAngle; + double rY = rot.secondAngle; + double rZ = rot.thirdAngle; + } + } + else { + telemetry.addData("VuMark", "not visible"); + } + + telemetry.update(); + } + } + + String format(OpenGLMatrix transformationMatrix) { + return (transformationMatrix != null) ? 
transformationMatrix.formatAsTransform() : "null"; + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentificationWebcam.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentificationWebcam.java new file mode 100644 index 0000000..14d2aff --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuMarkIdentificationWebcam.java @@ -0,0 +1,194 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark; +import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +/** + * This OpMode illustrates the basics of using the Vuforia engine to determine + * the identity of Vuforia VuMarks encountered on the field. The code is structured as + * a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigationWebcam}; we do not here + * duplicate the core Vuforia documentation found there, but rather instead focus on the + * differences between the use of Vuforia for navigation vs VuMark identification. + * + * @see ConceptVuforiaFieldNavigationWebcam + * @see VuforiaLocalizer + * @see VuforiaTrackableDefaultListener + * see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below + */ + +@TeleOp(name="Concept: VuMark Id Webcam", group ="Concept") +@Disabled +public class ConceptVuMarkIdentificationWebcam extends LinearOpMode { + + public static final String TAG = "Vuforia VuMark Sample"; + + OpenGLMatrix lastLocation = null; + + /** + * {@link #vuforia} is the variable we will use to store our instance of the Vuforia + * localization engine. + */ + VuforiaLocalizer vuforia; + + /** + * This is the webcam we are to use. As with other hardware devices such as motors and + * servos, this device is identified using the robot configuration tool in the FTC application. + */ + WebcamName webcamName; + + @Override public void runOpMode() { + + /* + * Retrieve the camera we are to use. + */ + webcamName = hardwareMap.get(WebcamName.class, "Webcam 1"); + + /* + * To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone); + * If no camera monitor is desired, use the parameterless constructor instead (commented out below). + */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + + // OR... 
Do Not Activate the Camera Monitor View, to save power + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + + /** + * We also indicate which camera on the RC we wish to use. For pedagogical purposes, + * we use the same logic as in {@link ConceptVuforiaNavigationWebcam}. + */ + parameters.cameraName = webcamName; + this.vuforia = ClassFactory.getInstance().createVuforia(parameters); + + /** + * Load the data set containing the VuMarks for Relic Recovery. There's only one trackable + * in this data set: all three of the VuMarks in the game were created from this one template, + * but differ in their instance id information. + * @see VuMarkInstanceId + */ + VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark"); + VuforiaTrackable relicTemplate = relicTrackables.get(0); + relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary + + telemetry.addData(">", "Press Play to start"); + telemetry.update(); + waitForStart(); + + relicTrackables.activate(); + + while (opModeIsActive()) { + + /** + * See if any of the instances of {@link relicTemplate} are currently visible. + * {@link RelicRecoveryVuMark} is an enum which can have the following values: + * UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than + * UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}. + */ + RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate); + if (vuMark != RelicRecoveryVuMark.UNKNOWN) { + + /* Found an instance of the template. In the actual game, you will probably + * loop until this condition occurs, then move on to act accordingly depending + * on which VuMark was visible. */ + telemetry.addData("VuMark", "%s visible", vuMark); + + /* For fun, we also exhibit the navigational pose. In the Relic Recovery game, + * it is perhaps unlikely that you will actually need to act on this pose information, but + * we illustrate it nevertheless, for completeness. 
*/ + OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getFtcCameraFromTarget(); + telemetry.addData("Pose", format(pose)); + + /* We further illustrate how to decompose the pose into useful rotational and + * translational components */ + if (pose != null) { + VectorF trans = pose.getTranslation(); + Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES); + + // Extract the X, Y, and Z components of the offset of the target relative to the robot + double tX = trans.get(0); + double tY = trans.get(1); + double tZ = trans.get(2); + + // Extract the rotational components of the target relative to the robot + double rX = rot.firstAngle; + double rY = rot.secondAngle; + double rZ = rot.thirdAngle; + } + } + else { + telemetry.addData("VuMark", "not visible"); + } + + telemetry.update(); + } + } + + String format(OpenGLMatrix transformationMatrix) { + return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null"; + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaDriveToTargetWebcam.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaDriveToTargetWebcam.java new file mode 100644 index 0000000..d41ada1 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaDriveToTargetWebcam.java @@ -0,0 +1,203 @@ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.Range; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +/** + * This OpMode illustrates using a webcam to locate and drive towards ANY Vuforia target. + * The code assumes a basic two-wheel Robot Configuration with motors named left_drive and right_drive. + * The motor directions must be set so a positive drive goes forward and a positive turn rotates to the right. + * + * Under manual control, the left stick will move forward/back, and the right stick will turn left/right. + * This is called POV Joystick mode, different than Tank Drive (where each joystick controls a wheel). + * Manually drive the robot until it displays Target data on the Driver Station. + * Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode. + * Release the Left Bumper to return to manual driving mode. + * + * Use DESIRED_DISTANCE to set how close you want the robot to get to the target. + * Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants. + * + * For more Vuforia details, or to adapt this OpMode for a phone camera, view the + * ConceptVuforiaFieldNavigation and ConceptVuforiaFieldNavigationWebcam samples. 
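+ *
+ * As a rough worked example using the default constants below: a robot 33 inches from the
+ * target has a range error of 33 - 8 = 25 inches, so the requested drive power is
+ * 25 * SPEED_GAIN (0.02) = 0.50, and a 25 degree bearing error likewise requests
+ * 25 * TURN_GAIN (0.01) = 0.25 of turn power.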
+ * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ + +@TeleOp(name="Drive To Target", group = "Concept") +@Disabled +public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode +{ + // Adjust these numbers to suit your robot. + final double DESIRED_DISTANCE = 8.0; // this is how close the camera should get to the target (inches) + // The GAIN constants set the relationship between the measured position error, + // and how much power is applied to the drive motors. Drive = Error * Gain + // Make these values smaller for smoother control. + final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0) + final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0) + + final double MM_PER_INCH = 25.40 ; // Metric conversion + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " --- YOUR NEW VUFORIA KEY GOES HERE --- "; + + VuforiaLocalizer vuforia = null; + OpenGLMatrix targetPose = null; + String targetName = ""; + + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + @Override public void runOpMode() + { + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + * To get an on-phone camera preview, use the code below. + * If no camera preview is desired, use the parameter-less constructor instead (commented out below). + */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + + // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target. + parameters.useExtendedTracking = false; + + // Connect to the camera we are to use. 
This name must match what is set up in Robot Configuration + parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1"); + this.vuforia = ClassFactory.getInstance().createVuforia(parameters); + + // Load the trackable objects from the Assets file, and give them meaningful names + VuforiaTrackables targetsPowerPlay = this.vuforia.loadTrackablesFromAsset("PowerPlay"); + targetsPowerPlay.get(0).setName("Red Audience Wall"); + targetsPowerPlay.get(1).setName("Red Rear Wall"); + targetsPowerPlay.get(2).setName("Blue Audience Wall"); + targetsPowerPlay.get(3).setName("Blue Rear Wall"); + + // Start tracking targets in the background + targetsPowerPlay.activate(); + + // Initialize the hardware variables. Note that the strings used here as parameters + // to 'get' must correspond to the names assigned during the robot configuration + // step (using the FTC Robot Controller app on the phone). + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + telemetry.addData(">", "Press Play to start"); + telemetry.update(); + + waitForStart(); + + boolean targetFound = false; // Set to true when a target is detected by Vuforia + double targetRange = 0; // Distance from camera to target in Inches + double targetBearing = 0; // Robot Heading, relative to target. Positive degrees means target is to the right. + double drive = 0; // Desired forward power (-1 to +1) + double turn = 0; // Desired turning power (-1 to +1) + + while (opModeIsActive()) + { + // Look for first visible target, and save its pose. + targetFound = false; + for (VuforiaTrackable trackable : targetsPowerPlay) + { + if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible()) + { + targetPose = ((VuforiaTrackableDefaultListener)trackable.getListener()).getVuforiaCameraFromTarget(); + + // if we have a target, process the "pose" to determine the position of the target relative to the robot. + if (targetPose != null) + { + targetFound = true; + targetName = trackable.getName(); + VectorF trans = targetPose.getTranslation(); + + // Extract the X & Y components of the offset of the target relative to the robot + double targetX = trans.get(0) / MM_PER_INCH; // Image X axis + double targetY = trans.get(2) / MM_PER_INCH; // Image Z axis + + // target range is based on distance from robot position to origin (right triangle). + targetRange = Math.hypot(targetX, targetY); + + // target bearing is based on angle formed between the X axis and the target range line + targetBearing = Math.toDegrees(Math.asin(targetX / targetRange)); + + break; // jump out of target tracking loop if we find a target. + } + } + } + + // Tell the driver what we see, and what to do. 
+ if (targetFound) { + telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n"); + telemetry.addData("Target", " %s", targetName); + telemetry.addData("Range", "%5.1f inches", targetRange); + telemetry.addData("Bearing","%3.0f degrees", targetBearing); + } else { + telemetry.addData(">","Drive using joystick to find target\n"); + } + + // Drive to target Automatically if Left Bumper is being pressed, AND we have found a target. + if (gamepad1.left_bumper && targetFound) { + + // Determine heading and range error so we can use them to control the robot automatically. + double rangeError = (targetRange - DESIRED_DISTANCE); + double headingError = targetBearing; + + // Use the speed and turn "gains" to calculate how we want the robot to move. + drive = rangeError * SPEED_GAIN; + turn = headingError * TURN_GAIN ; + + telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn); + } else { + + // drive using manual POV Joystick mode. + drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%. + turn = gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%. + telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn); + } + telemetry.update(); + + // Calculate left and right wheel powers and send to them to the motors. + double leftPower = Range.clip(drive + turn, -1.0, 1.0) ; + double rightPower = Range.clip(drive - turn, -1.0, 1.0) ; + leftDrive.setPower(leftPower); + rightDrive.setPower(rightPower); + + sleep(10); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigation.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigation.java new file mode 100644 index 0000000..84f2e7b --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigation.java @@ -0,0 +1,283 @@ +/* Copyright (c) 2019 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +import java.util.ArrayList; +import java.util.List; + +import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC; +import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK; + +/** + * This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of + * robot on the FTC field using the RC phone's camera. The code is structured as a LinearOpMode + * + * Note: If you are using a WEBCAM see ConceptVuforiaFieldNavigationWebcam.java + * + * When images are located, Vuforia is able to determine the position and orientation of the + * image relative to the camera. This sample code then combines that information with a + * knowledge of where the target images are on the field, to determine the location of the camera. + * + * Finally, the location of the camera on the robot is used to determine the + * robot's location and orientation on the field. + * + * To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ + +@TeleOp(name="Vuforia Field Nav", group ="Concept") +@Disabled +public class ConceptVuforiaFieldNavigation extends LinearOpMode { + + // IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted: + // 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side) + // 2) Phone Orientation. 
Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape) + + private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK; + private static final boolean PHONE_IS_PORTRAIT = false ; + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + + // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension. + // We will define some constants and conversions here. These are useful for the FTC competition field. + private static final float mmPerInch = 25.4f; + private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor + private static final float halfField = 72 * mmPerInch; + private static final float halfTile = 12 * mmPerInch; + private static final float oneAndHalfTile = 36 * mmPerInch; + + // Class Members + private OpenGLMatrix lastLocation = null; + private VuforiaLocalizer vuforia = null; + private VuforiaTrackables targets = null ; + + private boolean targetVisible = false; + private float phoneXRotate = 0; + private float phoneYRotate = 0; + private float phoneZRotate = 0; + + @Override public void runOpMode() { + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + * To get an on-phone camera preview, use the code below. + * If no camera preview is desired, use the parameter-less constructor instead (commented out below). + */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + parameters.cameraDirection = CAMERA_CHOICE; + + // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target. + parameters.useExtendedTracking = false; + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + + // Load the trackable assets. + targets = this.vuforia.loadTrackablesFromAsset("PowerPlay"); + + // For convenience, gather together all the trackable objects in one easily-iterable collection */ + List allTrackables = new ArrayList(); + allTrackables.addAll(targets); + + /** + * In order for localization to work, we need to tell the system where each target is on the field, and + * where the phone resides on the robot. These specifications are in the form of transformation matrices. + * Transformation matrices are a central, important concept in the math here involved in localization. 
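+ * For example, each target's pose on the field is built in identifyTarget() below by
+ * composing an OpenGLMatrix.translation(dx, dy, dz) with a rotation matrix for the wall
+ * on which the image is mounted.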
+ * See Transformation Matrix + * for detailed information. Commonly, you'll encounter transformation matrices as instances + * of the {@link OpenGLMatrix} class. + * + * If you are standing in the Red Alliance Station looking towards the center of the field, + * - The X axis runs from your left to the right. (positive from the center to the right) + * - The Y axis runs from the Red Alliance Station towards the other side of the field + * where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station) + * - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor) + * + * Before being transformed, each target image is conceptually located at the origin of the field's + * coordinate system (the center of the field), facing up. + */ + + // Name and locate each trackable object + identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90); + identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90); + identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90); + identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90); + + /* + * Create a transformation matrix describing where the phone is on the robot. + * + * NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option. + * Lock it into Portrait for these numbers to work. + * + * Info: The coordinate frame for the robot looks the same as the field. + * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. + * Z is UP on the robot. This equates to a heading angle of Zero degrees. + * + * The phone starts out lying flat, with the screen facing Up and with the physical top of the phone + * pointing to the LEFT side of the Robot. + * The two examples below assume that the camera is facing forward out the front of the robot. + */ + + // We need to rotate the camera around its long axis to bring the correct camera forward. + if (CAMERA_CHOICE == BACK) { + phoneYRotate = -90; + } else { + phoneYRotate = 90; + } + + // Rotate the phone vertical about the X axis if it's in portrait mode + if (PHONE_IS_PORTRAIT) { + phoneXRotate = 90 ; + } + + // Next, translate the camera lens to where it is on the robot. + // In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level. + final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens + final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground + final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens + + OpenGLMatrix robotFromCamera = OpenGLMatrix + .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate)); + + /** Let all the trackable listeners know where the phone is. */ + for (VuforiaTrackable trackable : allTrackables) { + ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection); + } + + /* + * WARNING: + * In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed. 
+ * This sequence is used to enable the new remote DS Camera Stream feature to be used with this sample. + * CONSEQUENTLY do not put any driving commands in this loop. + * To restore the normal opmode structure, just un-comment the following line: + */ + + // waitForStart(); + + /* Note: To use the remote camera preview: + * AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream" + * Tap the preview window to receive a fresh image. + * It is not permitted to transition to RUN while the camera preview window is active. + * Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window. + */ + + targets.activate(); + while (!isStopRequested()) { + + // check all the trackable targets to see which one (if any) is visible. + targetVisible = false; + for (VuforiaTrackable trackable : allTrackables) { + if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) { + telemetry.addData("Visible Target", trackable.getName()); + targetVisible = true; + + // getUpdatedRobotLocation() will return null if no new information is available since + // the last time that call was made, or if the trackable is not currently visible. + OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation(); + if (robotLocationTransform != null) { + lastLocation = robotLocationTransform; + } + break; + } + } + + // Provide feedback as to where the robot is located (if we know). + if (targetVisible) { + // express position (translation) of robot in inches. + VectorF translation = lastLocation.getTranslation(); + telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f", + translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch); + + // express the rotation of the robot in degrees. + Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES); + telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle); + } + else { + telemetry.addData("Visible Target", "none"); + } + telemetry.update(); + } + + // Disable Tracking when we are done; + targets.deactivate(); + } + + /*** + * Identify a target by naming it, and setting its position and orientation on the field + * @param targetIndex + * @param targetName + * @param dx, dy, dz Target offsets in x,y,z axes + * @param rx, ry, rz Target rotations in x,y,z axes + */ + void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) { + VuforiaTrackable aTarget = targets.get(targetIndex); + aTarget.setName(targetName); + aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz))); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigationWebcam.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigationWebcam.java new file mode 100644 index 0000000..6316c48 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaFieldNavigationWebcam.java @@ -0,0 +1,274 @@ +/* Copyright (c) 2019 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +import java.util.ArrayList; +import java.util.List; + +import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC; + +/** + * This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of + * robot on the FTC field using a WEBCAM. The code is structured as a LinearOpMode + * + * NOTE: If you are running on a Phone with a built-in camera, use the ConceptVuforiaFieldNavigation example instead of this one. + * NOTE: It is possible to switch between multiple WebCams (eg: one for the left side and one for the right). 
+ * For a related example of how to do this, see ConceptTensorFlowObjectDetectionSwitchableCameras + * + * When images are located, Vuforia is able to determine the position and orientation of the + * image relative to the camera. This sample code then combines that information with a + * knowledge of where the target images are on the field, to determine the location of the camera. + * + * Finally, the location of the camera on the robot is used to determine the + * robot's location and orientation on the field. + * + * To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ + +@TeleOp(name="Vuforia Field Nav Webcam", group ="Concept") +@Disabled +public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode { + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... + * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " --- YOUR NEW VUFORIA KEY GOES HERE --- "; + + // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension. + // We will define some constants and conversions here + private static final float mmPerInch = 25.4f; + private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor + private static final float halfField = 72 * mmPerInch; + private static final float halfTile = 12 * mmPerInch; + private static final float oneAndHalfTile = 36 * mmPerInch; + + // Class Members + private OpenGLMatrix lastLocation = null; + private VuforiaLocalizer vuforia = null; + private VuforiaTrackables targets = null ; + private WebcamName webcamName = null; + + private boolean targetVisible = false; + + @Override public void runOpMode() { + // Connect to the camera we are to use. This name must match what is set up in Robot Configuration + webcamName = hardwareMap.get(WebcamName.class, "Webcam 1"); + + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. + * We can pass Vuforia the handle to a camera preview resource (on the RC screen); + * If no camera-preview is desired, use the parameter-less constructor instead (commented out below). + * Note: A preview window is required if you want to view the camera stream on the Driver Station Phone. 
+ */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + + // We also indicate which camera we wish to use. + parameters.cameraName = webcamName; + + // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target. + parameters.useExtendedTracking = false; + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + + // Load the data sets for the trackable objects. These particular data + // sets are stored in the 'assets' part of our application. + targets = this.vuforia.loadTrackablesFromAsset("PowerPlay"); + + // For convenience, gather together all the trackable objects in one easily-iterable collection */ + List allTrackables = new ArrayList(); + allTrackables.addAll(targets); + + /** + * In order for localization to work, we need to tell the system where each target is on the field, and + * where the phone resides on the robot. These specifications are in the form of transformation matrices. + * Transformation matrices are a central, important concept in the math here involved in localization. + * See Transformation Matrix + * for detailed information. Commonly, you'll encounter transformation matrices as instances + * of the {@link OpenGLMatrix} class. + * + * If you are standing in the Red Alliance Station looking towards the center of the field, + * - The X axis runs from your left to the right. (positive from the center to the right) + * - The Y axis runs from the Red Alliance Station towards the other side of the field + * where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station) + * - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor) + * + * Before being transformed, each target image is conceptually located at the origin of the field's + * coordinate system (the center of the field), facing up. + */ + + // Name and locate each trackable object + identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90); + identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90); + identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90); + identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90); + + /* + * Create a transformation matrix describing where the camera is on the robot. + * + * Info: The coordinate frame for the robot looks the same as the field. + * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. + * Z is UP on the robot. This equates to a bearing angle of Zero degrees. + * + * For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction), + * with the wide (horizontal) axis of the camera aligned with the X axis, and + * the Narrow (vertical) axis of the camera aligned with the Y axis + * + * But, this example assumes that the camera is actually facing forward out the front of the robot. + * So, the "default" camera position requires two rotations to get it oriented correctly. 
+ * 1) First it must be rotated +90 degrees around the X axis to get it horizontal (its now facing out the right side of the robot) + * 2) Next it must be be rotated +90 degrees (counter-clockwise) around the Z axis to face forward. + * + * Finally the camera can be translated to its actual mounting position on the robot. + * In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level. + */ + + final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens + final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground + final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens + + OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix + .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0)); + + /** Let all the trackable listeners know where the camera is. */ + for (VuforiaTrackable trackable : allTrackables) { + ((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot); + } + + /* + * WARNING: + * In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed. + * This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample. + * CONSEQUENTLY do not put any driving commands in this loop. + * To restore the normal opmode structure, just un-comment the following line: + */ + + // waitForStart(); + + /* Note: To use the remote camera preview: + * AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream" + * Tap the preview window to receive a fresh image. + * It is not permitted to transition to RUN while the camera preview window is active. + * Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window. + */ + + targets.activate(); + while (!isStopRequested()) { + + // check all the trackable targets to see which one (if any) is visible. + targetVisible = false; + for (VuforiaTrackable trackable : allTrackables) { + if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) { + telemetry.addData("Visible Target", trackable.getName()); + targetVisible = true; + + // getUpdatedRobotLocation() will return null if no new information is available since + // the last time that call was made, or if the trackable is not currently visible. + OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation(); + if (robotLocationTransform != null) { + lastLocation = robotLocationTransform; + } + break; + } + } + + // Provide feedback as to where the robot is located (if we know). + if (targetVisible) { + // express position (translation) of robot in inches. + VectorF translation = lastLocation.getTranslation(); + telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f", + translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch); + + // express the rotation of the robot in degrees. 
+ Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES); + telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle); + } + else { + telemetry.addData("Visible Target", "none"); + } + telemetry.update(); + } + + // Disable Tracking when we are done; + targets.deactivate(); + } + + /*** + * Identify a target by naming it, and setting its position and orientation on the field + * @param targetIndex + * @param targetName + * @param dx, dy, dz Target offsets in x,y,z axes + * @param rx, ry, rz Target rotations in x,y,z axes + */ + void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) { + VuforiaTrackable aTarget = targets.get(targetIndex); + aTarget.setName(targetName); + aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz))); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/FTC_FieldCoordinateSystemDefinition.pdf b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/FTC_FieldCoordinateSystemDefinition.pdf new file mode 100644 index 0000000..c424d47 Binary files /dev/null and b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/FTC_FieldCoordinateSystemDefinition.pdf differ diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByEncoder_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByEncoder_Linear.java new file mode 100644 index 0000000..ff7a878 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByEncoder_Linear.java @@ -0,0 +1,188 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Autonomous; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.Servo; +import com.qualcomm.robotcore.util.ElapsedTime; + +/** + * This file illustrates the concept of driving a path based on encoder counts. + * The code is structured as a LinearOpMode + * + * The code REQUIRES that you DO have encoders on the wheels, + * otherwise you would use: RobotAutoDriveByTime; + * + * This code ALSO requires that the drive Motors have been configured such that a positive + * power command moves them forward, and causes the encoders to count UP. + * + * The desired path in this example is: + * - Drive forward for 48 inches + * - Spin right for 12 Inches + * - Drive Backward for 24 inches + * - Stop and close the claw. + * + * The code is written using a method called: encoderDrive(speed, leftInches, rightInches, timeoutS) + * that performs the actual movement. + * This method assumes that each movement is relative to the last stopping place. + * There are other ways to perform encoder based moves, but this method is probably the simplest. + * This code uses the RUN_TO_POSITION mode to enable the Motor controllers to generate the run profile + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@Autonomous(name="Robot: Auto Drive By Encoder", group="Robot") +@Disabled +public class RobotAutoDriveByEncoder_Linear extends LinearOpMode { + + /* Declare OpMode members. */ + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + private ElapsedTime runtime = new ElapsedTime(); + + // Calculate the COUNTS_PER_INCH for your specific drive train. + // Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV + // For external drive gearing, set DRIVE_GEAR_REDUCTION as needed. + // For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear. + // This is gearing DOWN for less speed and more torque. + // For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation. + static final double COUNTS_PER_MOTOR_REV = 1440 ; // eg: TETRIX Motor Encoder + static final double DRIVE_GEAR_REDUCTION = 1.0 ; // No External Gearing. + static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference + static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / + (WHEEL_DIAMETER_INCHES * 3.1415); + static final double DRIVE_SPEED = 0.6; + static final double TURN_SPEED = 0.5; + + @Override + public void runOpMode() { + + // Initialize the drive system variables. + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. 
Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); + rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); + + leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + // Send telemetry message to indicate successful Encoder reset + telemetry.addData("Starting at", "%7d :%7d", + leftDrive.getCurrentPosition(), + rightDrive.getCurrentPosition()); + telemetry.update(); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + + // Step through each leg of the path, + // Note: Reverse movement is obtained by setting a negative distance (not speed) + encoderDrive(DRIVE_SPEED, 48, 48, 5.0); // S1: Forward 47 Inches with 5 Sec timeout + encoderDrive(TURN_SPEED, 12, -12, 4.0); // S2: Turn Right 12 Inches with 4 Sec timeout + encoderDrive(DRIVE_SPEED, -24, -24, 4.0); // S3: Reverse 24 Inches with 4 Sec timeout + + telemetry.addData("Path", "Complete"); + telemetry.update(); + sleep(1000); // pause to display final telemetry message. + } + + /* + * Method to perform a relative move, based on encoder counts. + * Encoders are not reset as the move is based on the current position. + * Move will stop if any of three conditions occur: + * 1) Move gets to the desired position + * 2) Move runs out of time + * 3) Driver stops the opmode running. + */ + public void encoderDrive(double speed, + double leftInches, double rightInches, + double timeoutS) { + int newLeftTarget; + int newRightTarget; + + // Ensure that the opmode is still active + if (opModeIsActive()) { + + // Determine new target position, and pass to motor controller + newLeftTarget = leftDrive.getCurrentPosition() + (int)(leftInches * COUNTS_PER_INCH); + newRightTarget = rightDrive.getCurrentPosition() + (int)(rightInches * COUNTS_PER_INCH); + leftDrive.setTargetPosition(newLeftTarget); + rightDrive.setTargetPosition(newRightTarget); + + // Turn On RUN_TO_POSITION + leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION); + rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION); + + // reset the timeout time and start motion. + runtime.reset(); + leftDrive.setPower(Math.abs(speed)); + rightDrive.setPower(Math.abs(speed)); + + // keep looping while we are still active, and there is time left, and both motors are running. + // Note: We use (isBusy() && isBusy()) in the loop test, which means that when EITHER motor hits + // its target position, the motion will stop. This is "safer" in the event that the robot will + // always end the motion as soon as possible. + // However, if you require that BOTH motors have finished their moves before the robot continues + // onto the next step, use (isBusy() || isBusy()) in the loop test. + while (opModeIsActive() && + (runtime.seconds() < timeoutS) && + (leftDrive.isBusy() && rightDrive.isBusy())) { + + // Display it for the driver. + telemetry.addData("Running to", " %7d :%7d", newLeftTarget, newRightTarget); + telemetry.addData("Currently at", " at %7d :%7d", + leftDrive.getCurrentPosition(), rightDrive.getCurrentPosition()); + telemetry.update(); + } + + // Stop all motion; + leftDrive.setPower(0); + rightDrive.setPower(0); + + // Turn off RUN_TO_POSITION + leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + sleep(250); // optional pause after each move. 
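            // A quick sanity check on the encoder math above (editorial sketch, not part of the original sample):
            // with the TETRIX values used in this file, COUNTS_PER_INCH = 1440 * 1.0 / (4.0 * 3.1415) ≈ 114.6,
            // so the 48-inch first leg works out to roughly 5,500 counts per wheel, i.e.
            //
            //     int exampleTarget = leftDrive.getCurrentPosition() + (int)(48 * COUNTS_PER_INCH);  // ≈ current + 5500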
+ } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByGyro_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByGyro_Linear.java new file mode 100644 index 0000000..c710b21 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByGyro_Linear.java @@ -0,0 +1,431 @@ +/* Copyright (c) 2022 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.bosch.BNO055IMU; +import com.qualcomm.robotcore.eventloop.opmode.Autonomous; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; +import com.qualcomm.robotcore.util.Range; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; + +/** + * This file illustrates the concept of driving an autonomous path based on Gyro heading and encoder counts. + * The code is structured as a LinearOpMode + * + * The path to be followed by the robot is built from a series of drive, turn or pause steps. + * Each step on the path is defined by a single function call, and these can be strung together in any order. 
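 *
 * For instance, a short illustrative path (the distances and angles here are arbitrary, not the ones
 * this sample actually drives) could be strung together as:
 *
 *     driveStraight(DRIVE_SPEED, 12.0, 0.0);   // forward 12 inches while holding a 0-degree heading
 *     turnToHeading(TURN_SPEED, 90.0);         // rotate CCW to +90 degrees
 *     holdHeading(TURN_SPEED, 90.0, 0.5);      // let the heading settle for half a second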
+ * + * The code REQUIRES that you have encoders on the drive motors, otherwise you should use: RobotAutoDriveByTime; + * + * This code ALSO requires that you have a BOSCH BNO055 IMU, otherwise you would use: RobotAutoDriveByEncoder; + * This IMU is found in REV Control/Expansion Hubs shipped prior to July 2022, and possibly also on later models. + * To run as written, the Control/Expansion hub should be mounted horizontally on a flat part of the robot chassis. + * + * This sample requires that the drive Motors have been configured with names : left_drive and right_drive. + * It also requires that a positive power command moves both motors forward, and causes the encoders to count UP. + * So please verify that both of your motors move the robot forward on the first move. If not, make the required correction. + * See the beginning of runOpMode() to set the FORWARD/REVERSE option for each motor. + * + * This code uses RUN_TO_POSITION mode for driving straight, and RUN_USING_ENCODER mode for turning and holding. + * Note: You must call setTargetPosition() at least once before switching to RUN_TO_POSITION mode. + * + * Notes: + * + * All angles are referenced to the coordinate-frame that is set whenever resetHeading() is called. + * In this sample, the heading is reset when the Start button is touched on the Driver station. + * Note: It would be possible to reset the heading after each move, but this would accumulate steering errors. + * + * The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis, + * which means that a Positive rotation is Counter Clockwise, looking down on the field. + * This is consistent with the FTC field coordinate conventions set out in the document: + * ftc_app\doc\tutorial\FTC_FieldCoordinateSystemDefinition.pdf + * + * Control Approach. + * + * To reach, or maintain a required heading, this code implements a basic Proportional Controller where: + * + * Steering power = Heading Error * Proportional Gain. + * + * "Heading Error" is calculated by taking the difference between the desired heading and the actual heading, + * and then "normalizing" it by converting it to a value in the +/- 180 degree range. + * + * "Proportional Gain" is a constant that YOU choose to set the "strength" of the steering response. + * + * Use Android Studio to Copy this Class, and Paste it into your "TeamCode" folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Autonomous(name="Robot: Auto Drive By Gyro", group="Robot") +@Disabled +public class RobotAutoDriveByGyro_Linear extends LinearOpMode { + + /* Declare OpMode members. */ + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + private BNO055IMU imu = null; // Control/Expansion Hub IMU + + private double robotHeading = 0; + private double headingOffset = 0; + private double headingError = 0; + + // These variable are declared here (as class members) so they can be updated in various methods, + // but still be displayed by sendTelemetry() + private double targetHeading = 0; + private double driveSpeed = 0; + private double turnSpeed = 0; + private double leftSpeed = 0; + private double rightSpeed = 0; + private int leftTarget = 0; + private int rightTarget = 0; + + // Calculate the COUNTS_PER_INCH for your specific drive train. + // Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV + // For external drive gearing, set DRIVE_GEAR_REDUCTION as needed. 
+ // For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear. + // This is gearing DOWN for less speed and more torque. + // For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation. + static final double COUNTS_PER_MOTOR_REV = 537.7 ; // eg: GoBILDA 312 RPM Yellow Jacket + static final double DRIVE_GEAR_REDUCTION = 1.0 ; // No External Gearing. + static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference + static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) / + (WHEEL_DIAMETER_INCHES * 3.1415); + + // These constants define the desired driving/control characteristics + // They can/should be tweaked to suit the specific robot drive train. + static final double DRIVE_SPEED = 0.4; // Max driving speed for better distance accuracy. + static final double TURN_SPEED = 0.2; // Max Turn speed to limit turn rate + static final double HEADING_THRESHOLD = 1.0 ; // How close must the heading get to the target before moving to next step. + // Requiring more accuracy (a smaller number) will often make the turn take longer to get into the final position. + // Define the Proportional control coefficient (or GAIN) for "heading control". + // We define one value when Turning (larger errors), and the other is used when Driving straight (smaller errors). + // Increase these numbers if the heading does not corrects strongly enough (eg: a heavy robot or using tracks) + // Decrease these numbers if the heading does not settle on the correct value (eg: very agile robot with omni wheels) + static final double P_TURN_GAIN = 0.02; // Larger is more responsive, but also less stable + static final double P_DRIVE_GAIN = 0.03; // Larger is more responsive, but also less stable + + + @Override + public void runOpMode() { + + // Initialize the drive system variables. + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // define initialization values for IMU, and then initialize it. + BNO055IMU.Parameters parameters = new BNO055IMU.Parameters(); + parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES; + imu = hardwareMap.get(BNO055IMU.class, "imu"); + imu.initialize(parameters); + + // Ensure the robot is stationary. Reset the encoders and set the motors to BRAKE mode + leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); + rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER); + leftDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + rightDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + + // Wait for the game to start (Display Gyro value while waiting) + while (opModeInInit()) { + telemetry.addData(">", "Robot Heading = %4.0f", getRawHeading()); + telemetry.update(); + } + + // Set the encoders for closed loop speed control, and reset the heading. 
+ leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + resetHeading(); + + // Step through each leg of the path, + // Notes: Reverse movement is obtained by setting a negative distance (not speed) + // holdHeading() is used after turns to let the heading stabilize + // Add a sleep(2000) after any step to keep the telemetry data visible for review + + driveStraight(DRIVE_SPEED, 24.0, 0.0); // Drive Forward 24" + turnToHeading( TURN_SPEED, -45.0); // Turn CW to -45 Degrees + holdHeading( TURN_SPEED, -45.0, 0.5); // Hold -45 Deg heading for a 1/2 second + + driveStraight(DRIVE_SPEED, 17.0, -45.0); // Drive Forward 17" at -45 degrees (12"x and 12"y) + turnToHeading( TURN_SPEED, 45.0); // Turn CCW to 45 Degrees + holdHeading( TURN_SPEED, 45.0, 0.5); // Hold 45 Deg heading for a 1/2 second + + driveStraight(DRIVE_SPEED, 17.0, 45.0); // Drive Forward 17" at 45 degrees (-12"x and 12"y) + turnToHeading( TURN_SPEED, 0.0); // Turn CW to 0 Degrees + holdHeading( TURN_SPEED, 0.0, 1.0); // Hold 0 Deg heading for 1 second + + driveStraight(DRIVE_SPEED,-48.0, 0.0); // Drive in Reverse 48" (should return to approx. staring position) + + telemetry.addData("Path", "Complete"); + telemetry.update(); + sleep(1000); // Pause to display last telemetry message. + } + + /* + * ==================================================================================================== + * Driving "Helper" functions are below this line. + * These provide the high and low level methods that handle driving straight and turning. + * ==================================================================================================== + */ + + // ********** HIGH Level driving functions. ******************** + + /** + * Method to drive in a straight line, on a fixed compass heading (angle), based on encoder counts. + * Move will stop if either of these conditions occur: + * 1) Move gets to the desired position + * 2) Driver stops the opmode running. + * + * @param maxDriveSpeed MAX Speed for forward/rev motion (range 0 to +1.0) . + * @param distance Distance (in inches) to move from current position. Negative distance means move backward. + * @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset. + * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. + * If a relative angle is required, add/subtract from the current robotHeading. + */ + public void driveStraight(double maxDriveSpeed, + double distance, + double heading) { + + // Ensure that the opmode is still active + if (opModeIsActive()) { + + // Determine new target position, and pass to motor controller + int moveCounts = (int)(distance * COUNTS_PER_INCH); + leftTarget = leftDrive.getCurrentPosition() + moveCounts; + rightTarget = rightDrive.getCurrentPosition() + moveCounts; + + // Set Target FIRST, then turn on RUN_TO_POSITION + leftDrive.setTargetPosition(leftTarget); + rightDrive.setTargetPosition(rightTarget); + + leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION); + rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION); + + // Set the required driving speed (must be positive for RUN_TO_POSITION) + // Start driving straight, and then enter the control loop + maxDriveSpeed = Math.abs(maxDriveSpeed); + moveRobot(maxDriveSpeed, 0); + + // keep looping while we are still active, and BOTH motors are running. 
+ while (opModeIsActive() && + (leftDrive.isBusy() && rightDrive.isBusy())) { + + // Determine required steering to keep on heading + turnSpeed = getSteeringCorrection(heading, P_DRIVE_GAIN); + + // if driving in reverse, the motor correction also needs to be reversed + if (distance < 0) + turnSpeed *= -1.0; + + // Apply the turning correction to the current driving speed. + moveRobot(driveSpeed, turnSpeed); + + // Display drive status for the driver. + sendTelemetry(true); + } + + // Stop all motion & Turn off RUN_TO_POSITION + moveRobot(0, 0); + leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + } + } + + /** + * Method to spin on central axis to point in a new direction. + * Move will stop if either of these conditions occur: + * 1) Move gets to the heading (angle) + * 2) Driver stops the opmode running. + * + * @param maxTurnSpeed Desired MAX speed of turn. (range 0 to +1.0) + * @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset. + * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. + * If a relative angle is required, add/subtract from current heading. + */ + public void turnToHeading(double maxTurnSpeed, double heading) { + + // Run getSteeringCorrection() once to pre-calculate the current error + getSteeringCorrection(heading, P_DRIVE_GAIN); + + // keep looping while we are still active, and not on heading. + while (opModeIsActive() && (Math.abs(headingError) > HEADING_THRESHOLD)) { + + // Determine required steering to keep on heading + turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN); + + // Clip the speed to the maximum permitted value. + turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed); + + // Pivot in place by applying the turning correction + moveRobot(0, turnSpeed); + + // Display drive status for the driver. + sendTelemetry(false); + } + + // Stop all motion; + moveRobot(0, 0); + } + + /** + * Method to obtain & hold a heading for a finite amount of time + * Move will stop once the requested time has elapsed + * This function is useful for giving the robot a moment to stabilize it's heading between movements. + * + * @param maxTurnSpeed Maximum differential turn speed (range 0 to +1.0) + * @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset. + * 0 = fwd. +ve is CCW from fwd. -ve is CW from forward. + * If a relative angle is required, add/subtract from current heading. + * @param holdTime Length of time (in seconds) to hold the specified heading. + */ + public void holdHeading(double maxTurnSpeed, double heading, double holdTime) { + + ElapsedTime holdTimer = new ElapsedTime(); + holdTimer.reset(); + + // keep looping while we have time remaining. + while (opModeIsActive() && (holdTimer.time() < holdTime)) { + // Determine required steering to keep on heading + turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN); + + // Clip the speed to the maximum permitted value. + turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed); + + // Pivot in place by applying the turning correction + moveRobot(0, turnSpeed); + + // Display drive status for the driver. + sendTelemetry(false); + } + + // Stop all motion; + moveRobot(0, 0); + } + + // ********** LOW Level driving functions. ******************** + + /** + * This method uses a Proportional Controller to determine how much steering correction is required. 
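 *
 * For example (numbers for illustration only): with P_TURN_GAIN = 0.02, a 30-degree heading error
 * requests 30 * 0.02 = 0.6 of turning power, while a 5-degree error requests only 0.1; the result
 * is then clipped to the +/- 1.0 range before being applied.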
+ * + * @param desiredHeading The desired absolute heading (relative to last heading reset) + * @param proportionalGain Gain factor applied to heading error to obtain turning power. + * @return Turning power needed to get to required heading. + */ + public double getSteeringCorrection(double desiredHeading, double proportionalGain) { + targetHeading = desiredHeading; // Save for telemetry + + // Get the robot heading by applying an offset to the IMU heading + robotHeading = getRawHeading() - headingOffset; + + // Determine the heading current error + headingError = targetHeading - robotHeading; + + // Normalize the error to be within +/- 180 degrees + while (headingError > 180) headingError -= 360; + while (headingError <= -180) headingError += 360; + + // Multiply the error by the gain to determine the required steering correction/ Limit the result to +/- 1.0 + return Range.clip(headingError * proportionalGain, -1, 1); + } + + /** + * This method takes separate drive (fwd/rev) and turn (right/left) requests, + * combines them, and applies the appropriate speed commands to the left and right wheel motors. + * @param drive forward motor speed + * @param turn clockwise turning motor speed. + */ + public void moveRobot(double drive, double turn) { + driveSpeed = drive; // save this value as a class member so it can be used by telemetry. + turnSpeed = turn; // save this value as a class member so it can be used by telemetry. + + leftSpeed = drive - turn; + rightSpeed = drive + turn; + + // Scale speeds down if either one exceeds +/- 1.0; + double max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed)); + if (max > 1.0) + { + leftSpeed /= max; + rightSpeed /= max; + } + + leftDrive.setPower(leftSpeed); + rightDrive.setPower(rightSpeed); + } + + /** + * Display the various control parameters while driving + * + * @param straight Set to true if we are driving straight, and the encoder positions should be included in the telemetry. + */ + private void sendTelemetry(boolean straight) { + + if (straight) { + telemetry.addData("Motion", "Drive Straight"); + telemetry.addData("Target Pos L:R", "%7d:%7d", leftTarget, rightTarget); + telemetry.addData("Actual Pos L:R", "%7d:%7d", leftDrive.getCurrentPosition(), + rightDrive.getCurrentPosition()); + } else { + telemetry.addData("Motion", "Turning"); + } + + telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", targetHeading, robotHeading); + telemetry.addData("Error:Steer", "%5.1f:%5.1f", headingError, turnSpeed); + telemetry.addData("Wheel Speeds L:R.", "%5.2f : %5.2f", leftSpeed, rightSpeed); + telemetry.update(); + } + + /** + * read the raw (un-offset Gyro heading) directly from the IMU + */ + public double getRawHeading() { + Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES); + return angles.firstAngle; + } + + /** + * Reset the "offset" heading back to zero + */ + public void resetHeading() { + // Save a new heading offset equal to the current raw heading. + headingOffset = getRawHeading(); + robotHeading = 0; + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByTime_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByTime_Linear.java new file mode 100644 index 0000000..e27c9c7 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveByTime_Linear.java @@ -0,0 +1,128 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Autonomous; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.util.ElapsedTime; + +/** + * This file illustrates the concept of driving a path based on time. + * The code is structured as a LinearOpMode + * + * The code assumes that you do NOT have encoders on the wheels, + * otherwise you would use: RobotAutoDriveByEncoder; + * + * The desired path in this example is: + * - Drive forward for 3 seconds + * - Spin right for 1.3 seconds + * - Drive Backward for 1 Second + * + * The code is written in a simple form with no optimizations. + * However, there are several ways that this type of sequence could be streamlined, + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Autonomous(name="Robot: Auto Drive By Time", group="Robot") +@Disabled +public class RobotAutoDriveByTime_Linear extends LinearOpMode { + + /* Declare OpMode members. */ + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + private ElapsedTime runtime = new ElapsedTime(); + + + static final double FORWARD_SPEED = 0.6; + static final double TURN_SPEED = 0.5; + + @Override + public void runOpMode() { + + // Initialize the drive system variables. + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // When run, this OpMode should start both motors driving forward. 
So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // Send telemetry message to signify robot waiting; + telemetry.addData("Status", "Ready to run"); // + telemetry.update(); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + + // Step through each leg of the path, ensuring that the Auto mode has not been stopped along the way + + // Step 1: Drive forward for 3 seconds + leftDrive.setPower(FORWARD_SPEED); + rightDrive.setPower(FORWARD_SPEED); + runtime.reset(); + while (opModeIsActive() && (runtime.seconds() < 3.0)) { + telemetry.addData("Path", "Leg 1: %4.1f S Elapsed", runtime.seconds()); + telemetry.update(); + } + + // Step 2: Spin right for 1.3 seconds + leftDrive.setPower(TURN_SPEED); + rightDrive.setPower(-TURN_SPEED); + runtime.reset(); + while (opModeIsActive() && (runtime.seconds() < 1.3)) { + telemetry.addData("Path", "Leg 2: %4.1f S Elapsed", runtime.seconds()); + telemetry.update(); + } + + // Step 3: Drive Backward for 1 Second + leftDrive.setPower(-FORWARD_SPEED); + rightDrive.setPower(-FORWARD_SPEED); + runtime.reset(); + while (opModeIsActive() && (runtime.seconds() < 1.0)) { + telemetry.addData("Path", "Leg 3: %4.1f S Elapsed", runtime.seconds()); + telemetry.update(); + } + + // Step 4: Stop + leftDrive.setPower(0); + rightDrive.setPower(0); + + telemetry.addData("Path", "Complete"); + telemetry.update(); + sleep(1000); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToLine_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToLine_Linear.java new file mode 100644 index 0000000..9ddf6ad --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotAutoDriveToLine_Linear.java @@ -0,0 +1,144 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Autonomous; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.ColorSensor; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.LightSensor; +import com.qualcomm.robotcore.hardware.NormalizedColorSensor; +import com.qualcomm.robotcore.hardware.NormalizedRGBA; +import com.qualcomm.robotcore.hardware.SwitchableLight; + +/** + * This file illustrates the concept of driving up to a line and then stopping. + * The code is structured as a LinearOpMode + * + * The Sensor used here can be a REV Color Sensor V2 or V3. Make sure the white LED is turned on. + * The sensor can be plugged into any I2C port, and must be named "sensor_color" in the active configuration. + * + * Depending on the height of your color sensor, you may want to set the sensor "gain". + * The higher the gain, the greater the reflected light reading will be. + * Use the SensorColor sample in this folder to determine the minimum gain value that provides an + * "Alpha" reading of 1.0 when you are on top of the white line. In this sample, we use a gain of 15 + * which works well with a Rev V2 color sensor + * + * Setting the correct WHITE_THRESHOLD value is key to stopping correctly. + * This should be set halfway between the bare-tile, and white-line "Alpha" values. + * The reflected light value can be read on the screen once the OpMode has been INIT, but before it is STARTED. + * Move the sensor on and off the white line and note the min and max readings. + * Edit this code to make WHITE_THRESHOLD halfway between the min and max. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Autonomous(name="Robot: Auto Drive To Line", group="Robot") +@Disabled +public class RobotAutoDriveToLine_Linear extends LinearOpMode { + + /* Declare OpMode members. */ + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + + /** The colorSensor field will contain a reference to our color sensor hardware object */ + NormalizedColorSensor colorSensor; + + static final double WHITE_THRESHOLD = 0.5; // spans between 0.0 - 1.0 from dark to light + static final double APPROACH_SPEED = 0.25; + + @Override + public void runOpMode() { + + // Initialize the drive system variables. + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive. 
+ // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy + // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + // Get a reference to our sensor object. It's recommended to use NormalizedColorSensor over + // ColorSensor, because NormalizedColorSensor consistently gives values between 0 and 1, while + // the values you get from ColorSensor are dependent on the specific sensor you're using. + colorSensor = hardwareMap.get(NormalizedColorSensor.class, "sensor_color"); + + // If necessary, turn ON the white LED (if there is no LED switch on the sensor) + if (colorSensor instanceof SwitchableLight) { + ((SwitchableLight)colorSensor).enableLight(true); + } + + // Some sensors allow you to set your light sensor gain for optimal sensitivity... + // See the SensorColor sample in this folder for how to determine the optimal gain. + // A gain of 15 causes a Rev Color Sensor V2 to produce an Alpha value of 1.0 at about 1.5" above the floor. + colorSensor.setGain(15); + + // Wait for driver to press PLAY) + // Abort this loop is started or stopped. + while (opModeInInit()) { + + // Send telemetry message to signify robot waiting; + telemetry.addData("Status", "Ready to drive to white line."); // + + // Display the light level while we are waiting to start + getBrightness(); + } + + // Start the robot moving forward, and then begin looking for a white line. + leftDrive.setPower(APPROACH_SPEED); + rightDrive.setPower(APPROACH_SPEED); + + // run until the white line is seen OR the driver presses STOP; + while (opModeIsActive() && (getBrightness() < WHITE_THRESHOLD)) { + sleep(5); + } + + // Stop all motors + leftDrive.setPower(0); + rightDrive.setPower(0); + } + + // to obtain reflected light, read the normalized values from the color sensor. Return the Alpha channel. + double getBrightness() { + NormalizedRGBA colors = colorSensor.getNormalizedColors(); + telemetry.addData("Light Level (0 to 1)", "%4.2f", colors.alpha); + telemetry.update(); + + return colors.alpha; + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java new file mode 100644 index 0000000..71b8e0c --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java @@ -0,0 +1,167 @@ +/* Copyright (c) 2022 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. 
+ * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.Servo; +import com.qualcomm.robotcore.util.Range; + +/** + * This file works in conjunction with the External Hardware Class sample called: ConceptExternalHardwareClass.java + * Please read the explanations in that Sample about how to use this class definition. + * + * This file defines a Java Class that performs all the setup and configuration for a sample robot's hardware (motors and sensors). + * It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand) + * + * This one file/class can be used by ALL of your OpModes without having to cut & paste the code each time. + * + * Where possible, the actual hardware objects are "abstracted" (or hidden) so the OpMode code just makes calls into the class, + * rather than accessing the internal hardware directly. This is why the objects are declared "private". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with *exactly the same name*. + * + * Or.. In OnBot Java, add a new file named RobotHardware.java, drawing from this Sample; select Not an OpMode. + * Also add a new OpMode, drawing from the Sample ConceptExternalHardwareClass.java; select TeleOp. + * + */ + +public class RobotHardware { + + /* Declare OpMode members. */ + private LinearOpMode myOpMode = null; // gain access to methods in the calling OpMode. + + // Define Motor and Servo objects (Make them private so they can't be accessed externally) + private DcMotor leftDrive = null; + private DcMotor rightDrive = null; + private DcMotor armMotor = null; + private Servo leftHand = null; + private Servo rightHand = null; + + // Define Drive constants. Make them public so they CAN be used by the calling OpMode + public static final double MID_SERVO = 0.5 ; + public static final double HAND_SPEED = 0.02 ; // sets rate to move servo + public static final double ARM_UP_POWER = 0.45 ; + public static final double ARM_DOWN_POWER = -0.45 ; + + // Define a constructor that allows the OpMode to pass a reference to itself. + public RobotHardware (LinearOpMode opmode) { + myOpMode = opmode; + } + + /** + * Initialize all the robot's hardware. + * This method must be called ONCE when the OpMode is initialized. + * + * All of the hardware devices are accessed via the hardware map, and initialized. + */ + public void init() { + // Define and Initialize Motors (note: need to use reference to actual OpMode). 
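+        // All of the hardware lookups below go through the OpMode reference saved by the
+        // constructor (myOpMode.hardwareMap), which is why this class needs no hardwareMap of its own.
+        // A calling OpMode then uses this class roughly as follows (illustrative sketch only, assuming
+        // a ConceptExternalHardwareClass-style LinearOpMode; not part of this class):
+        //   RobotHardware robot = new RobotHardware(this);
+        //   robot.init();
+        //   robot.driveRobot(-gamepad1.left_stick_y, gamepad1.right_stick_x);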
+ leftDrive = myOpMode.hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = myOpMode.hardwareMap.get(DcMotor.class, "right_drive"); + armMotor = myOpMode.hardwareMap.get(DcMotor.class, "arm"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy + // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + // Define and initialize ALL installed servos. + leftHand = myOpMode.hardwareMap.get(Servo.class, "left_hand"); + rightHand = myOpMode.hardwareMap.get(Servo.class, "right_hand"); + leftHand.setPosition(MID_SERVO); + rightHand.setPosition(MID_SERVO); + + myOpMode.telemetry.addData(">", "Hardware Initialized"); + myOpMode.telemetry.update(); + } + + /** + * Calculates the left/right motor powers required to achieve the requested + * robot motions: Drive (Axial motion) and Turn (Yaw motion). + * Then sends these power levels to the motors. + * + * @param Drive Fwd/Rev driving power (-1.0 to 1.0) +ve is forward + * @param Turn Right/Left turning power (-1.0 to 1.0) +ve is CW + */ + public void driveRobot(double Drive, double Turn) { + // Combine drive and turn for blended motion. + double left = Drive + Turn; + double right = Drive - Turn; + + // Scale the values so neither exceed +/- 1.0 + double max = Math.max(Math.abs(left), Math.abs(right)); + if (max > 1.0) + { + left /= max; + right /= max; + } + + // Use existing function to drive both wheels. + setDrivePower(left, right); + } + + /** + * Pass the requested wheel motor powers to the appropriate hardware drive motors. + * + * @param leftWheel Fwd/Rev driving power (-1.0 to 1.0) +ve is forward + * @param rightWheel Fwd/Rev driving power (-1.0 to 1.0) +ve is forward + */ + public void setDrivePower(double leftWheel, double rightWheel) { + // Output the values to the motor drives. + leftDrive.setPower(leftWheel); + rightDrive.setPower(rightWheel); + } + + /** + * Pass the requested arm power to the appropriate hardware drive motor + * + * @param power driving power (-1.0 to 1.0) + */ + public void setArmPower(double power) { + armMotor.setPower(power); + } + + /** + * Send the two hand-servos to opposing (mirrored) positions, based on the passed offset. + * + * @param offset + */ + public void setHandPositions(double offset) { + offset = Range.clip(offset, -0.5, 0.5); + leftHand.setPosition(MID_SERVO + offset); + rightHand.setPosition(MID_SERVO - offset); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopPOV_Linear.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopPOV_Linear.java new file mode 100644 index 0000000..d4fac1d --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopPOV_Linear.java @@ -0,0 +1,159 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.Servo; +import com.qualcomm.robotcore.util.Range; + +/** + * This particular OpMode executes a POV Game style Teleop for a direct drive robot + * The code is structured as a LinearOpMode + * + * In this mode the left stick moves the robot FWD and back, the Right stick turns left and right. + * It raises and lowers the arm using the Gamepad Y and A buttons respectively. + * It also opens and closes the claws slowly using the left and right Bumper buttons. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ + +@TeleOp(name="Robot: Teleop POV", group="Robot") +@Disabled +public class RobotTeleopPOV_Linear extends LinearOpMode { + + /* Declare OpMode members. 
*/ + public DcMotor leftDrive = null; + public DcMotor rightDrive = null; + public DcMotor leftArm = null; + public Servo leftClaw = null; + public Servo rightClaw = null; + + double clawOffset = 0; + + public static final double MID_SERVO = 0.5 ; + public static final double CLAW_SPEED = 0.02 ; // sets rate to move servo + public static final double ARM_UP_POWER = 0.45 ; + public static final double ARM_DOWN_POWER = -0.45 ; + + @Override + public void runOpMode() { + double left; + double right; + double drive; + double turn; + double max; + + // Define and Initialize Motors + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + leftArm = hardwareMap.get(DcMotor.class, "left_arm"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy + // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + // Define and initialize ALL installed servos. + leftClaw = hardwareMap.get(Servo.class, "left_hand"); + rightClaw = hardwareMap.get(Servo.class, "right_hand"); + leftClaw.setPosition(MID_SERVO); + rightClaw.setPosition(MID_SERVO); + + // Send telemetry message to signify robot waiting; + telemetry.addData(">", "Robot Ready. Press Play."); // + telemetry.update(); + + // Wait for the game to start (driver presses PLAY) + waitForStart(); + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + // Run wheels in POV mode (note: The joystick goes negative when pushed forward, so negate it) + // In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right. + // This way it's also easy to just drive straight, or just turn. + drive = -gamepad1.left_stick_y; + turn = gamepad1.right_stick_x; + + // Combine drive and turn for blended motion. + left = drive + turn; + right = drive - turn; + + // Normalize the values so neither exceed +/- 1.0 + max = Math.max(Math.abs(left), Math.abs(right)); + if (max > 1.0) + { + left /= max; + right /= max; + } + + // Output the safe vales to the motor drives. + leftDrive.setPower(left); + rightDrive.setPower(right); + + // Use gamepad left & right Bumpers to open and close the claw + if (gamepad1.right_bumper) + clawOffset += CLAW_SPEED; + else if (gamepad1.left_bumper) + clawOffset -= CLAW_SPEED; + + // Move both servos to new position. Assume servos are mirror image of each other. 
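+            // For example (sketch): with MID_SERVO = 0.5 and clawOffset = 0.2, the servos are commanded
+            // to 0.7 and 0.3, so the two claw halves move by the same amount in opposite directions, and
+            // the Range.clip() below keeps the commanded positions inside the valid 0.0 - 1.0 servo range.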
+ clawOffset = Range.clip(clawOffset, -0.5, 0.5); + leftClaw.setPosition(MID_SERVO + clawOffset); + rightClaw.setPosition(MID_SERVO - clawOffset); + + // Use gamepad buttons to move arm up (Y) and down (A) + if (gamepad1.y) + leftArm.setPower(ARM_UP_POWER); + else if (gamepad1.a) + leftArm.setPower(ARM_DOWN_POWER); + else + leftArm.setPower(0.0); + + // Send telemetry message to signify robot running; + telemetry.addData("claw", "Offset = %.2f", clawOffset); + telemetry.addData("left", "%.2f", left); + telemetry.addData("right", "%.2f", right); + telemetry.update(); + + // Pace this loop so jaw action is reasonable speed. + sleep(50); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopTank_Iterative.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopTank_Iterative.java new file mode 100644 index 0000000..9f1aa7c --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotTeleopTank_Iterative.java @@ -0,0 +1,160 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DcMotor; +import com.qualcomm.robotcore.hardware.Servo; +import com.qualcomm.robotcore.util.Range; + +/** + * This particular OpMode executes a Tank Drive control TeleOp a direct drive robot + * The code is structured as an Iterative OpMode + * + * In this mode, the left and right joysticks control the left and right motors respectively. + * Pushing a joystick forward will make the attached motor drive forward. + * It raises and lowers the claw using the Gamepad Y and A buttons respectively. 
+ * It also opens and closes the claws slowly using the left and right Bumper buttons. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@TeleOp(name="Robot: Teleop Tank", group="Robot") +@Disabled +public class RobotTeleopTank_Iterative extends OpMode{ + + /* Declare OpMode members. */ + public DcMotor leftDrive = null; + public DcMotor rightDrive = null; + public DcMotor leftArm = null; + public Servo leftClaw = null; + public Servo rightClaw = null; + + double clawOffset = 0; + + public static final double MID_SERVO = 0.5 ; + public static final double CLAW_SPEED = 0.02 ; // sets rate to move servo + public static final double ARM_UP_POWER = 0.50 ; // Run arm motor up at 50% power + public static final double ARM_DOWN_POWER = -0.25 ; // Run arm motor down at -25% power + + /* + * Code to run ONCE when the driver hits INIT + */ + @Override + public void init() { + // Define and Initialize Motors + leftDrive = hardwareMap.get(DcMotor.class, "left_drive"); + rightDrive = hardwareMap.get(DcMotor.class, "right_drive"); + leftArm = hardwareMap.get(DcMotor.class, "left_arm"); + + // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions. + // Pushing the left and right sticks forward MUST make robot go forward. So adjust these two lines based on your first test drive. + // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips + leftDrive.setDirection(DcMotor.Direction.REVERSE); + rightDrive.setDirection(DcMotor.Direction.FORWARD); + + // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy + // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER); + + // Define and initialize ALL installed servos. + leftClaw = hardwareMap.get(Servo.class, "left_hand"); + rightClaw = hardwareMap.get(Servo.class, "right_hand"); + leftClaw.setPosition(MID_SERVO); + rightClaw.setPosition(MID_SERVO); + + // Send telemetry message to signify robot waiting; + telemetry.addData(">", "Robot Ready. Press Play."); // + } + + /* + * Code to run REPEATEDLY after the driver hits INIT, but before they hit PLAY + */ + @Override + public void init_loop() { + } + + /* + * Code to run ONCE when the driver hits PLAY + */ + @Override + public void start() { + } + + /* + * Code to run REPEATEDLY after the driver hits PLAY but before they hit STOP + */ + @Override + public void loop() { + double left; + double right; + + // Run wheels in tank mode (note: The joystick goes negative when pushed forward, so negate it) + left = -gamepad1.left_stick_y; + right = -gamepad1.right_stick_y; + + leftDrive.setPower(left); + rightDrive.setPower(right); + + // Use gamepad left & right Bumpers to open and close the claw + if (gamepad1.right_bumper) + clawOffset += CLAW_SPEED; + else if (gamepad1.left_bumper) + clawOffset -= CLAW_SPEED; + + // Move both servos to new position. Assume servos are mirror image of each other. 
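+            // Because loop() runs repeatedly while the OpMode is active, adding or subtracting
+            // CLAW_SPEED (0.02) on each pass sweeps the claw gradually instead of snapping it;
+            // for example (sketch), about 25 passes with a bumper held moves the offset by 0.5.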
+ clawOffset = Range.clip(clawOffset, -0.5, 0.5); + leftClaw.setPosition(MID_SERVO + clawOffset); + rightClaw.setPosition(MID_SERVO - clawOffset); + + // Use gamepad buttons to move the arm up (Y) and down (A) + if (gamepad1.y) + leftArm.setPower(ARM_UP_POWER); + else if (gamepad1.a) + leftArm.setPower(ARM_DOWN_POWER); + else + leftArm.setPower(0.0); + + // Send telemetry message to signify robot running; + telemetry.addData("claw", "Offset = %.2f", clawOffset); + telemetry.addData("left", "%.2f", left); + telemetry.addData("right", "%.2f", right); + } + + /* + * Code to run ONCE after the driver hits STOP + */ + @Override + public void stop() { + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SampleRevBlinkinLedDriver.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SampleRevBlinkinLedDriver.java new file mode 100644 index 0000000..337b703 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SampleRevBlinkinLedDriver.java @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2018 Craig MacFarlane + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted + * (subject to the limitations in the disclaimer below) provided that the following conditions are + * met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions + * and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions + * and the following disclaimer in the documentation and/or other materials provided with the + * distribution. + * + * Neither the name of Craig MacFarlane nor the names of its contributors may be used to + * endorse or promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS + * SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, + * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.rev.RevBlinkinLedDriver; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.Telemetry; +import org.firstinspires.ftc.robotcore.internal.system.Deadline; + +import java.util.concurrent.TimeUnit; + +/* + * Display patterns of a REV Robotics Blinkin LED Driver. + * AUTO mode cycles through all of the patterns. + * MANUAL mode allows the user to manually change patterns using the + * left and right bumpers of a gamepad. + * + * Configure the driver on a servo port, and name it "blinkin". 
+ * + * Displays the first pattern upon init. + */ +@TeleOp(name="BlinkinExample") +@Disabled +public class SampleRevBlinkinLedDriver extends OpMode { + + /* + * Change the pattern every 10 seconds in AUTO mode. + */ + private final static int LED_PERIOD = 10; + + /* + * Rate limit gamepad button presses to every 500ms. + */ + private final static int GAMEPAD_LOCKOUT = 500; + + RevBlinkinLedDriver blinkinLedDriver; + RevBlinkinLedDriver.BlinkinPattern pattern; + + Telemetry.Item patternName; + Telemetry.Item display; + DisplayKind displayKind; + Deadline ledCycleDeadline; + Deadline gamepadRateLimit; + + protected enum DisplayKind { + MANUAL, + AUTO + } + + @Override + public void init() + { + displayKind = DisplayKind.AUTO; + + blinkinLedDriver = hardwareMap.get(RevBlinkinLedDriver.class, "blinkin"); + pattern = RevBlinkinLedDriver.BlinkinPattern.RAINBOW_RAINBOW_PALETTE; + blinkinLedDriver.setPattern(pattern); + + display = telemetry.addData("Display Kind: ", displayKind.toString()); + patternName = telemetry.addData("Pattern: ", pattern.toString()); + + ledCycleDeadline = new Deadline(LED_PERIOD, TimeUnit.SECONDS); + gamepadRateLimit = new Deadline(GAMEPAD_LOCKOUT, TimeUnit.MILLISECONDS); + } + + @Override + public void loop() + { + handleGamepad(); + + if (displayKind == DisplayKind.AUTO) { + doAutoDisplay(); + } else { + /* + * MANUAL mode: Nothing to do, setting the pattern as a result of a gamepad event. + */ + } + } + + /* + * handleGamepad + * + * Responds to a gamepad button press. Demonstrates rate limiting for + * button presses. If loop() is called every 10ms and and you don't rate + * limit, then any given button press may register as multiple button presses, + * which in this application is problematic. + * + * A: Manual mode, Right bumper displays the next pattern, left bumper displays the previous pattern. + * B: Auto mode, pattern cycles, changing every LED_PERIOD seconds. + */ + protected void handleGamepad() + { + if (!gamepadRateLimit.hasExpired()) { + return; + } + + if (gamepad1.a) { + setDisplayKind(DisplayKind.MANUAL); + gamepadRateLimit.reset(); + } else if (gamepad1.b) { + setDisplayKind(DisplayKind.AUTO); + gamepadRateLimit.reset(); + } else if ((displayKind == DisplayKind.MANUAL) && (gamepad1.left_bumper)) { + pattern = pattern.previous(); + displayPattern(); + gamepadRateLimit.reset(); + } else if ((displayKind == DisplayKind.MANUAL) && (gamepad1.right_bumper)) { + pattern = pattern.next(); + displayPattern(); + gamepadRateLimit.reset(); + } + } + + protected void setDisplayKind(DisplayKind displayKind) + { + this.displayKind = displayKind; + display.setValue(displayKind.toString()); + } + + protected void doAutoDisplay() + { + if (ledCycleDeadline.hasExpired()) { + pattern = pattern.next(); + displayPattern(); + ledCycleDeadline.reset(); + } + } + + protected void displayPattern() + { + blinkinLedDriver.setPattern(pattern); + patternName.setValue(pattern.toString()); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMU.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMU.java new file mode 100644 index 0000000..9102a10 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMU.java @@ -0,0 +1,184 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.bosch.BNO055IMU; +import com.qualcomm.hardware.bosch.JustLoggingAccelerationIntegrator; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.Func; +import org.firstinspires.ftc.robotcore.external.navigation.Acceleration; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.Position; +import org.firstinspires.ftc.robotcore.external.navigation.Velocity; + +import java.util.Locale; + +/** + * {@link SensorBNO055IMU} gives a short demo on how to use the BNO055 Inertial Motion Unit (IMU) from AdaFruit. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. 
+ * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * @see Adafruit IMU + */ +@TeleOp(name = "Sensor: BNO055 IMU", group = "Sensor") +@Disabled // Comment this out to add to the opmode list +public class SensorBNO055IMU extends LinearOpMode + { + //---------------------------------------------------------------------------------------------- + // State + //---------------------------------------------------------------------------------------------- + + // The IMU sensor object + BNO055IMU imu; + + // State used for updating telemetry + Orientation angles; + Acceleration gravity; + + //---------------------------------------------------------------------------------------------- + // Main logic + //---------------------------------------------------------------------------------------------- + + @Override public void runOpMode() { + + // Set up the parameters with which we will use our IMU. Note that integration + // algorithm here just reports accelerations to the logcat log; it doesn't actually + // provide positional information. + BNO055IMU.Parameters parameters = new BNO055IMU.Parameters(); + parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES; + parameters.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC; + parameters.calibrationDataFile = "BNO055IMUCalibration.json"; // see the calibration sample opmode + parameters.loggingEnabled = true; + parameters.loggingTag = "IMU"; + parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator(); + + // Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port + // on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU", + // and named "imu". + imu = hardwareMap.get(BNO055IMU.class, "imu"); + imu.initialize(parameters); + + // Set up our telemetry dashboard + composeTelemetry(); + + // Wait until we're told to go + waitForStart(); + + // Start the logging of measured acceleration + imu.startAccelerationIntegration(new Position(), new Velocity(), 1000); + + // Loop and update the dashboard + while (opModeIsActive()) { + telemetry.update(); + } + } + + //---------------------------------------------------------------------------------------------- + // Telemetry Configuration + //---------------------------------------------------------------------------------------------- + + void composeTelemetry() { + + // At the beginning of each telemetry update, grab a bunch of data + // from the IMU that we will then display in separate lines. + telemetry.addAction(new Runnable() { @Override public void run() + { + // Acquiring the angles is relatively expensive; we don't want + // to do that in each of the three items that need that info, as that's + // three times the necessary expense. 
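+            // With AxesReference.INTRINSIC and AxesOrder.ZYX, angles.firstAngle is the rotation about
+            // the Z axis (reported below as "heading"), secondAngle is about Y ("roll"), and
+            // thirdAngle is about X ("pitch").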
+ angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES); + gravity = imu.getGravity(); + } + }); + + telemetry.addLine() + .addData("status", new Func() { + @Override public String value() { + return imu.getSystemStatus().toShortString(); + } + }) + .addData("calib", new Func() { + @Override public String value() { + return imu.getCalibrationStatus().toString(); + } + }); + + telemetry.addLine() + .addData("heading", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.firstAngle); + } + }) + .addData("roll", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.secondAngle); + } + }) + .addData("pitch", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.thirdAngle); + } + }); + + telemetry.addLine() + .addData("grvty", new Func() { + @Override public String value() { + return gravity.toString(); + } + }) + .addData("mag", new Func() { + @Override public String value() { + return String.format(Locale.getDefault(), "%.3f", + Math.sqrt(gravity.xAccel*gravity.xAccel + + gravity.yAccel*gravity.yAccel + + gravity.zAccel*gravity.zAccel)); + } + }); + } + + //---------------------------------------------------------------------------------------------- + // Formatting + //---------------------------------------------------------------------------------------------- + + String formatAngle(AngleUnit angleUnit, double angle) { + return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle)); + } + + String formatDegrees(double degrees){ + return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees)); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMUCalibration.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMUCalibration.java new file mode 100644 index 0000000..064e8a4 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorBNO055IMUCalibration.java @@ -0,0 +1,232 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.adafruit.AdafruitBNO055IMU; +import com.qualcomm.hardware.bosch.BNO055IMU; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.util.ReadWriteFile; + +import org.firstinspires.ftc.robotcore.external.Func; +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.internal.system.AppUtil; + +import java.io.File; +import java.util.Locale; + +/** + * {@link SensorBNO055IMUCalibration} calibrates the IMU accelerometer per + * "Section 3.11 Calibration" of the BNO055 specification. + * + *

+ * Manual calibration of the IMU is definitely NOT necessary: except for the magnetometer
+ * (which is not used by the default {@link BNO055IMU.SensorMode#IMU SensorMode#IMU}), the
+ * BNO055 is internally self-calibrating and thus can be very successfully used without manual
+ * intervention. That said, performing a one-time calibration, saving the results persistently,
+ * then loading them again at each run can help reduce the time that automatic calibration requires.
+ *
+ * This summary of the calibration process, from Intel, is informative:
+ *
+ * "This device requires calibration in order to operate accurately. [...] Calibration data is
+ * lost on a power cycle. See one of the examples for a description of how to calibrate the
+ * device, but in essence:
+ *
+ * There is a calibration status register available [...] that returns the calibration status
+ * of the accelerometer (ACC), magnetometer (MAG), gyroscope (GYR), and overall system (SYS).
+ * Each of these values ranges from 0 (uncalibrated) to 3 (fully calibrated). Calibration
+ * [ideally] involves certain motions to get all 4 values at 3. The motions are as follows
+ * (though see the datasheet for more information):
+ *
+ *   GYR: Simply let the sensor sit flat for a few seconds.
+ *   ACC: Move the sensor in various positions. Start flat, then rotate slowly by 45 degrees,
+ *        hold for a few seconds, then continue rotating another 45 degrees and hold, etc.
+ *        6 or more movements of this type may be required. You can move through any axis you
+ *        desire, but make sure that the device is lying at least once perpendicular to the
+ *        x, y, and z axes.
+ *   MAG: Move slowly in a figure 8 pattern in the air, until the calibration value reaches 3.
+ *   SYS: This will usually reach 3 when the other items have also reached 3. If not, continue
+ *        slowly moving the device through various axes until it does."
+ *
+ * To calibrate the IMU, run this sample opmode with a gamepad attached to the driver station.
+ * Once the IMU has reached sufficient calibration as reported on telemetry, press the 'A'
+ * button on the gamepad to write the calibration to a file. That file can then be indicated
+ * later when running an opmode which uses the IMU.
+ *
+ * Note: if your intended uses of the IMU do not include use of all its sensors (for example,
+ * you might not use the magnetometer), then it makes little sense for you to wait for full
+ * calibration of the sensors you are not using before saving the calibration data. Indeed,
+ * it appears that in a SensorMode that doesn't use the magnetometer (for example), the
+ * magnetometer cannot actually be calibrated.

    + * + * @see AdafruitBNO055IMU + * @see BNO055IMU.Parameters#calibrationDataFile + * @see BNO055 product page + * @see BNO055 specification + */ +@TeleOp(name = "Sensor: BNO055 IMU Calibration", group = "Sensor") +@Disabled // Uncomment this to add to the opmode list +public class SensorBNO055IMUCalibration extends LinearOpMode + { + //---------------------------------------------------------------------------------------------- + // State + //---------------------------------------------------------------------------------------------- + + // Our sensors, motors, and other devices go here, along with other long term state + BNO055IMU imu; + + // State used for updating telemetry + Orientation angles; + + //---------------------------------------------------------------------------------------------- + // Main logic + //---------------------------------------------------------------------------------------------- + + @Override public void runOpMode() { + + telemetry.log().setCapacity(12); + telemetry.log().add(""); + telemetry.log().add("Please refer to the calibration instructions"); + telemetry.log().add("contained in the Adafruit IMU calibration"); + telemetry.log().add("sample opmode."); + telemetry.log().add(""); + telemetry.log().add("When sufficient calibration has been reached,"); + telemetry.log().add("press the 'A' button to write the current"); + telemetry.log().add("calibration data to a file."); + telemetry.log().add(""); + + // We are expecting the IMU to be attached to an I2C port on a Core Device Interface Module and named "imu". + BNO055IMU.Parameters parameters = new BNO055IMU.Parameters(); + parameters.loggingEnabled = true; + parameters.loggingTag = "IMU"; + imu = hardwareMap.get(BNO055IMU.class, "imu"); + imu.initialize(parameters); + + composeTelemetry(); + telemetry.log().add("Waiting for start..."); + + // Wait until we're told to go + while (!isStarted()) { + telemetry.update(); + idle(); + } + + telemetry.log().add("...started..."); + + while (opModeIsActive()) { + + if (gamepad1.a) { + + // Get the calibration data + BNO055IMU.CalibrationData calibrationData = imu.readCalibrationData(); + + // Save the calibration data to a file. You can choose whatever file + // name you wish here, but you'll want to indicate the same file name + // when you initialize the IMU in an opmode in which it is used. If you + // have more than one IMU on your robot, you'll of course want to use + // different configuration file names for each. + String filename = "AdafruitIMUCalibration.json"; + File file = AppUtil.getInstance().getSettingsFile(filename); + ReadWriteFile.writeFile(file, calibrationData.serialize()); + telemetry.log().add("saved to '%s'", filename); + + // Wait for the button to be released + while (gamepad1.a) { + telemetry.update(); + idle(); + } + } + + telemetry.update(); + } + } + + void composeTelemetry() { + + // At the beginning of each telemetry update, grab a bunch of data + // from the IMU that we will then display in separate lines. + telemetry.addAction(new Runnable() { @Override public void run() + { + // Acquiring the angles is relatively expensive; we don't want + // to do that in each of the three items that need that info, as that's + // three times the necessary expense. 
+ angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES); + } + }); + + telemetry.addLine() + .addData("status", new Func() { + @Override public String value() { + return imu.getSystemStatus().toShortString(); + } + }) + .addData("calib", new Func() { + @Override public String value() { + return imu.getCalibrationStatus().toString(); + } + }); + + telemetry.addLine() + .addData("heading", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.firstAngle); + } + }) + .addData("roll", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.secondAngle); + } + }) + .addData("pitch", new Func() { + @Override public String value() { + return formatAngle(angles.angleUnit, angles.thirdAngle); + } + }); + } + + //---------------------------------------------------------------------------------------------- + // Formatting + //---------------------------------------------------------------------------------------------- + + String formatAngle(AngleUnit angleUnit, double angle) { + return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle)); + } + + String formatDegrees(double degrees){ + return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees)); + } +} \ No newline at end of file diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java new file mode 100644 index 0000000..8209b83 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java @@ -0,0 +1,223 @@ +/* Copyright (c) 2017-2020 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.app.Activity; +import android.graphics.Color; +import android.view.View; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DistanceSensor; +import com.qualcomm.robotcore.hardware.NormalizedColorSensor; +import com.qualcomm.robotcore.hardware.NormalizedRGBA; +import com.qualcomm.robotcore.hardware.SwitchableLight; + +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; + +/** + * This is an example LinearOpMode that shows how to use a color sensor in a generic + * way, regardless of which particular make or model of color sensor is used. The Op Mode + * assumes that the color sensor is configured with a name of "sensor_color". + * + * There will be some variation in the values measured depending on the specific sensor you are using. + * + * You can increase the gain (a multiplier to make the sensor report higher values) by holding down + * the A button on the gamepad, and decrease the gain by holding down the B button on the gamepad. + * + * If the color sensor has a light which is controllable from software, you can use the X button on + * the gamepad to toggle the light on and off. The REV sensors don't support this, but instead have + * a physical switch on them to turn the light on and off, beginning with REV Color Sensor V2. + * + * If the color sensor also supports short-range distance measurements (usually via an infrared + * proximity sensor), the reported distance will be written to telemetry. As of September 2020, + * the only color sensors that support this are the ones from REV Robotics. These infrared proximity + * sensor measurements are only useful at very small distances, and are sensitive to ambient light + * and surface reflectivity. You should use a different sensor if you need precise distance measurements. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this Op Mode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: Color", group = "Sensor") +@Disabled +public class SensorColor extends LinearOpMode { + + /** The colorSensor field will contain a reference to our color sensor hardware object */ + NormalizedColorSensor colorSensor; + + /** The relativeLayout field is used to aid in providing interesting visual feedback + * in this sample application; you probably *don't* need this when you use a color sensor on your + * robot. Note that you won't see anything change on the Driver Station, only on the Robot Controller. */ + View relativeLayout; + + /** + * The runOpMode() method is the root of this Op Mode, as it is in all LinearOpModes. + * Our implementation here, though is a bit unusual: we've decided to put all the actual work + * in the runSample() method rather than directly in runOpMode() itself. The reason we do that is + * that in this sample we're changing the background color of the robot controller screen as the + * Op Mode runs, and we want to be able to *guarantee* that we restore it to something reasonable + * and palatable when the Op Mode ends. The simplest way to do that is to use a try...finally + * block around the main, core logic, and an easy way to make that all clear was to separate + * the former from the latter in separate methods. 
+ */ + @Override public void runOpMode() { + + // Get a reference to the RelativeLayout so we can later change the background + // color of the Robot Controller app to match the hue detected by the RGB sensor. + int relativeLayoutId = hardwareMap.appContext.getResources().getIdentifier("RelativeLayout", "id", hardwareMap.appContext.getPackageName()); + relativeLayout = ((Activity) hardwareMap.appContext).findViewById(relativeLayoutId); + + try { + runSample(); // actually execute the sample + } finally { + // On the way out, *guarantee* that the background is reasonable. It doesn't actually start off + // as pure white, but it's too much work to dig out what actually was used, and this is good + // enough to at least make the screen reasonable again. + // Set the panel back to the default color + relativeLayout.post(new Runnable() { + public void run() { + relativeLayout.setBackgroundColor(Color.WHITE); + } + }); + } + } + + protected void runSample() { + // You can give the sensor a gain value, will be multiplied by the sensor's raw value before the + // normalized color values are calculated. Color sensors (especially the REV Color Sensor V3) + // can give very low values (depending on the lighting conditions), which only use a small part + // of the 0-1 range that is available for the red, green, and blue values. In brighter conditions, + // you should use a smaller gain than in dark conditions. If your gain is too high, all of the + // colors will report at or near 1, and you won't be able to determine what color you are + // actually looking at. For this reason, it's better to err on the side of a lower gain + // (but always greater than or equal to 1). + float gain = 2; + + // Once per loop, we will update this hsvValues array. The first element (0) will contain the + // hue, the second element (1) will contain the saturation, and the third element (2) will + // contain the value. See http://web.archive.org/web/20190311170843/https://infohost.nmt.edu/tcc/help/pubs/colortheory/web/hsv.html + // for an explanation of HSV color. + final float[] hsvValues = new float[3]; + + // xButtonPreviouslyPressed and xButtonCurrentlyPressed keep track of the previous and current + // state of the X button on the gamepad + boolean xButtonPreviouslyPressed = false; + boolean xButtonCurrentlyPressed = false; + + // Get a reference to our sensor object. It's recommended to use NormalizedColorSensor over + // ColorSensor, because NormalizedColorSensor consistently gives values between 0 and 1, while + // the values you get from ColorSensor are dependent on the specific sensor you're using. + colorSensor = hardwareMap.get(NormalizedColorSensor.class, "sensor_color"); + + // If possible, turn the light on in the beginning (it might already be on anyway, + // we just make sure it is if we can). + if (colorSensor instanceof SwitchableLight) { + ((SwitchableLight)colorSensor).enableLight(true); + } + + // Wait for the start button to be pressed. + waitForStart(); + + // Loop until we are asked to stop + while (opModeIsActive()) { + // Explain basic gain information via telemetry + telemetry.addLine("Hold the A button on gamepad 1 to increase gain, or B to decrease it.\n"); + telemetry.addLine("Higher gain values mean that the sensor will report larger numbers for Red, Green, and Blue, and Value\n"); + + // Update the gain value if either of the A or B gamepad buttons is being held + if (gamepad1.a) { + // Only increase the gain by a small amount, since this loop will occur multiple times per second. 
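+        // For a rough sense of scale (sketch): at, say, 50 loop iterations per second, holding A
+        // changes the gain by about 0.25 per second.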
+ gain += 0.005; + } else if (gamepad1.b && gain > 1) { // A gain of less than 1 will make the values smaller, which is not helpful. + gain -= 0.005; + } + + // Show the gain value via telemetry + telemetry.addData("Gain", gain); + + // Tell the sensor our desired gain value (normally you would do this during initialization, + // not during the loop) + colorSensor.setGain(gain); + + // Check the status of the X button on the gamepad + xButtonCurrentlyPressed = gamepad1.x; + + // If the button state is different than what it was, then act + if (xButtonCurrentlyPressed != xButtonPreviouslyPressed) { + // If the button is (now) down, then toggle the light + if (xButtonCurrentlyPressed) { + if (colorSensor instanceof SwitchableLight) { + SwitchableLight light = (SwitchableLight)colorSensor; + light.enableLight(!light.isLightOn()); + } + } + } + xButtonPreviouslyPressed = xButtonCurrentlyPressed; + + // Get the normalized colors from the sensor + NormalizedRGBA colors = colorSensor.getNormalizedColors(); + + /* Use telemetry to display feedback on the driver station. We show the red, green, and blue + * normalized values from the sensor (in the range of 0 to 1), as well as the equivalent + * HSV (hue, saturation and value) values. See http://web.archive.org/web/20190311170843/https://infohost.nmt.edu/tcc/help/pubs/colortheory/web/hsv.html + * for an explanation of HSV color. */ + + // Update the hsvValues array by passing it to Color.colorToHSV() + Color.colorToHSV(colors.toColor(), hsvValues); + + telemetry.addLine() + .addData("Red", "%.3f", colors.red) + .addData("Green", "%.3f", colors.green) + .addData("Blue", "%.3f", colors.blue); + telemetry.addLine() + .addData("Hue", "%.3f", hsvValues[0]) + .addData("Saturation", "%.3f", hsvValues[1]) + .addData("Value", "%.3f", hsvValues[2]); + telemetry.addData("Alpha", "%.3f", colors.alpha); + + /* If this color sensor also has a distance sensor, display the measured distance. + * Note that the reported distance is only useful at very close range, and is impacted by + * ambient light and surface reflectivity. */ + if (colorSensor instanceof DistanceSensor) { + telemetry.addData("Distance (cm)", "%.3f", ((DistanceSensor) colorSensor).getDistance(DistanceUnit.CM)); + } + + telemetry.update(); + + // Change the Robot Controller's background color to match the color detected by the color sensor. + relativeLayout.post(new Runnable() { + public void run() { + relativeLayout.setBackgroundColor(Color.HSVToColor(hsvValues)); + } + }); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java new file mode 100644 index 0000000..b367924 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java @@ -0,0 +1,88 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. 
+ * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.DigitalChannel; + +/* + * This is an example LinearOpMode that shows how to use + * a REV Robotics Touch Sensor. + * + * It assumes that the touch sensor is configured with a name of "sensor_digital". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. + */ +@TeleOp(name = "Sensor: Digital touch", group = "Sensor") +@Disabled +public class SensorDigitalTouch extends LinearOpMode { + /** + * The REV Robotics Touch Sensor + * is treated as a digital channel. It is HIGH if the button is unpressed. + * It pulls LOW if the button is pressed. + * + * Also, when you connect a REV Robotics Touch Sensor to the digital I/O port on the + * Expansion Hub using a 4-wire JST cable, the second pin gets connected to the Touch Sensor. + * The lower (first) pin stays unconnected.* + */ + + DigitalChannel digitalTouch; // Hardware Device Object + + @Override + public void runOpMode() { + + // get a reference to our digitalTouch object. + digitalTouch = hardwareMap.get(DigitalChannel.class, "sensor_digital"); + + // set the digital channel to input. + digitalTouch.setMode(DigitalChannel.Mode.INPUT); + + // wait for the start button to be pressed. + waitForStart(); + + // while the op mode is active, loop and read the light levels. + // Note we use opModeIsActive() as our loop condition because it is an interruptible method. + while (opModeIsActive()) { + + // send the info back to driver station using telemetry function. + // if the digital channel returns true it's HIGH and the button is unpressed. 
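+ // In other words, the input is active-low: getState() returns false while the button is held down.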
+ if (digitalTouch.getState() == true) { + telemetry.addData("Digital Touch", "Is Not Pressed"); + } else { + telemetry.addData("Digital Touch", "Is Pressed"); + } + + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorKLNavxMicro.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorKLNavxMicro.java new file mode 100644 index 0000000..583f525 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorKLNavxMicro.java @@ -0,0 +1,129 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.kauailabs.NavxMicroNavigationSensor; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.Gyroscope; +import com.qualcomm.robotcore.hardware.IntegratingGyroscope; +import com.qualcomm.robotcore.util.ElapsedTime; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; + +/* + * This is an example LinearOpMode that shows how to use Kauai Labs navX Micro Robotics Navigation + * Sensor. It assumes that the sensor is configured with a name of "navx". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. 
+ * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: KL navX Micro", group = "Sensor") +@Disabled +public class SensorKLNavxMicro extends LinearOpMode { + + /** In this sample, for illustration purposes we use two interfaces on the one gyro object. + * That's likely atypical: you'll probably use one or the other in any given situation, + * depending on what you're trying to do. {@link IntegratingGyroscope} (and it's base interface, + * {@link Gyroscope}) are common interfaces supported by possibly several different gyro + * implementations. {@link NavxMicroNavigationSensor}, by contrast, provides functionality that + * is unique to the navX Micro sensor. + */ + IntegratingGyroscope gyro; + NavxMicroNavigationSensor navxMicro; + + // A timer helps provide feedback while calibration is taking place + ElapsedTime timer = new ElapsedTime(); + + @Override public void runOpMode() throws InterruptedException { + // Get a reference to a Modern Robotics GyroSensor object. We use several interfaces + // on this object to illustrate which interfaces support which functionality. + navxMicro = hardwareMap.get(NavxMicroNavigationSensor.class, "navx"); + gyro = (IntegratingGyroscope)navxMicro; + // If you're only interested int the IntegratingGyroscope interface, the following will suffice. + // gyro = hardwareMap.get(IntegratingGyroscope.class, "navx"); + + // The gyro automatically starts calibrating. This takes a few seconds. + telemetry.log().add("Gyro Calibrating. Do Not Move!"); + + // Wait until the gyro calibration is complete + timer.reset(); + while (navxMicro.isCalibrating()) { + telemetry.addData("calibrating", "%s", Math.round(timer.seconds())%2==0 ? "|.." : "..|"); + telemetry.update(); + Thread.sleep(50); + } + telemetry.log().clear(); telemetry.log().add("Gyro Calibrated. Press Start."); + telemetry.clear(); telemetry.update(); + + // Wait for the start button to be pressed + waitForStart(); + telemetry.log().clear(); + + while (opModeIsActive()) { + + // Read dimensionalized data from the gyro. This gyro can report angular velocities + // about all three axes. Additionally, it internally integrates the Z axis to + // be able to report an absolute angular Z orientation. 
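+ // Note: with AxesReference.INTRINSIC and AxesOrder.ZYX, the firstAngle of the Orientation read
+ // below is the rotation about Z, which is why it is reported as "heading" in the telemetry.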
+ AngularVelocity rates = gyro.getAngularVelocity(AngleUnit.DEGREES); + Orientation angles = gyro.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES); + + telemetry.addLine() + .addData("dx", formatRate(rates.xRotationRate)) + .addData("dy", formatRate(rates.yRotationRate)) + .addData("dz", "%s deg/s", formatRate(rates.zRotationRate)); + + telemetry.addLine() + .addData("heading", formatAngle(angles.angleUnit, angles.firstAngle)) + .addData("roll", formatAngle(angles.angleUnit, angles.secondAngle)) + .addData("pitch", "%s deg", formatAngle(angles.angleUnit, angles.thirdAngle)); + telemetry.update(); + + idle(); // Always call idle() at the bottom of your while(opModeIsActive()) loop + } + } + + String formatRate(float rate) { + return String.format("%.3f", rate); + } + + String formatAngle(AngleUnit angleUnit, double angle) { + return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle)); + } + + String formatDegrees(double degrees){ + return String.format("%.1f", AngleUnit.DEGREES.normalize(degrees)); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRColor.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRColor.java new file mode 100644 index 0000000..efba7d1 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRColor.java @@ -0,0 +1,139 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.app.Activity; +import android.graphics.Color; +import android.view.View; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.ColorSensor; + +/* + * + * This is an example LinearOpMode that shows how to use + * a Modern Robotics Color Sensor. + * + * The op mode assumes that the color sensor + * is configured with a name of "sensor_color". + * + * You can use the X button on gamepad1 to toggle the LED on and off. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: MR Color", group = "Sensor") +@Disabled +public class SensorMRColor extends LinearOpMode { + + ColorSensor colorSensor; // Hardware Device Object + + + @Override + public void runOpMode() { + + // hsvValues is an array that will hold the hue, saturation, and value information. + float hsvValues[] = {0F,0F,0F}; + + // values is a reference to the hsvValues array. + final float values[] = hsvValues; + + // get a reference to the RelativeLayout so we can change the background + // color of the Robot Controller app to match the hue detected by the RGB sensor. + int relativeLayoutId = hardwareMap.appContext.getResources().getIdentifier("RelativeLayout", "id", hardwareMap.appContext.getPackageName()); + final View relativeLayout = ((Activity) hardwareMap.appContext).findViewById(relativeLayoutId); + + // bPrevState and bCurrState represent the previous and current state of the button. + boolean bPrevState = false; + boolean bCurrState = false; + + // bLedOn represents the state of the LED. + boolean bLedOn = true; + + // get a reference to our ColorSensor object. + colorSensor = hardwareMap.get(ColorSensor.class, "sensor_color"); + + // Set the LED in the beginning + colorSensor.enableLed(bLedOn); + + // wait for the start button to be pressed. + waitForStart(); + + // while the op mode is active, loop and read the RGB data. + // Note we use opModeIsActive() as our loop condition because it is an interruptible method. + while (opModeIsActive()) { + + // check the status of the x button on either gamepad. + bCurrState = gamepad1.x; + + // check for button state transitions. + if (bCurrState && (bCurrState != bPrevState)) { + + // button is transitioning to a pressed state. So Toggle LED + bLedOn = !bLedOn; + colorSensor.enableLed(bLedOn); + } + + // update previous state variable. + bPrevState = bCurrState; + + // convert the RGB values to HSV values. + Color.RGBToHSV(colorSensor.red() * 8, colorSensor.green() * 8, colorSensor.blue() * 8, hsvValues); + + // send the info back to driver station using telemetry function. + telemetry.addData("LED", bLedOn ? "On" : "Off"); + telemetry.addData("Clear", colorSensor.alpha()); + telemetry.addData("Red ", colorSensor.red()); + telemetry.addData("Green", colorSensor.green()); + telemetry.addData("Blue ", colorSensor.blue()); + telemetry.addData("Hue", hsvValues[0]); + + // change the background color to match the color detected by the RGB sensor. + // pass a reference to the hue, saturation, and value array as an argument + // to the HSVToColor method. 
+ relativeLayout.post(new Runnable() { + public void run() { + relativeLayout.setBackgroundColor(Color.HSVToColor(0xff, values)); + } + }); + + telemetry.update(); + } + + // Set the panel back to the default color + relativeLayout.post(new Runnable() { + public void run() { + relativeLayout.setBackgroundColor(Color.WHITE); + } + }); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRCompass.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRCompass.java new file mode 100644 index 0000000..583ac1a --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRCompass.java @@ -0,0 +1,148 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cCompassSensor; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.CompassSensor; +import com.qualcomm.robotcore.util.ElapsedTime; + +import org.firstinspires.ftc.robotcore.external.navigation.Acceleration; + +/** + * The {@link SensorMRCompass} op mode provides a demonstration of the + * functionality provided by the Modern Robotics compass sensor. + * + * The op mode assumes that the MR compass is configured with a name of "compass". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. 
+ * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * @see MR Compass Sensor + */ +@TeleOp(name = "Sensor: MR compass", group = "Sensor") +@Disabled // comment out or remove this line to enable this opmode +public class SensorMRCompass extends LinearOpMode { + + ModernRoboticsI2cCompassSensor compass; + ElapsedTime timer = new ElapsedTime(); + + @Override public void runOpMode() { + + // get a reference to our compass + compass = hardwareMap.get(ModernRoboticsI2cCompassSensor.class, "compass"); + + telemetry.log().setCapacity(20); + telemetry.log().add("The compass sensor operates quite well out-of-the"); + telemetry.log().add("box, as shipped by the manufacturer. Precision can"); + telemetry.log().add("however be somewhat improved with calibration."); + telemetry.log().add(""); + telemetry.log().add("To calibrate the compass once the opmode is"); + telemetry.log().add("started, make sure the compass is level, then"); + telemetry.log().add("press 'A' on the gamepad. Next, slowly rotate the "); + telemetry.log().add("compass in a full 360 degree circle while keeping"); + telemetry.log().add("it level. When complete, press 'B'."); + + // wait for the start button to be pressed + waitForStart(); + telemetry.log().clear(); + + while (opModeIsActive()) { + + // If the A button is pressed, start calibration and wait for the A button to rise + if (gamepad1.a && !compass.isCalibrating()) { + + telemetry.log().clear(); + telemetry.log().add("Calibration started"); + telemetry.log().add("Slowly rotate compass 360deg"); + telemetry.log().add("Press 'B' when complete"); + compass.setMode(CompassSensor.CompassMode.CALIBRATION_MODE); + timer.reset(); + + while (gamepad1.a && opModeIsActive()) { + doTelemetry(); + idle(); + } + } + + // If the B button is pressed, stop calibration and wait for the B button to rise + if (gamepad1.b && compass.isCalibrating()) { + + telemetry.log().clear(); + telemetry.log().add("Calibration complete"); + compass.setMode(CompassSensor.CompassMode.MEASUREMENT_MODE); + + if (compass.calibrationFailed()) { + telemetry.log().add("Calibration failed"); + compass.writeCommand(ModernRoboticsI2cCompassSensor.Command.NORMAL); + } + + while (gamepad1.a && opModeIsActive()) { + doTelemetry(); + idle(); + } + } + + doTelemetry(); + } + } + + protected void doTelemetry() { + + if (compass.isCalibrating()) { + + telemetry.addData("compass", "calibrating %s", Math.round(timer.seconds())%2==0 ? "|.." : "..|"); + + } else { + + // getDirection() returns a traditional compass heading in the range [0,360), + // with values increasing in a CW direction + telemetry.addData("heading", "%.1f", compass.getDirection()); + + // getAcceleration() returns the current 3D acceleration experienced by + // the sensor. 
This is used internally to the sensor to compute its tilt and thence + // to correct the magnetometer reading to produce tilt-corrected values in getDirection() + Acceleration accel = compass.getAcceleration(); + double accelMagnitude = Math.sqrt(accel.xAccel*accel.xAccel + accel.yAccel*accel.yAccel + accel.zAccel*accel.zAccel); + telemetry.addData("accel", accel); + telemetry.addData("accel magnitude", "%.3f", accelMagnitude); + + // getMagneticFlux returns the 3D magnetic field flux experienced by the sensor + telemetry.addData("mag flux", compass.getMagneticFlux()); + } + + // the command register provides status data + telemetry.addData("command", "%s", compass.readCommand()); + + telemetry.update(); + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRGyro.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRGyro.java new file mode 100644 index 0000000..5425f0d --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRGyro.java @@ -0,0 +1,162 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cGyro; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.Gyroscope; +import com.qualcomm.robotcore.hardware.IntegratingGyroscope; +import com.qualcomm.robotcore.util.ElapsedTime; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity; +import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; +import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; + +/* + * This is an example LinearOpMode that shows how to use the Modern Robotics Gyro. + * + * The op mode assumes that the gyro sensor is attached to a Device Interface Module + * I2C channel and is configured with a name of "gyro". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list +*/ +@TeleOp(name = "Sensor: MR Gyro", group = "Sensor") +@Disabled +public class SensorMRGyro extends LinearOpMode { + + /** In this sample, for illustration purposes we use two interfaces on the one gyro object. + * That's likely atypical: you'll probably use one or the other in any given situation, + * depending on what you're trying to do. {@link IntegratingGyroscope} (and it's base interface, + * {@link Gyroscope}) are common interfaces supported by possibly several different gyro + * implementations. {@link ModernRoboticsI2cGyro}, by contrast, provides functionality that + * is unique to the Modern Robotics gyro sensor. + */ + IntegratingGyroscope gyro; + ModernRoboticsI2cGyro modernRoboticsI2cGyro; + + // A timer helps provide feedback while calibration is taking place + ElapsedTime timer = new ElapsedTime(); + + @Override + public void runOpMode() { + + boolean lastResetState = false; + boolean curResetState = false; + + // Get a reference to a Modern Robotics gyro object. We use several interfaces + // on this object to illustrate which interfaces support which functionality. + modernRoboticsI2cGyro = hardwareMap.get(ModernRoboticsI2cGyro.class, "gyro"); + gyro = (IntegratingGyroscope)modernRoboticsI2cGyro; + // If you're only interested int the IntegratingGyroscope interface, the following will suffice. + // gyro = hardwareMap.get(IntegratingGyroscope.class, "gyro"); + // A similar approach will work for the Gyroscope interface, if that's all you need. + + // Start calibrating the gyro. This takes a few seconds and is worth performing + // during the initialization phase at the start of each OpMode. + telemetry.log().add("Gyro Calibrating. Do Not Move!"); + modernRoboticsI2cGyro.calibrate(); + + // Wait until the gyro calibration is complete + timer.reset(); + while (!isStopRequested() && modernRoboticsI2cGyro.isCalibrating()) { + telemetry.addData("calibrating", "%s", Math.round(timer.seconds())%2==0 ? "|.." : "..|"); + telemetry.update(); + sleep(50); + } + + telemetry.log().clear(); telemetry.log().add("Gyro Calibrated. 
Press Start."); + telemetry.clear(); telemetry.update(); + + // Wait for the start button to be pressed + waitForStart(); + telemetry.log().clear(); + telemetry.log().add("Press A & B to reset heading"); + + // Loop until we're asked to stop + while (opModeIsActive()) { + + // If the A and B buttons are pressed just now, reset Z heading. + curResetState = (gamepad1.a && gamepad1.b); + if (curResetState && !lastResetState) { + modernRoboticsI2cGyro.resetZAxisIntegrator(); + } + lastResetState = curResetState; + + // The raw() methods report the angular rate of change about each of the + // three axes directly as reported by the underlying sensor IC. + int rawX = modernRoboticsI2cGyro.rawX(); + int rawY = modernRoboticsI2cGyro.rawY(); + int rawZ = modernRoboticsI2cGyro.rawZ(); + int heading = modernRoboticsI2cGyro.getHeading(); + int integratedZ = modernRoboticsI2cGyro.getIntegratedZValue(); + + // Read dimensionalized data from the gyro. This gyro can report angular velocities + // about all three axes. Additionally, it internally integrates the Z axis to + // be able to report an absolute angular Z orientation. + AngularVelocity rates = gyro.getAngularVelocity(AngleUnit.DEGREES); + float zAngle = gyro.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES).firstAngle; + + // Read administrative information from the gyro + int zAxisOffset = modernRoboticsI2cGyro.getZAxisOffset(); + int zAxisScalingCoefficient = modernRoboticsI2cGyro.getZAxisScalingCoefficient(); + + telemetry.addLine() + .addData("dx", formatRate(rates.xRotationRate)) + .addData("dy", formatRate(rates.yRotationRate)) + .addData("dz", "%s deg/s", formatRate(rates.zRotationRate)); + telemetry.addData("angle", "%s deg", formatFloat(zAngle)); + telemetry.addData("heading", "%3d deg", heading); + telemetry.addData("integrated Z", "%3d", integratedZ); + telemetry.addLine() + .addData("rawX", formatRaw(rawX)) + .addData("rawY", formatRaw(rawY)) + .addData("rawZ", formatRaw(rawZ)); + telemetry.addLine().addData("z offset", zAxisOffset).addData("z coeff", zAxisScalingCoefficient); + telemetry.update(); + } + } + + String formatRaw(int rawValue) { + return String.format("%d", rawValue); + } + + String formatRate(float rate) { + return String.format("%.3f", rate); + } + + String formatFloat(float rate) { + return String.format("%.3f", rate); + } + +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRIrSeeker.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRIrSeeker.java new file mode 100644 index 0000000..9bd6452 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRIrSeeker.java @@ -0,0 +1,84 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. 
+ * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.IrSeekerSensor; + +/* + * This is an example LinearOpMode that shows how to use + * the Modern Robotics ITR Seeker + * + * The op mode assumes that the IR Seeker + * is configured with a name of "sensor_ir". + * + * Set the switch on the Modern Robotics IR beacon to 1200 at 180.
+ * Turn on the IR beacon. + * Make sure the side of the beacon with the LED on is facing the robot.
    + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: MR IR Seeker", group = "Sensor") +@Disabled +public class SensorMRIrSeeker extends LinearOpMode { + + @Override + public void runOpMode() { + + IrSeekerSensor irSeeker; // Hardware Device Object + + // get a reference to our GyroSensor object. + irSeeker = hardwareMap.get(IrSeekerSensor.class, "sensor_ir"); + + // wait for the start button to be pressed. + waitForStart(); + + while (opModeIsActive()) { + + // Ensure we have a IR signal + if (irSeeker.signalDetected()) + { + // Display angle and strength + telemetry.addData("Angle", irSeeker.getAngle()); + telemetry.addData("Strength", irSeeker.getStrength()); + } + else + { + // Display loss of signal + telemetry.addData("Seeker", "Signal Lost"); + } + + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMROpticalDistance.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMROpticalDistance.java new file mode 100644 index 0000000..8424ef7 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMROpticalDistance.java @@ -0,0 +1,71 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.hardware.OpticalDistanceSensor; + +/* + * This is an example LinearOpMode that shows how to use + * a Modern Robotics Optical Distance Sensor + * It assumes that the ODS sensor is configured with a name of "sensor_ods". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + */ +@TeleOp(name = "Sensor: MR ODS", group = "Sensor") +@Disabled +public class SensorMROpticalDistance extends LinearOpMode { + + OpticalDistanceSensor odsSensor; // Hardware Device Object + + @Override + public void runOpMode() { + + // get a reference to our Light Sensor object. + odsSensor = hardwareMap.get(OpticalDistanceSensor.class, "sensor_ods"); + + // wait for the start button to be pressed. + waitForStart(); + + // while the op mode is active, loop and read the light levels. + // Note we use opModeIsActive() as our loop condition because it is an interruptible method. + while (opModeIsActive()) { + + // send the info back to driver station using telemetry function. + telemetry.addData("Raw", odsSensor.getRawLightDetected()); + telemetry.addData("Normal", odsSensor.getLightDetected()); + + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRRangeSensor.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRRangeSensor.java new file mode 100644 index 0000000..d7de1ff --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorMRRangeSensor.java @@ -0,0 +1,72 @@ +/* Copyright (c) 2017 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cRangeSensor; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; + +/** + * {@link SensorMRRangeSensor} illustrates how to use the Modern Robotics + * Range Sensor. + * + * The op mode assumes that the range sensor is configured with a name of "sensor_range". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * @see MR Range Sensor + */ +@TeleOp(name = "Sensor: MR range sensor", group = "Sensor") +@Disabled // comment out or remove this line to enable this opmode +public class SensorMRRangeSensor extends LinearOpMode { + + ModernRoboticsI2cRangeSensor rangeSensor; + + @Override public void runOpMode() { + + // get a reference to our compass + rangeSensor = hardwareMap.get(ModernRoboticsI2cRangeSensor.class, "sensor_range"); + + // wait for the start button to be pressed + waitForStart(); + + while (opModeIsActive()) { + telemetry.addData("raw ultrasonic", rangeSensor.rawUltrasonic()); + telemetry.addData("raw optical", rangeSensor.rawOptical()); + telemetry.addData("cm optical", "%.2f cm", rangeSensor.cmOptical()); + telemetry.addData("cm", "%.2f cm", rangeSensor.getDistance(DistanceUnit.CM)); + telemetry.update(); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java new file mode 100644 index 0000000..30bb377 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java @@ -0,0 +1,89 @@ +/* +Copyright (c) 2018 FIRST + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted (subject to the limitations in the disclaimer below) provided that +the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of FIRST nor the names of its contributors may be used to +endorse or promote products derived from this software without specific prior +written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS +LICENSE. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.hardware.rev.Rev2mDistanceSensor; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.hardware.DistanceSensor; + +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; + +/** + * {@link SensorREV2mDistance} illustrates how to use the REV Robotics + * Time-of-Flight Range Sensor. + * + * The op mode assumes that the range sensor is configured with a name of "sensor_range". + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * @see REV Robotics Web Page + */ +@TeleOp(name = "Sensor: REV2mDistance", group = "Sensor") +@Disabled +public class SensorREV2mDistance extends LinearOpMode { + + private DistanceSensor sensorRange; + + @Override + public void runOpMode() { + // you can use this as a regular DistanceSensor. + sensorRange = hardwareMap.get(DistanceSensor.class, "sensor_range"); + + // you can also cast this to a Rev2mDistanceSensor if you want to use added + // methods associated with the Rev2mDistanceSensor class. + Rev2mDistanceSensor sensorTimeOfFlight = (Rev2mDistanceSensor)sensorRange; + + telemetry.addData(">>", "Press start to continue"); + telemetry.update(); + + waitForStart(); + while(opModeIsActive()) { + // generic DistanceSensor methods. + telemetry.addData("deviceName",sensorRange.getDeviceName() ); + telemetry.addData("range", String.format("%.01f mm", sensorRange.getDistance(DistanceUnit.MM))); + telemetry.addData("range", String.format("%.01f cm", sensorRange.getDistance(DistanceUnit.CM))); + telemetry.addData("range", String.format("%.01f m", sensorRange.getDistance(DistanceUnit.METER))); + telemetry.addData("range", String.format("%.01f in", sensorRange.getDistance(DistanceUnit.INCH))); + + // Rev2mDistanceSensor specific methods. + telemetry.addData("ID", String.format("%x", sensorTimeOfFlight.getModelID())); + telemetry.addData("did time out", Boolean.toString(sensorTimeOfFlight.didTimeoutOccur())); + + telemetry.update(); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/readme.md b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/readme.md new file mode 100644 index 0000000..326978d --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/readme.md @@ -0,0 +1,45 @@ + +## Caution +No Team-specific code should be placed or modified in this ``.../samples`` folder. 
+ +Samples should be Copied from here, and then Pasted into the team's +[/TeamCode/src/main/java/org/firstinspires/ftc/teamcode](../../../../../../../../../../TeamCode/src/main/java/org/firstinspires/ftc/teamcode) + folder, using the Android Studio cut and paste commands. This automatically changes all file and +class names to be consistent. From there, the sample can be modified to suit the team's needs. + +For more detailed instructions see the /teamcode readme. + +### Naming of Samples + +To gain a better understanding of how the samples are organized, and how to interpret the +naming system, it will help to understand the conventions that were used during their creation. + +These conventions are described (in detail) in the sample_conventions.md file in this folder. + +To summarize: A range of different samples classes will reside in the java/external/samples. +The class names will follow a naming convention which indicates the purpose of each class. +The prefix of the name will be one of the following: + +Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure + of a particular style of OpMode. These are bare bones examples. + +Sensor: This is a Sample OpMode that shows how to use a specific sensor. + It is not intended to drive a functioning robot, it is simply showing the minimal code + required to read and display the sensor values. + +Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base. + It may be used to provide a common baseline driving OpMode, or + to demonstrate how a particular sensor or concept can be used to navigate. + +Concept: This is a sample OpMode that illustrates performing a specific function or concept. + These may be complex, but their operation should be explained clearly in the comments, + or the comments should reference an external doc, guide or tutorial. + Each OpMode should try to only demonstrate a single concept so they are easy to + locate based on their name. These OpModes may not produce a drivable robot. + +After the prefix, other conventions will apply: + +* Sensor class names are constructed as: Sensor - Company - Type +* Robot class names are constructed as: Robot - Mode - Action - OpModetype +* Concept class names are constructed as: Concept - Topic - OpModetype + diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/sample_conventions.md b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/sample_conventions.md new file mode 100644 index 0000000..45968ef --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/sample_conventions.md @@ -0,0 +1,108 @@ +## Sample Class/Opmode conventions +#### V 1.1.0 8/9/2017 + +This document defines the FTC Sample OpMode and Class conventions. + +### OpMode Name + +To gain a better understanding of how the samples are organized, and how to interpret the +naming system, it will help to understand the conventions that were used during their creation. + +To summarize: A range of different samples classes will reside in the java/external/samples. +The class names will follow a naming convention which indicates the purpose of each class. +The prefix of the name will be one of the following: + +Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure + of a particular style of OpMode. These are bare bones examples. + +Sensor: This is a Sample OpMode that shows how to use a specific sensor. 
+ It is not intended to drive a functioning robot, it is simply showing the minimal code + required to read and display the sensor values. + +Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base. + It may be used to provide a common baseline driving OpMode, or + to demonstrate how a particular sensor or concept can be used to navigate. + +Concept: This is a sample OpMode that illustrates performing a specific function or concept. + These may be complex, but their operation should be explained clearly in the comments, + or the comments should reference an external doc, guide or tutorial. + Each OpMode should try to only demonstrate a single concept so they are easy to + locate based on their name. These OpModes may not produce a drivable robot. + +After the prefix, other conventions will apply: + +* Sensor class names should constructed as: Sensor - Company - Type +* Robot class names should be constructed as: Robot - Mode - Action - OpModetype +* Concept class names should be constructed as: Concept - Topic - OpModetype + +### Sample OpMode Content/Style + +Code is formatted as per the Google Style Guide: + +https://google.github.io/styleguide/javaguide.html + +With “Sensor” and “Hardware” samples, the code should demonstrate the essential function, +and not be embellished with too much additional “clever” code. If a sensor has special +addressing needs, or has a variety of modes or outputs, these should be demonstrated as +simply as possible. + +Special programming methods, or robot control techniques should be reserved for “Concept” Samples, +and where possible, Samples should strive to only demonstrate a single concept, +eg: State machine coding, or a User Menu system, and not combine them into a single “all inclusive” +sample. This will prevent an “all inclusive” Sample being deleted just because one part of it +becomes obsolete. + +### Device Configuration Names + +The following device names are used in the external samples + +** Motors: +left_drive +right_drive +left_arm + +** Servos: +left_hand +right_hand +arm +claw + +** Sensors: +sensor_color +sensor_ir +sensor_light +sensor_ods +sensor_range +sensor_touch +sensor_color_distance +sensor_digital +digin +digout + +** Localization: +compass +gyro +imu +navx + +### Device Object Names + +Device Object names should use the same words as the device’s configuration name, but they +should be re-structured to be a suitable Java variable name. This should keep the same word order, +but adopt the style of beginning with a lower case letter, and then each subsequent word +starting with an upper case letter. + +Eg: from the examples above: tool, leftMotor, rightClawServo, rearLightSensor. + +Note: Sometimes it’s helpful to put the device type first, followed by the variant. +eg: motorLeft and motorRight, but this should only be done if the same word order +is used on the device configuration name. + +### OpMode code Comments + +Sample comments should read like normal code comments, that is, as an explanation of what the +sample code is doing. They should NOT be directives to the user, +like: “insert your joystick code here” as these comments typically aren’t +detailed enough to be useful. They also often get left in the code and become garbage. + +Instead, an example of the joystick code should be shown with a comment describing what it is doing. 
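+ 
+ For illustration, the kind of self-explanatory snippet that last paragraph calls for (motorLeft and
+ motorRight are the hypothetical object names used as examples earlier in this document):
+ 
+     // Tank drive: each wheel follows its own gamepad stick. The Y axis is negated because
+     // pushing a stick forward reports a negative value.
+     motorLeft.setPower(-gamepad1.left_stick_y);
+     motorRight.setPower(-gamepad1.right_stick_y);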
diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcOpModeRegister.java b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcOpModeRegister.java new file mode 100644 index 0000000..9a94f54 --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcOpModeRegister.java @@ -0,0 +1,70 @@ +/* Copyright (c) 2014, 2015 Qualcomm Technologies Inc + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted (subject to the limitations in the disclaimer below) provided that +the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of Qualcomm Technologies Inc nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS +LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ + +package org.firstinspires.ftc.robotcontroller.internal; + +import com.qualcomm.robotcore.eventloop.opmode.OpModeManager; +import com.qualcomm.robotcore.eventloop.opmode.OpModeRegister; + +import org.firstinspires.ftc.robotcontroller.external.samples.ConceptNullOp; + +/** + * {@link FtcOpModeRegister} is responsible for registering opmodes for use in an FTC game. + * @see #register(OpModeManager) + */ +public class FtcOpModeRegister implements OpModeRegister { + + /** + * {@link #register(OpModeManager)} is called by the SDK game in order to register + * OpMode classes or instances that will participate in an FTC game. + * + * There are two mechanisms by which an OpMode may be registered. + * + * 1) The preferred method is by means of class annotations in the OpMode itself. + * See, for example the class annotations in {@link ConceptNullOp}. + * + * 2) The other, retired, method is to modify this {@link #register(OpModeManager)} + * method to include explicit calls to OpModeManager.register(). + * This method of modifying this file directly is discouraged, as it + * makes updates to the SDK harder to integrate into your code. + * + * @param manager the object which contains methods for carrying out OpMode registrations + * + * @see com.qualcomm.robotcore.eventloop.opmode.TeleOp + * @see com.qualcomm.robotcore.eventloop.opmode.Autonomous + */ + public void register(OpModeManager manager) { + + /** + * Any manual OpMode class registrations should go here. 
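+ * For example, a manual registration would look like the following (the OpMode class named
+ * here is hypothetical):
+ *
+ *   manager.register("My TeleOp", MyTeleOp.class);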
+ */ + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java new file mode 100644 index 0000000..327d3ec --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java @@ -0,0 +1,845 @@ +/* Copyright (c) 2014, 2015 Qualcomm Technologies Inc + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted (subject to the limitations in the disclaimer below) provided that +the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list +of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of Qualcomm Technologies Inc nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS +LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ + +package org.firstinspires.ftc.robotcontroller.internal; + +import android.app.ActionBar; +import android.app.Activity; +import android.app.ActivityManager; +import android.content.ComponentName; +import android.content.Context; +import android.content.Intent; +import android.content.ServiceConnection; +import android.content.SharedPreferences; +import android.content.res.Configuration; +import android.hardware.usb.UsbDevice; +import android.hardware.usb.UsbManager; +import android.net.wifi.WifiManager; +import android.os.Bundle; +import android.os.IBinder; +import android.preference.PreferenceManager; +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.annotation.StringRes; +import android.view.Menu; +import android.view.MenuItem; +import android.view.MotionEvent; +import android.view.View; +import android.view.WindowManager; +import android.webkit.WebView; +import android.widget.ImageButton; +import android.widget.LinearLayout; +import android.widget.LinearLayout.LayoutParams; +import android.widget.PopupMenu; +import android.widget.TextView; + +import com.google.blocks.ftcrobotcontroller.ProgrammingWebHandlers; +import com.google.blocks.ftcrobotcontroller.runtime.BlocksOpMode; +import com.qualcomm.ftccommon.ClassManagerFactory; +import com.qualcomm.ftccommon.FtcAboutActivity; +import com.qualcomm.ftccommon.FtcEventLoop; +import com.qualcomm.ftccommon.FtcEventLoopIdle; +import com.qualcomm.ftccommon.FtcRobotControllerService; +import com.qualcomm.ftccommon.FtcRobotControllerService.FtcRobotControllerBinder; +import com.qualcomm.ftccommon.FtcRobotControllerSettingsActivity; +import com.qualcomm.ftccommon.LaunchActivityConstantsList; +import com.qualcomm.ftccommon.LaunchActivityConstantsList.RequestCode; +import com.qualcomm.ftccommon.Restarter; +import com.qualcomm.ftccommon.UpdateUI; +import com.qualcomm.ftccommon.configuration.EditParameters; +import com.qualcomm.ftccommon.configuration.FtcLoadFileActivity; +import com.qualcomm.ftccommon.configuration.RobotConfigFile; +import com.qualcomm.ftccommon.configuration.RobotConfigFileManager; +import com.qualcomm.ftcrobotcontroller.BuildConfig; +import com.qualcomm.ftcrobotcontroller.R; +import com.qualcomm.hardware.HardwareFactory; +import com.qualcomm.robotcore.eventloop.EventLoopManager; +import com.qualcomm.robotcore.eventloop.opmode.FtcRobotControllerServiceState; +import com.qualcomm.robotcore.eventloop.opmode.OpModeRegister; +import com.qualcomm.robotcore.hardware.configuration.LynxConstants; +import com.qualcomm.robotcore.hardware.configuration.Utility; +import com.qualcomm.robotcore.robot.Robot; +import com.qualcomm.robotcore.robot.RobotState; +import com.qualcomm.robotcore.util.ClockWarningSource; +import com.qualcomm.robotcore.util.Device; +import com.qualcomm.robotcore.util.Dimmer; +import com.qualcomm.robotcore.util.ImmersiveMode; +import com.qualcomm.robotcore.util.RobotLog; +import com.qualcomm.robotcore.util.WebServer; +import com.qualcomm.robotcore.wifi.NetworkConnection; +import com.qualcomm.robotcore.wifi.NetworkConnectionFactory; +import com.qualcomm.robotcore.wifi.NetworkType; + +import org.firstinspires.ftc.ftccommon.external.SoundPlayingRobotMonitor; +import org.firstinspires.ftc.ftccommon.internal.AnnotatedHooksClassFilter; +import org.firstinspires.ftc.ftccommon.internal.FtcRobotControllerWatchdogService; +import org.firstinspires.ftc.ftccommon.internal.ProgramAndManageActivity; +import org.firstinspires.ftc.onbotjava.ExternalLibraries; +import 
org.firstinspires.ftc.onbotjava.OnBotJavaHelperImpl; +import org.firstinspires.ftc.onbotjava.OnBotJavaProgrammingMode; +import org.firstinspires.ftc.robotcore.external.navigation.MotionDetection; +import org.firstinspires.ftc.robotcore.internal.hardware.android.AndroidBoard; +import org.firstinspires.ftc.robotcore.internal.network.DeviceNameManagerFactory; +import org.firstinspires.ftc.robotcore.internal.network.PreferenceRemoterRC; +import org.firstinspires.ftc.robotcore.internal.network.StartResult; +import org.firstinspires.ftc.robotcore.internal.network.WifiDirectChannelChanger; +import org.firstinspires.ftc.robotcore.internal.network.WifiMuteEvent; +import org.firstinspires.ftc.robotcore.internal.network.WifiMuteStateMachine; +import org.firstinspires.ftc.robotcore.internal.opmode.ClassManager; +import org.firstinspires.ftc.robotcore.internal.opmode.OnBotJavaHelper; +import org.firstinspires.ftc.robotcore.internal.system.AppAliveNotifier; +import org.firstinspires.ftc.robotcore.internal.system.AppUtil; +import org.firstinspires.ftc.robotcore.internal.system.Assert; +import org.firstinspires.ftc.robotcore.internal.system.PreferencesHelper; +import org.firstinspires.ftc.robotcore.internal.system.ServiceController; +import org.firstinspires.ftc.robotcore.internal.ui.ThemedActivity; +import org.firstinspires.ftc.robotcore.internal.ui.UILocation; +import org.firstinspires.ftc.robotcore.internal.webserver.RobotControllerWebInfo; +import org.firstinspires.ftc.robotserver.internal.programmingmode.ProgrammingModeManager; +import org.firstinspires.inspection.RcInspectionActivity; +import org.threeten.bp.YearMonth; +import org.xmlpull.v1.XmlPullParserException; + +import java.io.FileNotFoundException; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; + +@SuppressWarnings("WeakerAccess") +public class FtcRobotControllerActivity extends Activity + { + public static final String TAG = "RCActivity"; + public String getTag() { return TAG; } + + private static final int REQUEST_CONFIG_WIFI_CHANNEL = 1; + private static final int NUM_GAMEPADS = 2; + + protected WifiManager.WifiLock wifiLock; + protected RobotConfigFileManager cfgFileMgr; + + private OnBotJavaHelper onBotJavaHelper; + + protected ProgrammingModeManager programmingModeManager; + + protected UpdateUI.Callback callback; + protected Context context; + protected Utility utility; + protected StartResult prefRemoterStartResult = new StartResult(); + protected StartResult deviceNameStartResult = new StartResult(); + protected PreferencesHelper preferencesHelper; + protected final SharedPreferencesListener sharedPreferencesListener = new SharedPreferencesListener(); + + protected ImageButton buttonMenu; + protected TextView textDeviceName; + protected TextView textNetworkConnectionStatus; + protected TextView textRobotStatus; + protected TextView[] textGamepad = new TextView[NUM_GAMEPADS]; + protected TextView textOpMode; + protected TextView textErrorMessage; + protected ImmersiveMode immersion; + + protected UpdateUI updateUI; + protected Dimmer dimmer; + protected LinearLayout entireScreenLayout; + + protected FtcRobotControllerService controllerService; + protected NetworkType networkType; + + protected FtcEventLoop eventLoop; + protected Queue<UsbDevice> receivedUsbAttachmentNotifications; + + protected WifiMuteStateMachine wifiMuteStateMachine; + protected MotionDetection motionDetection; + + private static boolean permissionsValidated = false; + + private WifiDirectChannelChanger
wifiDirectChannelChanger; + + protected class RobotRestarter implements Restarter { + + public void requestRestart() { + requestRobotRestart(); + } + + } + + protected boolean serviceShouldUnbind = false; + protected ServiceConnection connection = new ServiceConnection() { + @Override + public void onServiceConnected(ComponentName name, IBinder service) { + FtcRobotControllerBinder binder = (FtcRobotControllerBinder) service; + onServiceBind(binder.getService()); + } + + @Override + public void onServiceDisconnected(ComponentName name) { + RobotLog.vv(FtcRobotControllerService.TAG, "%s.controllerService=null", TAG); + controllerService = null; + } + }; + + @Override + protected void onNewIntent(Intent intent) { + super.onNewIntent(intent); + + if (UsbManager.ACTION_USB_DEVICE_ATTACHED.equals(intent.getAction())) { + UsbDevice usbDevice = intent.getParcelableExtra(UsbManager.EXTRA_DEVICE); + RobotLog.vv(TAG, "ACTION_USB_DEVICE_ATTACHED: %s", usbDevice.getDeviceName()); + + if (usbDevice != null) { // paranoia + // We might get attachment notifications before the event loop is set up, so + // we hold on to them and pass them along only when we're good and ready. + if (receivedUsbAttachmentNotifications != null) { // *total* paranoia + receivedUsbAttachmentNotifications.add(usbDevice); + passReceivedUsbAttachmentsToEventLoop(); + } + } + } + } + + protected void passReceivedUsbAttachmentsToEventLoop() { + if (this.eventLoop != null) { + for (;;) { + UsbDevice usbDevice = receivedUsbAttachmentNotifications.poll(); + if (usbDevice == null) + break; + this.eventLoop.onUsbDeviceAttached(usbDevice); + } + } + else { + // Paranoia: we don't want the pending list to grow without bound when we don't + // (yet) have an event loop + while (receivedUsbAttachmentNotifications.size() > 100) { + receivedUsbAttachmentNotifications.poll(); + } + } + } + + /** + * There are cases where a permission may be revoked and the system restart will restart the + * FtcRobotControllerActivity, instead of the launch activity. Detect when that happens, and throw + * the device back to the permission validator activity. + */ + protected boolean enforcePermissionValidator() { + if (!permissionsValidated) { + RobotLog.vv(TAG, "Redirecting to permission validator"); + Intent permissionValidatorIntent = new Intent(AppUtil.getDefContext(), PermissionValidatorWrapper.class); + startActivity(permissionValidatorIntent); + finish(); + return true; + } else { + RobotLog.vv(TAG, "Permissions validated already"); + return false; + } + } + + public static void setPermissionsValidated() { + permissionsValidated = true; + } + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + + if (enforcePermissionValidator()) { + return; + } + + RobotLog.onApplicationStart(); // robustify against onCreate() following onDestroy() but using the same app instance, which apparently does happen + RobotLog.vv(TAG, "onCreate()"); + ThemedActivity.appAppThemeToActivity(getTag(), this); // do this way instead of inherit to help AppInventor + + // Oddly, sometimes after a crash & restart the root activity will be something unexpected, like from the before crash? 
We don't yet understand + RobotLog.vv(TAG, "rootActivity is of class %s", AppUtil.getInstance().getRootActivity().getClass().getSimpleName()); + RobotLog.vv(TAG, "launchActivity is of class %s", FtcRobotControllerWatchdogService.launchActivity()); + Assert.assertTrue(FtcRobotControllerWatchdogService.isLaunchActivity(AppUtil.getInstance().getRootActivity())); + Assert.assertTrue(AppUtil.getInstance().isRobotController()); + + // Quick check: should we pretend we're not here, and so allow the Lynx to operate as + // a stand-alone USB-connected module? + if (LynxConstants.isRevControlHub()) { + // Double-sure check that we can talk to the DB over the serial TTY + AndroidBoard.getInstance().getAndroidBoardIsPresentPin().setState(true); + } + + context = this; + utility = new Utility(this); + + DeviceNameManagerFactory.getInstance().start(deviceNameStartResult); + + PreferenceRemoterRC.getInstance().start(prefRemoterStartResult); + + receivedUsbAttachmentNotifications = new ConcurrentLinkedQueue(); + eventLoop = null; + + setContentView(R.layout.activity_ftc_controller); + + preferencesHelper = new PreferencesHelper(TAG, context); + preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true); + preferencesHelper.getSharedPreferences().registerOnSharedPreferenceChangeListener(sharedPreferencesListener); + + // Check if this RC app is from a later FTC season than what was installed previously + int ftcSeasonYearOfPreviouslyInstalledRc = preferencesHelper.readInt(getString(R.string.pref_ftc_season_year_of_current_rc), 0); + int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(AppUtil.getInstance().getLocalSdkBuildMonth()).getValue(); + if (ftcSeasonYearOfCurrentlyInstalledRc > ftcSeasonYearOfPreviouslyInstalledRc) { + preferencesHelper.writeIntPrefIfDifferent(getString(R.string.pref_ftc_season_year_of_current_rc), ftcSeasonYearOfCurrentlyInstalledRc); + // Since it's a new FTC season, we should reset certain settings back to their default values. 
+ preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_2_4_ghz_band), true); + preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_obsolete_software), true); + preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_mismatched_app_versions), true); + preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_incorrect_clocks), true); + } + + entireScreenLayout = (LinearLayout) findViewById(R.id.entire_screen); + buttonMenu = (ImageButton) findViewById(R.id.menu_buttons); + buttonMenu.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + PopupMenu popupMenu = new PopupMenu(FtcRobotControllerActivity.this, v); + popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() { + @Override + public boolean onMenuItemClick(MenuItem item) { + return onOptionsItemSelected(item); // Delegate to the handler for the hardware menu button + } + }); + popupMenu.inflate(R.menu.ftc_robot_controller); + AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods( + FtcRobotControllerActivity.this, popupMenu.getMenu()); + popupMenu.show(); + } + }); + + updateMonitorLayout(getResources().getConfiguration()); + + BlocksOpMode.setActivityAndWebView(this, (WebView) findViewById(R.id.webViewBlocksRuntime)); + + ExternalLibraries.getInstance().onCreate(); + onBotJavaHelper = new OnBotJavaHelperImpl(); + + /* + * Paranoia as the ClassManagerFactory requires EXTERNAL_STORAGE permissions + * and we've seen on the DS where the finish() call above does not short-circuit + * the onCreate() call for the activity and then we crash here because we don't + * have permissions. So... + */ + if (permissionsValidated) { + ClassManager.getInstance().setOnBotJavaClassHelper(onBotJavaHelper); + ClassManagerFactory.registerFilters(); + ClassManagerFactory.processAllClasses(); + } + + cfgFileMgr = new RobotConfigFileManager(this); + + // Clean up 'dirty' status after a possible crash + RobotConfigFile configFile = cfgFileMgr.getActiveConfig(); + if (configFile.isDirty()) { + configFile.markClean(); + cfgFileMgr.setActiveConfig(false, configFile); + } + + textDeviceName = (TextView) findViewById(R.id.textDeviceName); + textNetworkConnectionStatus = (TextView) findViewById(R.id.textNetworkConnectionStatus); + textRobotStatus = (TextView) findViewById(R.id.textRobotStatus); + textOpMode = (TextView) findViewById(R.id.textOpMode); + textErrorMessage = (TextView) findViewById(R.id.textErrorMessage); + textGamepad[0] = (TextView) findViewById(R.id.textGamepad1); + textGamepad[1] = (TextView) findViewById(R.id.textGamepad2); + immersion = new ImmersiveMode(getWindow().getDecorView()); + dimmer = new Dimmer(this); + dimmer.longBright(); + + programmingModeManager = new ProgrammingModeManager(); + programmingModeManager.register(new ProgrammingWebHandlers()); + programmingModeManager.register(new OnBotJavaProgrammingMode()); + + updateUI = createUpdateUI(); + callback = createUICallback(updateUI); + + PreferenceManager.setDefaultValues(this, R.xml.app_settings, false); + + WifiManager wifiManager = (WifiManager) getApplicationContext().getSystemService(Context.WIFI_SERVICE); + wifiLock = wifiManager.createWifiLock(WifiManager.WIFI_MODE_FULL_HIGH_PERF, ""); + + hittingMenuButtonBrightensScreen(); + + wifiLock.acquire(); + callback.networkConnectionUpdate(NetworkConnection.NetworkEvent.DISCONNECTED); + readNetworkType(); + 
ServiceController.startService(FtcRobotControllerWatchdogService.class); + bindToService(); + RobotLog.logAppInfo(); + RobotLog.logDeviceInfo(); + AndroidBoard.getInstance().logAndroidBoardInfo(); + + if (preferencesHelper.readBoolean(getString(R.string.pref_wifi_automute), false)) { + initWifiMute(true); + } + + FtcAboutActivity.setBuildTimeFromBuildConfig(BuildConfig.APP_BUILD_TIME); + + // check to see if there is a preferred Wi-Fi to use. + checkPreferredChannel(); + + AnnotatedHooksClassFilter.getInstance().callOnCreateMethods(this); + } + + protected UpdateUI createUpdateUI() { + Restarter restarter = new RobotRestarter(); + UpdateUI result = new UpdateUI(this, dimmer); + result.setRestarter(restarter); + result.setTextViews(textNetworkConnectionStatus, textRobotStatus, textGamepad, textOpMode, textErrorMessage, textDeviceName); + return result; + } + + protected UpdateUI.Callback createUICallback(UpdateUI updateUI) { + UpdateUI.Callback result = updateUI.new Callback(); + result.setStateMonitor(new SoundPlayingRobotMonitor()); + return result; + } + + @Override + protected void onStart() { + super.onStart(); + RobotLog.vv(TAG, "onStart()"); + + entireScreenLayout.setOnTouchListener(new View.OnTouchListener() { + @Override + public boolean onTouch(View v, MotionEvent event) { + dimmer.handleDimTimer(); + return false; + } + }); + } + + @Override + protected void onResume() { + super.onResume(); + RobotLog.vv(TAG, "onResume()"); + + // In case the user just got back from fixing their clock, refresh ClockWarningSource + ClockWarningSource.getInstance().onPossibleRcClockUpdate(); + } + + @Override + protected void onPause() { + super.onPause(); + RobotLog.vv(TAG, "onPause()"); + } + + @Override + protected void onStop() { + // Note: this gets called even when the configuration editor is launched. That is, it gets + // called surprisingly often. So, we don't actually do much here. + super.onStop(); + RobotLog.vv(TAG, "onStop()"); + } + + @Override + protected void onDestroy() { + super.onDestroy(); + RobotLog.vv(TAG, "onDestroy()"); + + shutdownRobot(); // Ensure the robot is put away to bed + if (callback != null) callback.close(); + + PreferenceRemoterRC.getInstance().stop(prefRemoterStartResult); + DeviceNameManagerFactory.getInstance().stop(deviceNameStartResult); + + unbindFromService(); + // If the app manually (?) is stopped, then we don't need the auto-starting function (?) + ServiceController.stopService(FtcRobotControllerWatchdogService.class); + if (wifiLock != null) wifiLock.release(); + if (preferencesHelper != null) preferencesHelper.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(sharedPreferencesListener); + + RobotLog.cancelWriteLogcatToDisk(); + + AnnotatedHooksClassFilter.getInstance().callOnDestroyMethods(this); + } + + protected void bindToService() { + readNetworkType(); + Intent intent = new Intent(this, FtcRobotControllerService.class); + intent.putExtra(NetworkConnectionFactory.NETWORK_CONNECTION_TYPE, networkType); + serviceShouldUnbind = bindService(intent, connection, Context.BIND_AUTO_CREATE); + } + + protected void unbindFromService() { + if (serviceShouldUnbind) { + unbindService(connection); + serviceShouldUnbind = false; + } + } + + protected void readNetworkType() { + // Control hubs are always running the access point model. Everything else, for the time + // being always runs the Wi-Fi Direct model. 
+ if (Device.isRevControlHub() == true) { + networkType = NetworkType.RCWIRELESSAP; + } else { + networkType = NetworkType.fromString(preferencesHelper.readString(context.getString(R.string.pref_pairing_kind), NetworkType.globalDefaultAsString())); + } + + // update the app_settings + preferencesHelper.writeStringPrefIfDifferent(context.getString(R.string.pref_pairing_kind), networkType.toString()); + } + + @Override + public void onWindowFocusChanged(boolean hasFocus) { + super.onWindowFocusChanged(hasFocus); + + if (hasFocus) { + immersion.hideSystemUI(); + getWindow().setFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION, WindowManager.LayoutParams.FLAG_TRANSLUCENT_NAVIGATION); + } + } + + @Override + public boolean onCreateOptionsMenu(Menu menu) { + getMenuInflater().inflate(R.menu.ftc_robot_controller, menu); + AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods(this, menu); + return true; + } + + private boolean isRobotRunning() { + if (controllerService == null) { + return false; + } + + Robot robot = controllerService.getRobot(); + + if ((robot == null) || (robot.eventLoopManager == null)) { + return false; + } + + RobotState robotState = robot.eventLoopManager.state; + + if (robotState != RobotState.RUNNING) { + return false; + } else { + return true; + } + } + + @Override + public boolean onOptionsItemSelected(MenuItem item) { + int id = item.getItemId(); + + if (id == R.id.action_program_and_manage) { + if (isRobotRunning()) { + Intent programmingModeIntent = new Intent(AppUtil.getDefContext(), ProgramAndManageActivity.class); + RobotControllerWebInfo webInfo = programmingModeManager.getWebServer().getConnectionInformation(); + programmingModeIntent.putExtra(LaunchActivityConstantsList.RC_WEB_INFO, webInfo.toJson()); + startActivity(programmingModeIntent); + } else { + AppUtil.getInstance().showToast(UILocation.ONLY_LOCAL, context.getString(R.string.toastWifiUpBeforeProgrammingMode)); + } + } else if (id == R.id.action_inspection_mode) { + Intent inspectionModeIntent = new Intent(AppUtil.getDefContext(), RcInspectionActivity.class); + startActivity(inspectionModeIntent); + return true; + } else if (id == R.id.action_restart_robot) { + dimmer.handleDimTimer(); + AppUtil.getInstance().showToast(UILocation.BOTH, context.getString(R.string.toastRestartingRobot)); + requestRobotRestart(); + return true; + } + else if (id == R.id.action_configure_robot) { + EditParameters parameters = new EditParameters(); + Intent intentConfigure = new Intent(AppUtil.getDefContext(), FtcLoadFileActivity.class); + parameters.putIntent(intentConfigure); + startActivityForResult(intentConfigure, RequestCode.CONFIGURE_ROBOT_CONTROLLER.ordinal()); + } + else if (id == R.id.action_settings) { + // historical: this once erroneously used FTC_CONFIGURE_REQUEST_CODE_ROBOT_CONTROLLER + Intent settingsIntent = new Intent(AppUtil.getDefContext(), FtcRobotControllerSettingsActivity.class); + startActivityForResult(settingsIntent, RequestCode.SETTINGS_ROBOT_CONTROLLER.ordinal()); + return true; + } + else if (id == R.id.action_about) { + Intent intent = new Intent(AppUtil.getDefContext(), FtcAboutActivity.class); + startActivity(intent); + return true; + } + else if (id == R.id.action_exit_app) { + + //Clear backstack and everything to prevent edge case where VM might be + //restarted (after it was exited) if more than one activity was on the + //backstack for some reason. 
+ finishAffinity(); + + //For lollipop and up, we can clear ourselves from the recents list too + if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { + ActivityManager manager = (ActivityManager) getSystemService(ACTIVITY_SERVICE); + List<ActivityManager.AppTask> tasks = manager.getAppTasks(); + + for (ActivityManager.AppTask task : tasks) { + task.finishAndRemoveTask(); + } + } + + // Allow the user to use the Control Hub operating system's UI, instead of relaunching the app + AppAliveNotifier.getInstance().disableAppWatchdogUntilNextAppStart(); + + //Finally, nuke the VM from orbit + AppUtil.getInstance().exitApplication(); + + return true; + } + + return super.onOptionsItemSelected(item); + } + + @Override + public void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + // don't destroy assets on screen rotation + updateMonitorLayout(newConfig); + } + + /** + * Updates the orientation of monitorContainer (which contains cameraMonitorView and + * tfodMonitorView) based on the given configuration. Makes the children split the space. + */ + private void updateMonitorLayout(Configuration configuration) { + LinearLayout monitorContainer = (LinearLayout) findViewById(R.id.monitorContainer); + if (configuration.orientation == Configuration.ORIENTATION_LANDSCAPE) { + // When the phone is landscape, lay out the monitor views horizontally. + monitorContainer.setOrientation(LinearLayout.HORIZONTAL); + for (int i = 0; i < monitorContainer.getChildCount(); i++) { + View view = monitorContainer.getChildAt(i); + view.setLayoutParams(new LayoutParams(0, LayoutParams.MATCH_PARENT, 1 /* weight */)); + } + } else { + // When the phone is portrait, lay out the monitor views vertically. + monitorContainer.setOrientation(LinearLayout.VERTICAL); + for (int i = 0; i < monitorContainer.getChildCount(); i++) { + View view = monitorContainer.getChildAt(i); + view.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, 0, 1 /* weight */)); + } + } + monitorContainer.requestLayout(); + } + + @Override + protected void onActivityResult(int request, int result, Intent intent) { + if (request == REQUEST_CONFIG_WIFI_CHANNEL) { + if (result == RESULT_OK) { + AppUtil.getInstance().showToast(UILocation.BOTH, context.getString(R.string.toastWifiConfigurationComplete)); + } + } + // was some historical confusion about launch codes here, so we err safely + if (request == RequestCode.CONFIGURE_ROBOT_CONTROLLER.ordinal() || request == RequestCode.SETTINGS_ROBOT_CONTROLLER.ordinal()) { + // We always do a refresh, whether it was a cancel or an OK, for robustness + shutdownRobot(); + cfgFileMgr.getActiveConfigAndUpdateUI(); + updateUIAndRequestRobotSetup(); + } + } + + public void onServiceBind(final FtcRobotControllerService service) { + RobotLog.vv(FtcRobotControllerService.TAG, "%s.controllerService=bound", TAG); + controllerService = service; + updateUI.setControllerService(controllerService); + + controllerService.setOnBotJavaHelper(onBotJavaHelper); + + updateUIAndRequestRobotSetup(); + programmingModeManager.setState(new FtcRobotControllerServiceState() { + @NonNull + @Override + public WebServer getWebServer() { + return service.getWebServer(); + } + + @Nullable + @Override + public OnBotJavaHelper getOnBotJavaHelper() { + return service.getOnBotJavaHelper(); + } + + @Override + public EventLoopManager getEventLoopManager() { + return service.getRobot().eventLoopManager; + } + }); + + AnnotatedHooksClassFilter.getInstance().callWebHandlerRegistrarMethods(this, 
service.getWebServer().getWebHandlerManager()); + } + + private void updateUIAndRequestRobotSetup() { + if (controllerService != null) { + callback.networkConnectionUpdate(controllerService.getNetworkConnectionStatus()); + callback.updateRobotStatus(controllerService.getRobotStatus()); + // Only show this first-time toast on headless systems: what we have now on non-headless suffices + requestRobotSetup(LynxConstants.isRevControlHub() + ? new Runnable() { + @Override public void run() { + showRestartRobotCompleteToast(R.string.toastRobotSetupComplete); + } + } + : null); + } + } + + private void requestRobotSetup(@Nullable Runnable runOnComplete) { + if (controllerService == null) return; + + RobotConfigFile file = cfgFileMgr.getActiveConfigAndUpdateUI(); + HardwareFactory hardwareFactory = new HardwareFactory(context); + try { + hardwareFactory.setXmlPullParser(file.getXml()); + } catch (FileNotFoundException | XmlPullParserException e) { + RobotLog.ww(TAG, e, "Unable to set configuration file %s. Falling back on noConfig.", file.getName()); + file = RobotConfigFile.noConfig(cfgFileMgr); + try { + hardwareFactory.setXmlPullParser(file.getXml()); + cfgFileMgr.setActiveConfigAndUpdateUI(false, file); + } catch (FileNotFoundException | XmlPullParserException e1) { + RobotLog.ee(TAG, e1, "Failed to fall back on noConfig"); + } + } + + OpModeRegister userOpModeRegister = createOpModeRegister(); + eventLoop = new FtcEventLoop(hardwareFactory, userOpModeRegister, callback, this); + FtcEventLoopIdle idleLoop = new FtcEventLoopIdle(hardwareFactory, userOpModeRegister, callback, this); + + controllerService.setCallback(callback); + controllerService.setupRobot(eventLoop, idleLoop, runOnComplete); + + passReceivedUsbAttachmentsToEventLoop(); + AndroidBoard.showErrorIfUnknownControlHub(); + + AnnotatedHooksClassFilter.getInstance().callOnCreateEventLoopMethods(this, eventLoop); + } + + protected OpModeRegister createOpModeRegister() { + return new FtcOpModeRegister(); + } + + private void shutdownRobot() { + if (controllerService != null) controllerService.shutdownRobot(); + } + + private void requestRobotRestart() { + AppUtil.getInstance().showToast(UILocation.BOTH, AppUtil.getDefContext().getString(R.string.toastRestartingRobot)); + // + RobotLog.clearGlobalErrorMsg(); + RobotLog.clearGlobalWarningMsg(); + shutdownRobot(); + requestRobotSetup(new Runnable() { + @Override public void run() { + showRestartRobotCompleteToast(R.string.toastRestartRobotComplete); + } + }); + } + + private void showRestartRobotCompleteToast(@StringRes int resid) { + AppUtil.getInstance().showToast(UILocation.BOTH, AppUtil.getDefContext().getString(resid)); + } + + private void checkPreferredChannel() { + // For P2P network, check to see what preferred channel is. + if (networkType == NetworkType.WIFIDIRECT) { + int prefChannel = preferencesHelper.readInt(getString(com.qualcomm.ftccommon.R.string.pref_wifip2p_channel), -1); + if (prefChannel == -1) { + prefChannel = 0; + RobotLog.vv(TAG, "pref_wifip2p_channel: No preferred channel defined. Will use a default value of %d", prefChannel); + } else { + RobotLog.vv(TAG, "pref_wifip2p_channel: Found existing preferred channel (%d).", prefChannel); + } + + // attempt to set the preferred channel. 
+ RobotLog.vv(TAG, "pref_wifip2p_channel: attempting to set preferred channel..."); + wifiDirectChannelChanger = new WifiDirectChannelChanger(); + wifiDirectChannelChanger.changeToChannel(prefChannel); + } + } + + protected void hittingMenuButtonBrightensScreen() { + ActionBar actionBar = getActionBar(); + if (actionBar != null) { + actionBar.addOnMenuVisibilityListener(new ActionBar.OnMenuVisibilityListener() { + @Override + public void onMenuVisibilityChanged(boolean isVisible) { + if (isVisible) { + dimmer.handleDimTimer(); + } + } + }); + } + } + + protected class SharedPreferencesListener implements SharedPreferences.OnSharedPreferenceChangeListener { + @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { + if (key.equals(context.getString(R.string.pref_app_theme))) { + ThemedActivity.restartForAppThemeChange(getTag(), getString(R.string.appThemeChangeRestartNotifyRC)); + } else if (key.equals(context.getString(R.string.pref_wifi_automute))) { + if (preferencesHelper.readBoolean(context.getString(R.string.pref_wifi_automute), false)) { + initWifiMute(true); + } else { + initWifiMute(false); + } + } + } + } + + protected void initWifiMute(boolean enable) { + if (enable) { + wifiMuteStateMachine = new WifiMuteStateMachine(); + wifiMuteStateMachine.initialize(); + wifiMuteStateMachine.start(); + + motionDetection = new MotionDetection(2.0, 10); + motionDetection.startListening(); + motionDetection.registerListener(new MotionDetection.MotionDetectionListener() { + @Override + public void onMotionDetected(double vector) + { + wifiMuteStateMachine.consumeEvent(WifiMuteEvent.USER_ACTIVITY); + } + }); + } else { + wifiMuteStateMachine.stop(); + wifiMuteStateMachine = null; + motionDetection.stopListening(); + motionDetection.purgeListeners(); + motionDetection = null; + } + } + + @Override + public void onUserInteraction() { + if (wifiMuteStateMachine != null) { + wifiMuteStateMachine.consumeEvent(WifiMuteEvent.USER_ACTIVITY); + } + } +} diff --git a/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java new file mode 100644 index 0000000..a0094bc --- /dev/null +++ b/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2018 Craig MacFarlane + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted + * (subject to the limitations in the disclaimer below) provided that the following conditions are + * met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions + * and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions + * and the following disclaimer in the documentation and/or other materials provided with the + * distribution. + * + * Neither the name of Craig MacFarlane nor the names of its contributors may be used to + * endorse or promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. 
THIS + * SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, + * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package org.firstinspires.ftc.robotcontroller.internal; + +import android.Manifest; +import android.os.Bundle; + +import com.qualcomm.ftcrobotcontroller.R; + +import org.firstinspires.ftc.robotcore.internal.system.Misc; +import org.firstinspires.ftc.robotcore.internal.system.PermissionValidatorActivity; + +import java.util.ArrayList; +import java.util.List; + +public class PermissionValidatorWrapper extends PermissionValidatorActivity { + + private final String TAG = "PermissionValidatorWrapper"; + + /* + * The list of dangerous permissions the robot controller needs. + */ + protected List robotControllerPermissions = new ArrayList() {{ + add(Manifest.permission.WRITE_EXTERNAL_STORAGE); + add(Manifest.permission.READ_EXTERNAL_STORAGE); + add(Manifest.permission.CAMERA); + add(Manifest.permission.ACCESS_COARSE_LOCATION); + add(Manifest.permission.ACCESS_FINE_LOCATION); + add(Manifest.permission.READ_PHONE_STATE); + }}; + + private final static Class startApplication = FtcRobotControllerActivity.class; + + public String mapPermissionToExplanation(final String permission) { + if (permission.equals(Manifest.permission.WRITE_EXTERNAL_STORAGE)) { + return Misc.formatForUser(R.string.permRcWriteExternalStorageExplain); + } else if (permission.equals(Manifest.permission.READ_EXTERNAL_STORAGE)) { + return Misc.formatForUser(R.string.permRcReadExternalStorageExplain); + } else if (permission.equals(Manifest.permission.CAMERA)) { + return Misc.formatForUser(R.string.permRcCameraExplain); + } else if (permission.equals(Manifest.permission.ACCESS_COARSE_LOCATION)) { + return Misc.formatForUser(R.string.permAccessLocationExplain); + } else if (permission.equals(Manifest.permission.ACCESS_FINE_LOCATION)) { + return Misc.formatForUser(R.string.permAccessLocationExplain); + } else if (permission.equals(Manifest.permission.READ_PHONE_STATE)) { + return Misc.formatForUser(R.string.permReadPhoneState); + } + return Misc.formatForUser(R.string.permGenericExplain); + } + + @Override + protected void onCreate(Bundle savedInstanceState) + { + super.onCreate(savedInstanceState); + + permissions = robotControllerPermissions; + } + + protected Class onStartApplication() + { + FtcRobotControllerActivity.setPermissionsValidated(); + return startApplication; + } +} diff --git a/src/main/res/drawable-xhdpi/icon_menu.png b/src/main/res/drawable-xhdpi/icon_menu.png new file mode 100644 index 0000000..6b9e997 Binary files /dev/null and b/src/main/res/drawable-xhdpi/icon_menu.png differ diff --git a/src/main/res/drawable-xhdpi/icon_robotcontroller.png b/src/main/res/drawable-xhdpi/icon_robotcontroller.png new file mode 100644 index 0000000..022552f Binary files /dev/null and b/src/main/res/drawable-xhdpi/icon_robotcontroller.png differ diff --git 
a/src/main/res/layout/activity_ftc_controller.xml b/src/main/res/layout/activity_ftc_controller.xml new file mode 100644 index 0000000..935bb2b --- /dev/null +++ b/src/main/res/layout/activity_ftc_controller.xml @@ -0,0 +1,191 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/res/menu/ftc_robot_controller.xml b/src/main/res/menu/ftc_robot_controller.xml new file mode 100644 index 0000000..657c1aa --- /dev/null +++ b/src/main/res/menu/ftc_robot_controller.xml @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + diff --git a/src/main/res/raw/gold.wav b/src/main/res/raw/gold.wav new file mode 100644 index 0000000..3a7baf8 Binary files /dev/null and b/src/main/res/raw/gold.wav differ diff --git a/src/main/res/raw/silver.wav b/src/main/res/raw/silver.wav new file mode 100644 index 0000000..25918a7 Binary files /dev/null and b/src/main/res/raw/silver.wav differ diff --git a/src/main/res/values/dimens.xml b/src/main/res/values/dimens.xml new file mode 100644 index 0000000..63f1bab --- /dev/null +++ b/src/main/res/values/dimens.xml @@ -0,0 +1,40 @@ + + + + + + 16dp + 5dp + + \ No newline at end of file diff --git a/src/main/res/values/strings.xml b/src/main/res/values/strings.xml new file mode 100644 index 0000000..6ea191e --- /dev/null +++ b/src/main/res/values/strings.xml @@ -0,0 +1,72 @@ + + + + + + + FTC Robot Controller + + + Self Inspect + Program & Manage + Blocks + Settings + Restart Robot + Configure Robot + About + Exit + + + Configuration Complete + Restarting Robot + The Robot Controller must be fully up and running before entering Program and Manage Mode. + + + + @style/AppThemeRedRC + @style/AppThemeGreenRC + @style/AppThemeBlueRC + @style/AppThemePurpleRC + @style/AppThemeOrangeRC + @style/AppThemeTealRC + + + pref_ftc_season_year_of_current_rc_new + + @string/packageNameRobotController + + diff --git a/src/main/res/values/styles.xml b/src/main/res/values/styles.xml new file mode 100644 index 0000000..07689c0 --- /dev/null +++ b/src/main/res/values/styles.xml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/res/xml/app_settings.xml b/src/main/res/xml/app_settings.xml new file mode 100644 index 0000000..58d3aa9 --- /dev/null +++ b/src/main/res/xml/app_settings.xml @@ -0,0 +1,93 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/res/xml/device_filter.xml b/src/main/res/xml/device_filter.xml new file mode 100644 index 0000000..7b75350 --- /dev/null +++ b/src/main/res/xml/device_filter.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + +
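For reference, the annotation-based registration path described in FtcOpModeRegister above requires no edits to any file in this diff. The sketch below shows what such an OpMode could look like; the class name, OpMode name, and the org.firstinspires.ftc.teamcode package are hypothetical placeholders for a team's own code, not part of these sources.

    // Minimal sketch (illustrative only): an OpMode registered purely via annotation.
    package org.firstinspires.ftc.teamcode;

    import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
    import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

    @TeleOp(name = "Minimal Annotated OpMode", group = "Examples")
    public class MinimalAnnotatedOpMode extends LinearOpMode {
        @Override
        public void runOpMode() {
            // The @TeleOp annotation alone places this OpMode on the Driver Station menu;
            // FtcOpModeRegister.register() stays empty.
            telemetry.addData("Status", "Initialized");
            telemetry.update();

            waitForStart();

            while (opModeIsActive()) {
                telemetry.addData("Status", "Running");
                telemetry.update();
            }
        }
    }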