Compare commits

...

6 commits

Author  SHA1  Message  Date
Cal Kestis  aba72e566c  Merge pull request #324 from FIRST-Tech-Challenge/20220723-130006-release-candidate (FtcRobotController v7.2)  2022-07-25 16:15:51 +09:00
Cal Kestis  fe758edbd4  FtcRobotController v7.2  2022-07-23 13:50:42 +09:00
Cal Kestis  e945da24e6  Merge pull request #274 from FIRST-Tech-Challenge/20211223-120805-release-candidate (FtcRobotController v7.1)  2022-01-17 10:46:39 -08:00
Cal Kestis  439a5bf491  FtcRobotController v7.1  2021-12-23 12:41:34 -08:00
Cal Kestis  704b6948e3  Merge pull request #159 from FIRST-Tech-Challenge/20210915-141025-release-candidate (FtcRobotController v7.0)  2021-09-20 08:32:05 -07:00
Cal Kestis  724f759dea  FtcRobotController v7.0  2021-09-15 15:02:44 -07:00
57 changed files with 1645 additions and 2508 deletions

View file

@@ -9,8 +9,9 @@ android {
defaultConfig {
minSdkVersion 23
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
- buildConfigField "String", "BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
+ buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
}
compileSdkVersion 29
@@ -19,6 +20,7 @@ android {
sourceCompatibility JavaVersion.VERSION_1_7
targetCompatibility JavaVersion.VERSION_1_7
}
+ namespace = 'com.qualcomm.ftcrobotcontroller'
}
apply from: '../build.dependencies.gradle'
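
For orientation, the renamed constant above lands in the BuildConfig class that the Android Gradle plugin generates from this file. A minimal sketch of reading it at runtime (assumption: BuildConfig is generated under the com.qualcomm.ftcrobotcontroller namespace shown above; the helper class itself is hypothetical, not part of this diff):

    import android.util.Log;
    import com.qualcomm.ftcrobotcontroller.BuildConfig;

    // Hypothetical helper: logs the timestamp that the renamed
    // buildConfigField above bakes into the APK at compile time.
    public class BuildInfoLogger {
        public static void logBuildTime() {
            Log.i("BuildInfo", "APK built at " + BuildConfig.APP_BUILD_TIME);
        }
    }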

View file

@@ -1,9 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
- package="com.qualcomm.ftcrobotcontroller"
- android:versionCode="40"
- android:versionName="6.2">
+ android:versionCode="45"
+ android:versionName="7.2">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
@@ -61,6 +60,21 @@
android:name="com.qualcomm.ftccommon.FtcRobotControllerService"
android:enabled="true" />
+ <!-- Assistant that autostarts the robot controller on android boot (if it's supposed to) -->
+ <receiver
+ android:enabled="true"
+ android:exported="true"
+ android:name="org.firstinspires.ftc.ftccommon.internal.RunOnBoot"
+ android:permission="android.permission.RECEIVE_BOOT_COMPLETED">
+ <intent-filter>
+ <category android:name="android.intent.category.DEFAULT" />
+ <action android:name="android.intent.action.BOOT_COMPLETED" />
+ <action android:name="android.intent.action.QUICKBOOT_POWERON" />
+ </intent-filter>
+ </receiver>
</application>
</manifest>

View file

@@ -0,0 +1,167 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This file contains an example of a Linear "OpMode".
* An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match.
* The names of OpModes appear on the menu of the FTC Driver Station.
* When a selection is made from the menu, the corresponding OpMode is executed.
*
* This particular OpMode illustrates driving a 4-motor Omni-Directional (or Holonomic) robot.
* This code will work with either a Mecanum-Drive or an X-Drive train.
* Both of these drives are illustrated at https://gm0.org/en/latest/docs/robot-design/drivetrains/holonomic.html
* Note that a Mecanum drive must display an X roller-pattern when viewed from above.
*
* Also note that it is critical to set the correct rotation direction for each motor. See details below.
*
* Holonomic drives provide the ability for the robot to move in three axes (directions) simultaneously.
* Each motion axis is controlled by one Joystick axis.
*
* 1) Axial: Driving forward and backward Left-joystick Forward/Backward
* 2) Lateral: Strafing right and left Left-joystick Right and Left
* 3) Yaw: Rotating Clockwise and counter clockwise Right-joystick Right and Left
*
* This code is written assuming that the right-side motors need to be reversed for the robot to drive forward.
* When you first test your robot, if it moves backward when you push the left stick forward, then you must flip
* the direction of all 4 motors (see code below).
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@TeleOp(name="Basic: Omni Linear OpMode", group="Linear Opmode")
@Disabled
public class BasicOmniOpMode_Linear extends LinearOpMode {
// Declare OpMode members for each of the 4 motors.
private ElapsedTime runtime = new ElapsedTime();
private DcMotor leftFrontDrive = null;
private DcMotor leftBackDrive = null;
private DcMotor rightFrontDrive = null;
private DcMotor rightBackDrive = null;
@Override
public void runOpMode() {
// Initialize the hardware variables. Note that the strings used here must correspond
// to the names assigned during the robot configuration step on the DS or RC devices.
leftFrontDrive = hardwareMap.get(DcMotor.class, "left_front_drive");
leftBackDrive = hardwareMap.get(DcMotor.class, "left_back_drive");
rightFrontDrive = hardwareMap.get(DcMotor.class, "right_front_drive");
rightBackDrive = hardwareMap.get(DcMotor.class, "right_back_drive");
// ########################################################################################
// !!! IMPORTANT Drive Information. Test your motor directions. !!!!!
// ########################################################################################
// Most robots need the motors on one side to be reversed to drive forward.
// The motor reversals shown here are for a "direct drive" robot (the wheels turn the same direction as the motor shaft)
// If your robot has additional gear reductions or uses a right-angled drive, it's important to ensure
// that your motors are turning in the correct direction. So, start out with the reversals here, BUT
// when you first test your robot, push the left joystick forward and observe the direction the wheels turn.
// Reverse the direction (flip FORWARD <-> REVERSE ) of any wheel that runs backward
// Keep testing until ALL the wheels move the robot forward when you push the left joystick forward.
leftFrontDrive.setDirection(DcMotor.Direction.REVERSE);
leftBackDrive.setDirection(DcMotor.Direction.REVERSE);
rightFrontDrive.setDirection(DcMotor.Direction.FORWARD);
rightBackDrive.setDirection(DcMotor.Direction.FORWARD);
// Wait for the game to start (driver presses PLAY)
telemetry.addData("Status", "Initialized");
telemetry.update();
waitForStart();
runtime.reset();
// run until the end of the match (driver presses STOP)
while (opModeIsActive()) {
double max;
// POV Mode uses left joystick to go forward & strafe, and right joystick to rotate.
double axial = -gamepad1.left_stick_y; // Note: pushing stick forward gives negative value
double lateral = gamepad1.left_stick_x;
double yaw = gamepad1.right_stick_x;
// Combine the joystick requests for each axis-motion to determine each wheel's power.
// Set up a variable for each drive wheel to save the power level for telemetry.
double leftFrontPower = axial + lateral + yaw;
double rightFrontPower = axial - lateral - yaw;
double leftBackPower = axial - lateral + yaw;
double rightBackPower = axial + lateral - yaw;
// Normalize the values so no wheel power exceeds 100%
// This ensures that the robot maintains the desired motion.
max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
max = Math.max(max, Math.abs(leftBackPower));
max = Math.max(max, Math.abs(rightBackPower));
if (max > 1.0) {
leftFrontPower /= max;
rightFrontPower /= max;
leftBackPower /= max;
rightBackPower /= max;
}
// This is test code:
//
// Uncomment the following code to test your motor directions.
// Each button should make the corresponding motor run FORWARD.
// 1) First get all the motors to take the correct positions on the robot
// by adjusting your Robot Configuration if necessary.
// 2) Then make sure they run in the correct direction by modifying the
// setDirection() calls above.
// Once the correct motors move in the correct direction, re-comment this code.
/*
leftFrontPower = gamepad1.x ? 1.0 : 0.0; // X gamepad
leftBackPower = gamepad1.a ? 1.0 : 0.0; // A gamepad
rightFrontPower = gamepad1.y ? 1.0 : 0.0; // Y gamepad
rightBackPower = gamepad1.b ? 1.0 : 0.0; // B gamepad
*/
// Send calculated power to wheels
leftFrontDrive.setPower(leftFrontPower);
rightFrontDrive.setPower(rightFrontPower);
leftBackDrive.setPower(leftBackPower);
rightBackDrive.setPower(rightBackPower);
// Show the elapsed game time and wheel power.
telemetry.addData("Status", "Run Time: " + runtime.toString());
telemetry.addData("Front left/Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower);
telemetry.addData("Back left/Right", "%4.2f, %4.2f", leftBackPower, rightBackPower);
telemetry.update();
}
}
}
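
As a quick check of the wheel-mixing and normalization logic in the loop above, here is a worked numeric example (illustration only, not part of the diff):

    // Input: full forward (axial = 1.0), half strafe right (lateral = 0.5), no yaw.
    double axial = 1.0, lateral = 0.5, yaw = 0.0;
    double leftFrontPower  = axial + lateral + yaw;   //  1.5
    double rightFrontPower = axial - lateral - yaw;   //  0.5
    double leftBackPower   = axial - lateral + yaw;   //  0.5
    double rightBackPower  = axial + lateral - yaw;   //  1.5
    // The largest magnitude is 1.5, so every wheel is scaled by 1/1.5, which
    // preserves the motion ratio while keeping all powers within +/-1.0:
    // leftFront = 1.0, rightFront = 0.333, leftBack = 0.333, rightBack = 1.0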

View file

@@ -40,13 +40,13 @@ import com.qualcomm.robotcore.util.Range;
* This file contains an example of an iterative (Non-Linear) "OpMode".
* An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match.
* The names of OpModes appear on the menu of the FTC Driver Station.
- * When an selection is made from the menu, the corresponding OpMode
+ * When a selection is made from the menu, the corresponding OpMode
* class is instantiated on the Robot Controller and executed.
*
* This particular OpMode just executes a basic Tank Drive Teleop for a two wheeled robot
* It includes all the skeletal structure that all iterative OpModes contain.
*
- * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@@ -72,10 +72,11 @@ public class BasicOpMode_Iterative extends OpMode
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
- // Most robots need the motor on one side to be reversed to drive forward
- // Reverse the motor that runs backwards when connected directly to the battery
- leftDrive.setDirection(DcMotor.Direction.FORWARD);
- rightDrive.setDirection(DcMotor.Direction.REVERSE);
+ // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
+ // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive.
+ // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
+ leftDrive.setDirection(DcMotor.Direction.REVERSE);
+ rightDrive.setDirection(DcMotor.Direction.FORWARD);
// Tell the driver that initialization is complete.
telemetry.addData("Status", "Initialized");

View file

@@ -40,13 +40,13 @@ import com.qualcomm.robotcore.util.Range;
/**
* This file contains a minimal example of a Linear "OpMode". An OpMode is a 'program' that runs in either
* the autonomous or the teleop period of an FTC match. The names of OpModes appear on the menu
- * of the FTC Driver Station. When an selection is made from the menu, the corresponding OpMode
+ * of the FTC Driver Station. When a selection is made from the menu, the corresponding OpMode
* class is instantiated on the Robot Controller and executed.
*
* This particular OpMode just executes a basic Tank Drive Teleop for a two wheeled robot
* It includes all the skeletal structure that all linear OpModes contain.
*
- * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@@ -70,10 +70,11 @@ public class BasicOpMode_Linear extends LinearOpMode {
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
- // Most robots need the motor on one side to be reversed to drive forward
- // Reverse the motor that runs backwards when connected directly to the battery
- leftDrive.setDirection(DcMotor.Direction.FORWARD);
- rightDrive.setDirection(DcMotor.Direction.REVERSE);
+ // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
+ // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive.
+ // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
+ leftDrive.setDirection(DcMotor.Direction.REVERSE);
+ rightDrive.setDirection(DcMotor.Direction.FORWARD);
// Wait for the game to start (driver presses PLAY)
waitForStart();

View file

@@ -33,13 +33,11 @@ import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.CompassSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This file illustrates the concept of calibrating a MR Compass
- * It uses the common Pushbot hardware class to define the drive on the robot.
* The code is structured as a LinearOpMode
*
* This code assumes there is a compass configured with the name "compass"
*
* This code will put the compass into calibration mode, wait three seconds and then attempt
@@ -57,7 +55,8 @@ import com.qualcomm.robotcore.util.ElapsedTime;
public class ConceptCompassCalibration extends LinearOpMode {
/* Declare OpMode members. */
- HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
+ public DcMotor leftDrive = null;
+ public DcMotor rightDrive = null;
private ElapsedTime runtime = new ElapsedTime();
CompassSensor compass;
@@ -68,10 +67,15 @@ public class ConceptCompassCalibration extends LinearOpMode {
@Override
public void runOpMode() {
- /* Initialize the drive system variables.
- * The init() method of the hardware class does all the work here
- */
- robot.init(hardwareMap);
+ // Initialize the drive system variables.
+ leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
+ rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
+ // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
+ // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive.
+ // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
+ leftDrive.setDirection(DcMotor.Direction.REVERSE);
+ rightDrive.setDirection(DcMotor.Direction.FORWARD);
// get a reference to our Compass Sensor object.
compass = hardwareMap.get(CompassSensor.class, "compass");
@@ -93,8 +97,8 @@ public class ConceptCompassCalibration extends LinearOpMode {
// Start the robot rotating clockwise
telemetry.addData("Compass", "Calibration mode. Turning the robot...");
telemetry.update();
- robot.leftDrive.setPower(MOTOR_POWER);
- robot.rightDrive.setPower(-MOTOR_POWER);
+ leftDrive.setPower(MOTOR_POWER);
+ rightDrive.setPower(-MOTOR_POWER);
// run until time expires OR the driver presses STOP;
runtime.reset();
@@ -103,8 +107,8 @@ public class ConceptCompassCalibration extends LinearOpMode {
}
// Stop all motors and turn off calibration
- robot.leftDrive.setPower(0);
- robot.rightDrive.setPower(0);
+ leftDrive.setPower(0);
+ rightDrive.setPower(0);
compass.setMode(CompassSensor.CompassMode.MEASUREMENT_MODE);
telemetry.addData("Compass", "Returning to measurement mode");
telemetry.update();

View file

@@ -1,103 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DeviceInterfaceModule;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This OpMode illustrates using the Device Interface Module as a signalling device.
* The code is structured as a LinearOpMode
*
* This code assumes a DIM name "dim".
*
* There are many examples where the robot might like to signal the driver, without requiring them
* to look at the driver station. This might be something like a "ball in hopper" condition or a
* "ready to shoot" condition.
*
* The DIM has two user settable indicator LEDs (one red one blue). These can be controlled
* directly from your program.
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@TeleOp(name = "Concept: DIM As Indicator", group = "Concept")
@Disabled
public class ConceptDIMAsIndicator extends LinearOpMode {
static final int BLUE_LED = 0; // Blue LED channel on DIM
static final int RED_LED = 1; // Red LED Channel on DIM
// Create timer to toggle LEDs
private ElapsedTime runtime = new ElapsedTime();
// Define class members
DeviceInterfaceModule dim;
@Override
public void runOpMode() {
// Connect to motor (Assume standard left wheel)
// Change the text in quotes to match any motor name on your robot.
dim = hardwareMap.get(DeviceInterfaceModule.class, "dim");
// Toggle LEDs while Waiting for the start button
telemetry.addData(">", "Press Play to test LEDs." );
telemetry.update();
while (!isStarted()) {
// Determine if we are on an odd or even second
boolean even = (((int)(runtime.time()) & 0x01) == 0);
dim.setLED(RED_LED, even); // Red for even
dim.setLED(BLUE_LED, !even); // Blue for odd
idle();
}
// Running now
telemetry.addData(">", "Press X for Blue, B for Red." );
telemetry.update();
// Now just use red and blue buttons to set red and blue LEDs
while(opModeIsActive()){
dim.setLED(BLUE_LED, gamepad1.x);
dim.setLED(RED_LED, gamepad1.b);
idle();
}
// Turn off LEDs;
dim.setLED(BLUE_LED, false);
dim.setLED(RED_LED, false);
telemetry.addData(">", "Done");
telemetry.update();
}
}

View file

@@ -0,0 +1,200 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This sample illustrates using the rumble feature of many gamepads.
*
* Note: Some gamepads "rumble" better than others.
* The Xbox & PS4 have a left (rumble1) and right (rumble2) rumble motor.
* These two gamepads have two distinct rumble modes: Large on the left, and small on the right
* The ETpark gamepad may only respond to rumble1, and may only run at full power.
* The Logitech F310 gamepad does not have *any* rumble ability.
*
* Moral: You should use this sample to experiment with your specific gamepads to explore their rumble features.
*
* The rumble motors are accessed through the standard gamepad1 and gamepad2 objects.
* Several new methods were added to the Gamepad class in FTC SDK Rev 7
* The key methods are as follows:
*
* .rumble(double rumble1, double rumble2, int durationMs)
* This method sets the rumble power of both motors for a specific time duration.
* Both rumble arguments are motor-power levels in the 0.0 to 1.0 range.
* durationMs is the desired length of the rumble action in milliseconds.
* This method returns immediately.
* Note:
* Use a durationMs of Gamepad.RUMBLE_DURATION_CONTINUOUS to provide a continuous rumble
* Use a power of 0, or duration of 0 to stop a rumble.
*
* .rumbleBlips(int count) allows an easy way to signal the driver with a series of rumble blips.
* Just specify how many blips you want.
* This method returns immediately.
*
* .runRumbleEffect(customRumbleEffect) allows you to run a custom rumble sequence that you have
* built using the Gamepad.RumbleEffect.Builder()
* A "custom effect" is a sequence of steps, where each step can rumble any of the
* rumble motors for a specific period at a specific power level.
* The Custom Effect will play as the (un-blocked) OpMode continues to run
*
* .isRumbling() returns true if there is a rumble effect in progress.
* Use this call to prevent stepping on a current rumble.
*
* .stopRumble() Stop any ongoing rumble or custom rumble effect.
*
* .rumble(int durationMs) Full power rumble for fixed duration.
*
* Note: Whenever a new Rumble command is issued, any currently executing rumble action will
* be truncated, and the new action started immediately. Take these precautions:
* 1) Do Not SPAM the rumble motors by issuing rapid fire commands
* 2) Multiple sources for rumble commands must coordinate to avoid tromping on each other.
*
* This can be achieved several possible ways:
* 1) Only having one source for rumble actions
* 2) Issuing rumble commands on transitions, rather than states.
* e.g. The moment a touch sensor is pressed, rather than the entire time it is being pressed.
* 3) Scheduling rumble commands based on timed events. e.g. 10 seconds prior to endgame
* 4) Rumble on non-overlapping mechanical actions. e.g. arm fully-extended or fully-retracted.
* 5) Use isRumbling() to hold off on a new rumble if one is already in progress.
*
* The examples shown here are representative of how to invoke a gamepad rumble.
* It is assumed that these will be modified to suit the specific robot and team strategy needs.
*
* ######## Read the telemetry display on the Driver Station Screen for instructions. ######
*
* Ex 1) This example shows a) how to create a custom rumble effect, and then b) how to trigger it based
* on game time. One use for this might be to alert the driver that half-time or End-game is approaching.
*
* Ex 2) This example shows tying the rumble power to a changing sensor value.
* In this case it is the Gamepad trigger, but it could be any sensor output scaled to the 0-1 range.
* Since it takes over the rumble motors, it is only performed when the Left Bumper is pressed.
* Note that this approach MUST include a way to turn OFF the rumble when the button is released.
*
* Ex 3) This example shows a simple way to trigger a 3-blip sequence. In this case it is
* triggered by the gamepad A (Cross) button, but it could be any sensor, like a touch or light sensor.
* Note that this code ensures that it only rumbles once when the input goes true.
*
* Ex 4) This example shows how to trigger a single rumble when an input value gets over a certain value.
* In this case it is reading the Right Trigger, but it could be any variable sensor, like a
* range sensor, or position sensor. The code needs to ensure that it is only triggered once, so
* it waits till the sensor drops back below the threshold before it can trigger again.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@Disabled
@TeleOp(name="Concept: Gamepad Rumble", group ="Concept")
public class ConceptGamepadRumble extends LinearOpMode
{
boolean lastA = false; // Use to track the prior button state.
boolean lastLB = false; // Use to track the prior button state.
boolean highLevel = false; // used to prevent multiple level-based rumbles.
boolean secondHalf = false; // Use to prevent multiple half-time warning rumbles.
Gamepad.RumbleEffect customRumbleEffect; // Use to build a custom rumble sequence.
ElapsedTime runtime = new ElapsedTime(); // Use to determine when end game is starting.
final double HALF_TIME = 60.0; // Wait this many seconds before rumble-alert for half-time.
final double TRIGGER_THRESHOLD = 0.75; // Squeeze more than 3/4 to get rumble.
@Override
public void runOpMode()
{
// Example 1. a) start by creating a three-pulse rumble sequence: right, LEFT, LEFT
customRumbleEffect = new Gamepad.RumbleEffect.Builder()
.addStep(0.0, 1.0, 500) // Rumble right motor 100% for 500 mSec
.addStep(0.0, 0.0, 300) // Pause for 300 mSec
.addStep(1.0, 0.0, 250) // Rumble left motor 100% for 250 mSec
.addStep(0.0, 0.0, 250) // Pause for 250 mSec
.addStep(1.0, 0.0, 250) // Rumble left motor 100% for 250 mSec
.build();
telemetry.addData(">", "Press Start");
telemetry.update();
waitForStart();
runtime.reset(); // Start game timer.
// Loop while monitoring buttons for rumble triggers
while (opModeIsActive())
{
// Read and save the current gamepad button states.
boolean currentA = gamepad1.a ;
boolean currentLB = gamepad1.left_bumper ;
// Display the current Rumble status. Just for interest.
telemetry.addData(">", "Are we RUMBLING? %s\n", gamepad1.isRumbling() ? "YES" : "no" );
// ----------------------------------------------------------------------------------------
// Example 1. b) Watch the runtime timer, and run the custom rumble when we hit half-time.
// Make sure we only signal once by setting "secondHalf" flag to prevent further rumbles.
// ----------------------------------------------------------------------------------------
if ((runtime.seconds() > HALF_TIME) && !secondHalf) {
gamepad1.runRumbleEffect(customRumbleEffect);
secondHalf = true;
}
// Display the time remaining while we are still counting down.
if (!secondHalf) {
telemetry.addData(">", "Halftime Alert Countdown: %3.0f Sec \n", (HALF_TIME - runtime.seconds()) );
}
// ----------------------------------------------------------------------------------------
// Example 2. If Left Bumper is being pressed, power the rumble motors based on the two trigger depressions.
// This is useful to see how the rumble feels at various power levels.
// ----------------------------------------------------------------------------------------
if (currentLB) {
// Left Bumper is being pressed, so send left and right "trigger" values to left and right rumble motors.
gamepad1.rumble(gamepad1.left_trigger, gamepad1.right_trigger, Gamepad.RUMBLE_DURATION_CONTINUOUS);
// Show what is being sent to rumbles
telemetry.addData(">", "Squeeze triggers to control rumbles");
telemetry.addData("> : Rumble", "Left: %.0f%% Right: %.0f%%", gamepad1.left_trigger * 100, gamepad1.right_trigger * 100);
} else {
// Make sure rumble is turned off when Left Bumper is released (only one time each press)
if (lastLB) {
gamepad1.stopRumble();
}
// Prompt for manual rumble action
telemetry.addData(">", "Hold Left-Bumper to test Manual Rumble");
telemetry.addData(">", "Press A (Cross) for three blips");
telemetry.addData(">", "Squeeze right trigger slowly for 1 blip");
}
lastLB = currentLB; // remember the current button state for next time around the loop
// ----------------------------------------------------------------------------------------
// Example 3. Blip 3 times at the moment that A (Cross) is pressed. (look for pressed transition)
// BUT !!! Skip it altogether if the Gamepad is already rumbling.
// ----------------------------------------------------------------------------------------
if (currentA && !lastA) {
if (!gamepad1.isRumbling()) // Check for possible overlap of rumbles.
gamepad1.rumbleBlips(3);
}
lastA = currentA; // remember the current button state for next time around the loop
// ----------------------------------------------------------------------------------------
// Example 4. Rumble once when gamepad right trigger goes above the THRESHOLD.
// ----------------------------------------------------------------------------------------
if (gamepad1.right_trigger > TRIGGER_THRESHOLD) {
if (!highLevel) {
gamepad1.rumble(0.9, 0, 200); // 200 mSec burst on left motor.
highLevel = true; // Hold off any more triggers
}
} else {
highLevel = false; // We can trigger again now.
}
// Send the telemetry data to the Driver Station, and then pause to pace the program.
telemetry.update();
sleep(10);
}
}
}
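
Precaution 2 above (rumble on transitions, not states) generalizes beyond gamepad buttons. A minimal sketch for a sensor-driven rumble, assuming a touch sensor configured with the hypothetical name "hand_touch" (the package and class name are also hypothetical, not part of this diff):

    package org.firstinspires.ftc.teamcode; // hypothetical team package

    import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
    import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
    import com.qualcomm.robotcore.hardware.TouchSensor;

    @TeleOp(name = "Rumble On Touch Sketch", group = "Concept")
    public class RumbleOnTouchSketch extends LinearOpMode {
        @Override
        public void runOpMode() {
            TouchSensor touch = hardwareMap.get(TouchSensor.class, "hand_touch");
            boolean lastTouch = false;
            waitForStart();
            while (opModeIsActive()) {
                boolean nowTouch = touch.isPressed();
                // Blip only on the press *transition*, and skip it entirely
                // if another rumble action is already in progress.
                if (nowTouch && !lastTouch && !gamepad1.isRumbling()) {
                    gamepad1.rumbleBlips(2);
                }
                lastTouch = nowTouch;
                sleep(10);
            }
        }
    }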

View file

@@ -0,0 +1,78 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import org.firstinspires.ftc.robotcore.external.Telemetry;
/**
* This sample illustrates using the touchpad feature found on some gamepads.
*
* The Sony PS4 gamepad can detect two distinct touches on the central touchpad.
* Other gamepads with different touchpads may provide mixed results.
*
* The touchpads are accessed through the standard gamepad1 and gamepad2 objects.
* Several new members were added to the Gamepad class in FTC SDK Rev 7
*
* .touchpad_finger_1 returns true if at least one finger is detected.
* .touchpad_finger_1_x finger 1 X coordinate. Valid if touchpad_finger_1 is true
* .touchpad_finger_1_y finger 1 Y coordinate. Valid if touchpad_finger_1 is true
*
* .touchpad_finger_2 returns true if a second finger is detected
* .touchpad_finger_2_x finger 2 X coordinate. Valid if touchpad_finger_2 is true
* .touchpad_finger_2_y finger 2 Y coordinate. Valid if touchpad_finger_2 is true
*
* Finger touches are reported with an X and Y coordinate in the following coordinate system.
*
* 1) X is the Horizontal axis, and Y is the vertical axis
* 2) The 0,0 origin is at the center of the touchpad.
* 3) 1.0, 1.0 is at the top right corner of the touchpad.
* 4) -1.0,-1.0 is at the bottom left corner of the touchpad.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@Disabled
@TeleOp(name="Concept: Gamepad Touchpad", group ="Concept")
public class ConceptGamepadTouchpad extends LinearOpMode
{
@Override
public void runOpMode()
{
telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);
telemetry.addData(">", "Press Start");
telemetry.update();
waitForStart();
while (opModeIsActive())
{
boolean finger = false;
// Display finger 1 x & y position if finger detected
if(gamepad1.touchpad_finger_1)
{
finger = true;
telemetry.addLine(String.format("Finger 1: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_1_x, gamepad1.touchpad_finger_1_y));
}
// Display finger 2 x & y position if finger detected
if(gamepad1.touchpad_finger_2)
{
finger = true;
telemetry.addLine(String.format("Finger 2: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_2_x, gamepad1.touchpad_finger_2_y));
}
if(!finger)
{
telemetry.addLine("No fingers");
}
telemetry.update();
sleep(10);
}
}
}
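
Building on the coordinate system described above, here is a minimal sketch that maps touchpad X onto a servo position (the servo name "pointer", the package, and the class name are hypothetical, not part of this diff):

    package org.firstinspires.ftc.teamcode; // hypothetical team package

    import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
    import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
    import com.qualcomm.robotcore.hardware.Servo;

    @TeleOp(name = "Touchpad Servo Sketch", group = "Concept")
    public class TouchpadServoSketch extends LinearOpMode {
        @Override
        public void runOpMode() {
            Servo pointer = hardwareMap.get(Servo.class, "pointer");
            waitForStart();
            while (opModeIsActive()) {
                if (gamepad1.touchpad_finger_1) {
                    // Touchpad X runs -1.0 .. +1.0; servo positions run 0.0 .. 1.0.
                    pointer.setPosition((gamepad1.touchpad_finger_1_x + 1.0) / 2.0);
                }
                sleep(10);
            }
        }
    }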

View file

@@ -1,223 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.modernrobotics.ModernRoboticsUsbDeviceInterfaceModule;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DeviceInterfaceModule;
import com.qualcomm.robotcore.hardware.I2cAddr;
import com.qualcomm.robotcore.util.RobotLog;
import com.qualcomm.robotcore.util.TypeConversion;
import java.util.concurrent.locks.Lock;
/**
* An example of a linear op mode that shows how to change the I2C address.
*/
@TeleOp(name = "Concept: I2c Address Change", group = "Concept")
@Disabled
public class ConceptI2cAddressChange extends LinearOpMode {
public static final int ADDRESS_SET_NEW_I2C_ADDRESS = 0x70;
// trigger bytes used to change I2C address on ModernRobotics sensors.
public static final byte TRIGGER_BYTE_1 = 0x55;
public static final byte TRIGGER_BYTE_2 = (byte) 0xaa;
// Expected bytes from the Modern Robotics IR Seeker V3 memory map
public static final byte IR_SEEKER_V3_FIRMWARE_REV = 0x12;
public static final byte IR_SEEKER_V3_SENSOR_ID = 0x49;
public static final I2cAddr IR_SEEKER_V3_ORIGINAL_ADDRESS = I2cAddr.create8bit(0x38);
// Expected bytes from the Modern Robotics Color Sensor memory map
public static final byte COLOR_SENSOR_FIRMWARE_REV = 0x10;
public static final byte COLOR_SENSOR_SENSOR_ID = 0x43;
public static final byte COLOR_SENSOR_ORIGINAL_ADDRESS = 0x3C;
public static final byte MANUFACTURER_CODE = 0x4d;
// Currently, this is set to expect the bytes from the IR Seeker.
// If you change these values so you're setting "FIRMWARE_REV" to
// COLOR_SENSOR_FIRMWARE_REV, and "SENSOR_ID" to "COLOR_SENSOR_SENSOR_ID",
// you'll be able to change the I2C address of the ModernRoboticsColorSensor.
// If the bytes you're expecting are different than what this op mode finds,
// a comparison will be printed out into the logfile.
public static final byte FIRMWARE_REV = IR_SEEKER_V3_FIRMWARE_REV;
public static final byte SENSOR_ID = IR_SEEKER_V3_SENSOR_ID;
// These byte values are common with most Modern Robotics sensors.
public static final int READ_MODE = 0x80;
public static final int ADDRESS_MEMORY_START = 0x0;
public static final int TOTAL_MEMORY_LENGTH = 0x0c;
public static final int BUFFER_CHANGE_ADDRESS_LENGTH = 0x03;
// The port where your sensor is connected.
int port = 5;
byte[] readCache;
Lock readLock;
byte[] writeCache;
Lock writeLock;
I2cAddr currentAddress = IR_SEEKER_V3_ORIGINAL_ADDRESS;
// I2c addresses on Modern Robotics devices must be divisible by 2, and between 0x7e and 0x10
// Different hardware may have different rules.
// Be sure to read the requirements for the hardware you're using!
// If you use an invalid address, you may make your device completely unusable.
I2cAddr newAddress = I2cAddr.create8bit(0x42);
DeviceInterfaceModule dim;
@Override
public void runOpMode() {
// set up the hardware devices we are going to use
dim = hardwareMap.get(DeviceInterfaceModule.class, "dim");
readCache = dim.getI2cReadCache(port);
readLock = dim.getI2cReadCacheLock(port);
writeCache = dim.getI2cWriteCache(port);
writeLock = dim.getI2cWriteCacheLock(port);
// I2c addresses on Modern Robotics devices must be divisible by 2, and between 0x7e and 0x10
// Different hardware may have different rules.
// Be sure to read the requirements for the hardware you're using!
ModernRoboticsUsbDeviceInterfaceModule.throwIfModernRoboticsI2cAddressIsInvalid(newAddress);
// wait for the start button to be pressed
waitForStart();
performAction("read", port, currentAddress, ADDRESS_MEMORY_START, TOTAL_MEMORY_LENGTH);
while(!dim.isI2cPortReady(port)) {
telemetry.addData("I2cAddressChange", "waiting for the port to be ready...");
telemetry.update();
sleep(1000);
}
// update the local cache
dim.readI2cCacheFromController(port);
// make sure the first bytes are what we think they should be.
int count = 0;
int[] initialArray = {READ_MODE, currentAddress.get8Bit(), ADDRESS_MEMORY_START, TOTAL_MEMORY_LENGTH, FIRMWARE_REV, MANUFACTURER_CODE, SENSOR_ID};
while (!foundExpectedBytes(initialArray, readLock, readCache)) {
telemetry.addData("I2cAddressChange", "Confirming that we're reading the correct bytes...");
telemetry.update();
dim.readI2cCacheFromController(port);
sleep(1000);
count++;
// if we go too long with failure, we probably are expecting the wrong bytes.
if (count >= 10) {
telemetry.addData("I2cAddressChange", String.format("Looping too long with no change, probably have the wrong address. Current address: 8bit=0x%02x", currentAddress.get8Bit()));
hardwareMap.irSeekerSensor.get(String.format("Looping too long with no change, probably have the wrong address. Current address: 8bit=0x%02x", currentAddress.get8Bit()));
telemetry.update();
}
}
// Enable writes to the correct segment of the memory map.
performAction("write", port, currentAddress, ADDRESS_SET_NEW_I2C_ADDRESS, BUFFER_CHANGE_ADDRESS_LENGTH);
// Write out the trigger bytes, and the new desired address.
writeNewAddress();
dim.setI2cPortActionFlag(port);
dim.writeI2cCacheToController(port);
telemetry.addData("I2cAddressChange", "Giving the hardware 60 seconds to make the change...");
telemetry.update();
// Changing the I2C address takes some time.
sleep(60000);
// Query the new address and see if we can get the bytes we expect.
dim.enableI2cReadMode(port, newAddress, ADDRESS_MEMORY_START, TOTAL_MEMORY_LENGTH);
dim.setI2cPortActionFlag(port);
dim.writeI2cCacheToController(port);
int[] confirmArray = {READ_MODE, newAddress.get8Bit(), ADDRESS_MEMORY_START, TOTAL_MEMORY_LENGTH, FIRMWARE_REV, MANUFACTURER_CODE, SENSOR_ID};
while (!foundExpectedBytes(confirmArray, readLock, readCache)) {
telemetry.addData("I2cAddressChange", "Have not confirmed the changes yet...");
telemetry.update();
dim.readI2cCacheFromController(port);
sleep(1000);
}
telemetry.addData("I2cAddressChange", "Successfully changed the I2C address. New address: 8bit=0x%02x", newAddress.get8Bit());
telemetry.update();
RobotLog.i("Successfully changed the I2C address." + String.format("New address: 8bit=0x%02x", newAddress.get8Bit()));
/**** IMPORTANT NOTE ******/
// You need to add a line like this at the top of your op mode
// to update the I2cAddress in the driver.
//irSeeker.setI2cAddress(newAddress);
/***************************/
}
private boolean foundExpectedBytes(int[] byteArray, Lock lock, byte[] cache) {
try {
lock.lock();
boolean allMatch = true;
StringBuilder s = new StringBuilder(300 * 4);
String mismatch = "";
for (int i = 0; i < byteArray.length; i++) {
s.append(String.format("expected: %02x, got: %02x \n", TypeConversion.unsignedByteToInt( (byte) byteArray[i]), cache[i]));
if (TypeConversion.unsignedByteToInt(cache[i]) != TypeConversion.unsignedByteToInt( (byte) byteArray[i])) {
mismatch = String.format("i: %d, byteArray[i]: %02x, cache[i]: %02x", i, byteArray[i], cache[i]);
allMatch = false;
}
}
RobotLog.e(s.toString() + "\n allMatch: " + allMatch + ", mismatch: " + mismatch);
return allMatch;
} finally {
lock.unlock();
}
}
private void performAction(String actionName, int port, I2cAddr i2cAddress, int memAddress, int memLength) {
if (actionName.equalsIgnoreCase("read")) dim.enableI2cReadMode(port, i2cAddress, memAddress, memLength);
if (actionName.equalsIgnoreCase("write")) dim.enableI2cWriteMode(port, i2cAddress, memAddress, memLength);
dim.setI2cPortActionFlag(port);
dim.writeI2cCacheToController(port);
dim.readI2cCacheFromController(port);
}
private void writeNewAddress() {
try {
writeLock.lock();
writeCache[4] = (byte) newAddress.get8Bit();
writeCache[5] = TRIGGER_BYTE_1;
writeCache[6] = TRIGGER_BYTE_2;
} finally {
writeLock.unlock();
}
}
}

View file

@@ -38,7 +38,7 @@ import com.qualcomm.robotcore.hardware.DcMotor;
* This OpMode ramps a single motor speed up and down repeatedly until Stop is pressed.
* The code is structured as a LinearOpMode
*
- * This code assumes a DC motor configured with the name "left_drive" as is found on a pushbot.
+ * This code assumes a DC motor configured with the name "left_drive" as is found on a Robot.
*
* INCREMENT sets how much to increase/decrease the power each cycle
* CYCLE_MS sets the update period.

View file

@@ -44,8 +44,8 @@ import com.qualcomm.robotcore.util.Range;
* robot configuration, use the drop down list under 'Servos' to select 'REV SPARKmini Controller'
* and name them 'left_drive' and 'right_drive'.
*
- * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name="REV SPARKmini Simple Drive Example", group="Concept")
@@ -69,7 +69,7 @@ public class ConceptRevSPARKMini extends LinearOpMode {
rightDrive = hardwareMap.get(DcMotorSimple.class, "right_drive");
// Most robots need the motor on one side to be reversed to drive forward
- // Reverse the motor that runs backwards when connected directly to the battery
+ // Reverse the motor that runs backward when connected directly to the battery
leftDrive.setDirection(DcMotorSimple.Direction.FORWARD);
rightDrive.setDirection(DcMotorSimple.Direction.REVERSE);

View file

@@ -35,12 +35,12 @@ import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Servo;
/**
- * This OpMode scans a single servo back and forwards until Stop is pressed.
+ * This OpMode scans a single servo back and forward until Stop is pressed.
* The code is structured as a LinearOpMode
* INCREMENT sets how much to increase/decrease the servo position each cycle
* CYCLE_MS sets the update period.
*
- * This code assumes a Servo configured with the name "left_hand" as is found on a pushbot.
+ * This code assumes a Servo configured with the name "left_hand" as is found on a Robot.
*
* NOTE: When any servo position is set, ALL attached servos are activated, so ensure that any other
* connected servos are able to move freely before running this test.
@@ -66,7 +66,7 @@ public class ConceptScanServo extends LinearOpMode {
@Override
public void runOpMode() {
- // Connect to servo (Assume PushBot Left Hand)
+ // Connect to servo (Assume Robot Left Hand)
// Change the text in quotes to match any servo name on your robot.
servo = hardwareMap.get(Servo.class, "left_hand");

View file

@@ -43,7 +43,7 @@ import java.io.File;
*
* If you are using OnBotJava, please see the ConceptSoundsOnBotJava sample
*
- * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*
* Operation:

View file

@@ -41,8 +41,8 @@ import com.qualcomm.robotcore.eventloop.opmode.Disabled;
* It does this by creating a simple "chooser" controlled by the gamepad Up Down buttons.
* This code also prevents sounds from stacking up by setting a "playing" flag, which is cleared when the sound finishes playing.
*
- * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* Operation:
* Use the DPAD to change the selected sound, and the Right Bumper to play it.

View file

@@ -41,10 +41,10 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
@@ -52,9 +52,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
- private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
- private static final String LABEL_FIRST_ELEMENT = "Quad";
- private static final String LABEL_SECOND_ELEMENT = "Single";
+ /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
+ * the following 4 detectable objects
+ * 0: Ball,
+ * 1: Cube,
+ * 2: Duck,
+ * 3: Marker (duck location tape marker)
+ *
+ * Two additional model assets are available which only contain a subset of the objects:
+ * FreightFrenzy_BC.tflite 0: Ball, 1: Cube
+ * FreightFrenzy_DM.tflite 0: Duck, 1: Marker
+ */
+ private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+ private static final String[] LABELS = {
+ "Ball",
+ "Cube",
+ "Duck",
+ "Marker"
+ };
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -128,16 +143,13 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.update();
}
}
}
}
- if (tfod != null) {
- tfod.shutdown();
- }
}
/**
@@ -166,7 +178,9 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
+ tfodParameters.isModelTensorFlow2 = true;
+ tfodParameters.inputSize = 320;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
- tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+ tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
}
}
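
With the two Ultimate Goal label constants replaced by the LABELS array above, per-label filtering reduces to a string match. A minimal sketch, assuming tfod has been initialized and activated as in this sample (java.util.List and the Recognition import are already used by the file):

    // Inside the opModeIsActive() loop:
    List<Recognition> recognitions = tfod.getUpdatedRecognitions();
    if (recognitions != null) {
        int ducks = 0;
        for (Recognition recognition : recognitions) {
            // Count only the detections whose label matches one model class.
            if ("Duck".equals(recognition.getLabel())) ducks++;
        }
        telemetry.addData("Ducks in view", ducks);
    }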

View file

@@ -42,7 +42,7 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
@@ -53,9 +53,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
- private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
- private static final String LABEL_FIRST_ELEMENT = "Quad";
- private static final String LABEL_SECOND_ELEMENT = "Single";
+ /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
+ * the following 4 detectable objects
+ * 0: Ball,
+ * 1: Cube,
+ * 2: Duck,
+ * 3: Marker (duck location tape marker)
+ *
+ * Two additional model assets are available which only contain a subset of the objects:
+ * FreightFrenzy_BC.tflite 0: Ball, 1: Cube
+ * FreightFrenzy_DM.tflite 0: Duck, 1: Marker
+ */
+ private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+ private static final String[] LABELS = {
+ "Ball",
+ "Cube",
+ "Duck",
+ "Marker"
+ };
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -134,15 +149,12 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.update();
}
}
}
- if (tfod != null) {
- tfod.shutdown();
- }
}
/**
@@ -179,8 +191,10 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpM
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
+ tfodParameters.isModelTensorFlow2 = true;
+ tfodParameters.inputSize = 320;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
- tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+ tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
}
private void doCameraSwitching() {

View file

@@ -41,7 +41,7 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
@@ -52,9 +52,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
@TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
- private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
- private static final String LABEL_FIRST_ELEMENT = "Quad";
- private static final String LABEL_SECOND_ELEMENT = "Single";
+ /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
+ * the following 4 detectable objects
+ * 0: Ball,
+ * 1: Cube,
+ * 2: Duck,
+ * 3: Marker (duck location tape marker)
+ *
+ * Two additional model assets are available which only contain a subset of the objects:
+ * FreightFrenzy_BC.tflite 0: Ball, 1: Cube
+ * FreightFrenzy_DM.tflite 0: Duck, 1: Marker
+ */
+ private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+ private static final String[] LABELS = {
+ "Ball",
+ "Cube",
+ "Duck",
+ "Marker"
+ };
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -127,16 +142,13 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
recognition.getLeft(), recognition.getTop());
telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f",
recognition.getRight(), recognition.getBottom());
i++;
}
telemetry.update();
}
}
}
}
- if (tfod != null) {
- tfod.shutdown();
- }
}
/**
@@ -165,7 +177,9 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.8f;
+ tfodParameters.isModelTensorFlow2 = true;
+ tfodParameters.inputSize = 320;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
- tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+ tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
}
}

View file

@@ -59,7 +59,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained in {@link ConceptVuforiaNavigation}.

View file

@@ -60,7 +60,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
- * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained in {@link ConceptVuforiaNavigationWebcam}.

View file

@@ -0,0 +1,203 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates using a webcam to locate and drive towards ANY Vuforia target.
* The code assumes a basic two-wheel Robot Configuration with motors named left_drive and right_drive.
* The motor directions must be set so a positive drive goes forward and a positive turn rotates to the right.
*
* Under manual control, the left stick will move forward/back, and the right stick will turn left/right.
* This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
* Manually drive the robot until it displays Target data on the Driver Station.
* Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
* Release the Left Bumper to return to manual driving mode.
*
* Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
* Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
*
* For more Vuforia details, or to adapt this OpMode for a phone camera, view the
* ConceptVuforiaFieldNavigation and ConceptVuforiaFieldNavigationWebcam samples.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Drive To Target", group = "Concept")
@Disabled
public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
{
// Adjust these numbers to suit your robot.
final double DESIRED_DISTANCE = 8.0; // this is how close the camera should get to the target (inches)
// The GAIN constants set the relationship between the measured position error,
// and how much power is applied to the drive motors. Drive = Error * Gain
// Make these values smaller for smoother control.
final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MM_PER_INCH = 25.40 ; // Metric conversion
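// Worked example of the control law above (Drive = Error * Gain), using the gains as set:
// a 25-inch range error gives drive = 25.0 * SPEED_GAIN = 0.50 (half power forward), and
// a 10-degree bearing error gives turn = 10.0 * TURN_GAIN = 0.10 (a gentle right turn).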
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it in to your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" --- YOUR NEW VUFORIA KEY GOES HERE --- ";
VuforiaLocalizer vuforia = null;
OpenGLMatrix targetPose = null;
String targetName = "";
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
@Override public void runOpMode()
{
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* To get an on-phone camera preview, use the code below.
* If no camera preview is desired, use the parameter-less constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Connect to the camera we are to use. This name must match what is set up in Robot Configuration
parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
this.vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the trackable objects from the Assets file, and give them meaningful names
VuforiaTrackables targetsFreightFrenzy = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
targetsFreightFrenzy.get(0).setName("Blue Storage");
targetsFreightFrenzy.get(1).setName("Blue Alliance Wall");
targetsFreightFrenzy.get(2).setName("Red Storage");
targetsFreightFrenzy.get(3).setName("Red Alliance Wall");
// Start tracking targets in the background
targetsFreightFrenzy.activate();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must correspond to the names assigned during the robot configuration
// step (using the FTC Robot Controller app on the phone).
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
boolean targetFound = false; // Set to true when a target is detected by Vuforia
double targetRange = 0; // Distance from camera to target in Inches
double targetBearing = 0; // Robot Heading, relative to target. Positive degrees means target is to the right.
double drive = 0; // Desired forward power (-1 to +1)
double turn = 0; // Desired turning power (-1 to +1)
while (opModeIsActive())
{
// Look for first visible target, and save its pose.
targetFound = false;
for (VuforiaTrackable trackable : targetsFreightFrenzy)
{
if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible())
{
targetPose = ((VuforiaTrackableDefaultListener)trackable.getListener()).getVuforiaCameraFromTarget();
// if we have a target, process the "pose" to determine the position of the target relative to the robot.
if (targetPose != null)
{
targetFound = true;
targetName = trackable.getName();
VectorF trans = targetPose.getTranslation();
// Extract the X & Y components of the offset of the target relative to the robot
double targetX = trans.get(0) / MM_PER_INCH; // Image X axis
double targetY = trans.get(2) / MM_PER_INCH; // Image Z axis
// target range is based on distance from robot position to origin (right triangle).
targetRange = Math.hypot(targetX, targetY);
// target bearing is the angle between straight-ahead (the camera axis) and the target range line
targetBearing = Math.toDegrees(Math.asin(targetX / targetRange));
break; // jump out of target tracking loop if we find a target.
}
}
}
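// Worked example with hypothetical numbers: if targetX = 3.0 inches (to the right) and
// targetY = 4.0 inches (ahead), then targetRange = hypot(3, 4) = 5.0 inches and
// targetBearing = asin(3.0 / 5.0) ≈ 36.9 degrees (positive, so the target is to the right).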
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", " %s", targetName);
telemetry.addData("Range", "%5.1f inches", targetRange);
telemetry.addData("Bearing","%3.0f degrees", targetBearing);
} else {
telemetry.addData(">","Drive using joystick to find target\n");
}
// Drive to target Automatically if Left Bumper is being pressed, AND we have found a target.
if (gamepad1.left_bumper && targetFound) {
// Determine heading and range error so we can use them to control the robot automatically.
double rangeError = (targetRange - DESIRED_DISTANCE);
double headingError = targetBearing;
// Use the speed and turn "gains" to calculate how we want the robot to move.
drive = rangeError * SPEED_GAIN;
turn = headingError * TURN_GAIN ;
telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn);
} else {
// drive using manual POV Joystick mode.
drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%.
turn = gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%.
telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn);
}
telemetry.update();
// Calculate left and right wheel powers and send them to the motors.
double leftPower = Range.clip(drive + turn, -1.0, 1.0) ;
double rightPower = Range.clip(drive - turn, -1.0, 1.0) ;
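// Worked example: with drive = 0.50 and turn = 0.10, leftPower = 0.60 and rightPower = 0.40,
// so the robot arcs to the right while closing on the target.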
leftDrive.setPower(leftPower);
rightDrive.setPower(rightPower);
sleep(10);
}
}
}

View file

@ -52,48 +52,35 @@ import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.
import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
/**
* This 2020-2021 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of robot on the ULTIMATE GOAL FTC field.
* The code is structured as a LinearOpMode
* This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
* robot on the FTC field using the RC phone's camera. The code is structured as a LinearOpMode
*
* Note: If you are using a WEBCAM see ConceptVuforiaFieldNavigationWebcam.java
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* From the Audience perspective, the Red Alliance station is on the right and the
* Blue Alliance Station is on the left.
* There are a total of five image targets for the ULTIMATE GOAL game.
* Three of the targets are placed in the center of the Red Alliance, Audience (Front),
* and Blue Alliance perimeter walls.
* Two additional targets are placed on the perimeter wall, one in front of each Tower Goal.
* Refer to the Field Setup manual for more specific location details
*
* A final calculation then uses the location of the camera on the robot to determine the
* Finally, the location of the camera on the robot is used to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ultimategoal/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
* To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="ULTIMATEGOAL Vuforia Nav", group ="Concept")
@TeleOp(name="Vuforia Field Nav", group ="Concept")
@Disabled
public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
public class ConceptVuforiaFieldNavigation extends LinearOpMode {
// IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted:
// 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side)
// 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape)
//
// NOTE: If you are running on a CONTROL HUB, with only one USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false;
//
private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
private static final boolean PHONE_IS_PORTRAIT = false ;
@ -113,17 +100,18 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
// Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
// We will define some constants and conversions here
// We will define some constants and conversions here. These are useful for the Freight Frenzy field.
private static final float mmPerInch = 25.4f;
private static final float mmTargetHeight = (6) * mmPerInch; // the height of the center of the target image above the floor
// Constants for perimeter targets
private static final float halfField = 72 * mmPerInch;
private static final float quadField = 36 * mmPerInch;
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
private static final float halfField = 72 * mmPerInch;
private static final float halfTile = 12 * mmPerInch;
private static final float oneAndHalfTile = 36 * mmPerInch;
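// For reference, the conversions above work out to: halfField = 72 * 25.4 = 1828.8 mm,
// halfTile = 12 * 25.4 = 304.8 mm, and oneAndHalfTile = 36 * 25.4 = 914.4 mm.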
// Class Members
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private VuforiaLocalizer vuforia = null;
private VuforiaTrackables targets = null ;
private boolean targetVisible = false;
private float phoneXRotate = 0;
private float phoneYRotate = 0;
@ -132,18 +120,17 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
@Override public void runOpMode() {
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* We can pass Vuforia the handle to a camera preview resource (on the RC phone);
* If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
* To get an on-phone camera preview, use the code below.
* If no camera preview is desired, use the parameter-less constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
parameters.cameraDirection = CAMERA_CHOICE;
// Make sure extended tracking is disabled for this example.
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Instantiate the Vuforia engine
@ -151,21 +138,11 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
VuforiaTrackables targetsUltimateGoal = this.vuforia.loadTrackablesFromAsset("UltimateGoal");
VuforiaTrackable blueTowerGoalTarget = targetsUltimateGoal.get(0);
blueTowerGoalTarget.setName("Blue Tower Goal Target");
VuforiaTrackable redTowerGoalTarget = targetsUltimateGoal.get(1);
redTowerGoalTarget.setName("Red Tower Goal Target");
VuforiaTrackable redAllianceTarget = targetsUltimateGoal.get(2);
redAllianceTarget.setName("Red Alliance Target");
VuforiaTrackable blueAllianceTarget = targetsUltimateGoal.get(3);
blueAllianceTarget.setName("Blue Alliance Target");
VuforiaTrackable frontWallTarget = targetsUltimateGoal.get(4);
frontWallTarget.setName("Front Wall Target");
targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(targetsUltimateGoal);
allTrackables.addAll(targets);
/**
* In order for localization to work, we need to tell the system where each target is on the field, and
@ -185,41 +162,28 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
* coordinate system (the center of the field), facing up.
*/
//Set the position of the perimeter targets with relation to origin (center of field)
redAllianceTarget.setLocation(OpenGLMatrix
.translation(0, -halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
// Name and locate each trackable object
identifyTarget(0, "Blue Storage", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Blue Alliance Wall", halfTile, halfField, mmTargetHeight, 90, 0, 0);
identifyTarget(2, "Red Storage", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Red Alliance Wall", halfTile, -halfField, mmTargetHeight, 90, 0, 180);
blueAllianceTarget.setLocation(OpenGLMatrix
.translation(0, halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
frontWallTarget.setLocation(OpenGLMatrix
.translation(-halfField, 0, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , 90)));
/*
* Create a transformation matrix describing where the phone is on the robot.
*
* NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
* Lock it into Portrait for these numbers to work.
*
* Info: The coordinate frame for the robot looks the same as the field.
* The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
* Z is UP on the robot. This equates to a heading angle of Zero degrees.
*
* The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
* pointing to the LEFT side of the Robot.
* The two examples below assume that the camera is facing forward out the front of the robot.
*/
// The tower goal targets are located a quarter field length from the ends of the back perimeter wall.
blueTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , -90)));
redTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, -quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
//
// Create a transformation matrix describing where the phone is on the robot.
//
// NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
// Lock it into Portrait for these numbers to work.
//
// Info: The coordinate frame for the robot looks the same as the field.
// The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
// Z is UP on the robot. This equates to a bearing angle of Zero degrees.
//
// The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
// pointing to the LEFT side of the Robot.
// The two examples below assume that the camera is facing forward out the front of the robot.
// We need to rotate the camera around it's long axis to bring the correct camera forward.
// We need to rotate the camera around its long axis to bring the correct camera forward.
if (CAMERA_CHOICE == BACK) {
phoneYRotate = -90;
} else {
@ -232,10 +196,10 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
}
// Next, translate the camera lens to where it is on the robot.
// In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level.
final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot center
final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line
// In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens
OpenGLMatrix robotFromCamera = OpenGLMatrix
.translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
@ -246,19 +210,24 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
}
// WARNING:
// In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
// This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
// CONSEQUENTLY do not put any driving commands in this loop.
// To restore the normal opmode structure, just un-comment the following line:
/*
* WARNING:
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
* This sequence is used to enable the new remote DS Camera Stream feature to be used with this sample.
* CONSEQUENTLY do not put any driving commands in this loop.
* To restore the normal opmode structure, just un-comment the following line:
*/
// waitForStart();
// Note: To use the remote camera preview:
// AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
// Tap the preview window to receive a fresh image.
/* Note: To use the remote camera preview:
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
* Tap the preview window to receive a fresh image.
* It is not permitted to transition to RUN while the camera preview window is active.
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
*/
targetsUltimateGoal.activate();
targets.activate();
while (!isStopRequested()) {
// check all the trackable targets to see which one (if any) is visible.
@ -282,7 +251,7 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
if (targetVisible) {
// express position (translation) of robot in inches.
VectorF translation = lastLocation.getTranslation();
telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
// express the rotation of the robot in degrees.
@ -296,6 +265,20 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
}
// Disable Tracking when we are done;
targetsUltimateGoal.deactivate();
targets.deactivate();
}
/***
* Identify a target by naming it, and setting its position and orientation on the field
* @param targetIndex index of the target within the loaded trackables collection
* @param targetName name to assign to the target
* @param dx, dy, dz Target offsets in x,y,z axes
* @param rx, ry, rz Target rotations in x,y,z axes
*/
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
VuforiaTrackable aTarget = targets.get(targetIndex);
aTarget.setName(targetName);
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
}
}

View file

@ -52,41 +52,32 @@ import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
/**
* This 2020-2021 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of robot on the ULTIMATE GOAL FTC field.
* The code is structured as a LinearOpMode
* This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
* robot on the FTC field using a WEBCAM. The code is structured as a LinearOpMode
*
* NOTE: If you are running on a Phone with a built-in camera, use the ConceptVuforiaFieldNavigation example instead of this one.
* NOTE: It is possible to switch between multiple WebCams (eg: one for the left side and one for the right).
* For a related example of how to do this, see ConceptTensorFlowObjectDetectionSwitchableCameras
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* From the Audience perspective, the Red Alliance station is on the right and the
* Blue Alliance Station is on the left.
* There are a total of five image targets for the ULTIMATE GOAL game.
* Three of the targets are placed in the center of the Red Alliance, Audience (Front),
* and Blue Alliance perimeter walls.
* Two additional targets are placed on the perimeter wall, one in front of each Tower Goal.
* Refer to the Field Setup manual for more specific location details
*
* A final calculation then uses the location of the camera on the robot to determine the
* Finally, the location of the camera on the robot is used to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ultimategoal/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
* To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="ULTIMATEGOAL Vuforia Nav Webcam", group ="Concept")
@TeleOp(name="Vuforia Field Nav Webcam", group ="Concept")
@Disabled
public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@ -106,52 +97,39 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
// Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
// We will define some constants and conversions here
private static final float mmPerInch = 25.4f;
private static final float mmTargetHeight = (6) * mmPerInch; // the height of the center of the target image above the floor
// Constants for perimeter targets
private static final float halfField = 72 * mmPerInch;
private static final float quadField = 36 * mmPerInch;
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
private static final float halfField = 72 * mmPerInch;
private static final float halfTile = 12 * mmPerInch;
private static final float oneAndHalfTile = 36 * mmPerInch;
// Class Members
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private VuforiaTrackables targets = null ;
private WebcamName webcamName = null;
/**
* This is the webcam we are to use. As with other hardware devices such as motors and
* servos, this device is identified using the robot configuration tool in the FTC application.
*/
WebcamName webcamName = null;
private boolean targetVisible = false;
private float phoneXRotate = 0;
private float phoneYRotate = 0;
private float phoneZRotate = 0;
private boolean targetVisible = false;
@Override public void runOpMode() {
/*
* Retrieve the camera we are to use.
*/
// Connect to the camera we are to use. This name must match what is set up in Robot Configuration
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* We can pass Vuforia the handle to a camera preview resource (on the RC screen);
* If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
* If no camera-preview is desired, use the parameter-less constructor instead (commented out below).
* Note: A preview window is required if you want to view the camera stream on the Driver Station Phone.
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
/**
* We also indicate which camera on the RC we wish to use.
*/
// We also indicate which camera we wish to use.
parameters.cameraName = webcamName;
// Make sure extended tracking is disabled for this example.
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Instantiate the Vuforia engine
@ -159,21 +137,11 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
VuforiaTrackables targetsUltimateGoal = this.vuforia.loadTrackablesFromAsset("UltimateGoal");
VuforiaTrackable blueTowerGoalTarget = targetsUltimateGoal.get(0);
blueTowerGoalTarget.setName("Blue Tower Goal Target");
VuforiaTrackable redTowerGoalTarget = targetsUltimateGoal.get(1);
redTowerGoalTarget.setName("Red Tower Goal Target");
VuforiaTrackable redAllianceTarget = targetsUltimateGoal.get(2);
redAllianceTarget.setName("Red Alliance Target");
VuforiaTrackable blueAllianceTarget = targetsUltimateGoal.get(3);
blueAllianceTarget.setName("Blue Alliance Target");
VuforiaTrackable frontWallTarget = targetsUltimateGoal.get(4);
frontWallTarget.setName("Front Wall Target");
targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(targetsUltimateGoal);
allTrackables.addAll(targets);
/**
* In order for localization to work, we need to tell the system where each target is on the field, and
@ -193,71 +161,63 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
* coordinate system (the center of the field), facing up.
*/
//Set the position of the perimeter targets with relation to origin (center of field)
redAllianceTarget.setLocation(OpenGLMatrix
.translation(0, -halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
// Name and locate each trackable object
identifyTarget(0, "Blue Storage", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Blue Alliance Wall", halfTile, halfField, mmTargetHeight, 90, 0, 0);
identifyTarget(2, "Red Storage", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Red Alliance Wall", halfTile, -halfField, mmTargetHeight, 90, 0, 180);
blueAllianceTarget.setLocation(OpenGLMatrix
.translation(0, halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
frontWallTarget.setLocation(OpenGLMatrix
.translation(-halfField, 0, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90)));
/*
* Create a transformation matrix describing where the camera is on the robot.
*
* Info: The coordinate frame for the robot looks the same as the field.
* The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
* Z is UP on the robot. This equates to a bearing angle of Zero degrees.
*
* For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
* with the wide (horizontal) axis of the camera aligned with the X axis, and
* the Narrow (vertical) axis of the camera aligned with the Y axis
*
* But, this example assumes that the camera is actually facing forward out the front of the robot.
* So, the "default" camera position requires two rotations to get it oriented correctly.
* 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
* 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
*
* Finally the camera can be translated to its actual mounting position on the robot.
* In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
*/
// The tower goal targets are located a quarter field length from the ends of the back perimeter wall.
blueTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
redTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, -quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
//
// Create a transformation matrix describing where the phone is on the robot.
//
// Info: The coordinate frame for the robot looks the same as the field.
// The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
// Z is UP on the robot. This equates to a bearing angle of Zero degrees.
//
// For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
// with the wide (horizontal) axis of the camera aligned with the X axis, and
// the Narrow (vertical) axis of the camera aligned with the Y axis
//
// But, this example assumes that the camera is actually facing forward out the front of the robot.
// So, the "default" camera position requires two rotations to get it oriented correctly.
// 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
// 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
//
// Finally the camera can be translated to its actual mounting position on the robot.
// In this example, it is centered (left to right), but 4" forward of the middle of the robot, and 8" above ground level.
final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center
final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line
final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens
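// Hypothetical example (adjust to your own robot): a camera mounted 7 inches forward of
// robot center and 2 inches to its left would instead use
//   CAMERA_FORWARD_DISPLACEMENT = 7.0f * mmPerInch;
//   CAMERA_LEFT_DISPLACEMENT = 2.0f * mmPerInch;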
OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix
.translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0));
/** Let all the trackable listeners know where the phone is. */
/** Let all the trackable listeners know where the camera is. */
for (VuforiaTrackable trackable : allTrackables) {
((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot);
}
// WARNING:
// In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
// This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
// CONSEQUENTLY do not put any driving commands in this loop.
// To restore the normal opmode structure, just un-comment the following line:
/*
* WARNING:
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
* This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
* CONSEQUENTLY do not put any driving commands in this loop.
* To restore the normal opmode structure, just un-comment the following line:
*/
// waitForStart();
// Note: To use the remote camera preview:
// AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
// Tap the preview window to receive a fresh image.
/* Note: To use the remote camera preview:
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
* Tap the preview window to receive a fresh image.
* It is not permitted to transition to RUN while the camera preview window is active.
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
*/
targetsUltimateGoal.activate();
targets.activate();
while (!isStopRequested()) {
// check all the trackable targets to see which one (if any) is visible.
@ -281,7 +241,7 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
if (targetVisible) {
// express position (translation) of robot in inches.
VectorF translation = lastLocation.getTranslation();
telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
// express the rotation of the robot in degrees.
@ -295,6 +255,20 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
}
// Disable Tracking when we are done;
targetsUltimateGoal.deactivate();
targets.deactivate();
}
/***
* Identify a target by naming it, and setting its position and orientation on the field
* @param targetIndex index of the target within the loaded trackables collection
* @param targetName name to assign to the target
* @param dx, dy, dz Target offsets in x,y,z axes
* @param rx, ry, rz Target rotations in x,y,z axes
*/
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
VuforiaTrackable aTarget = targets.get(targetIndex);
aTarget.setName(targetName);
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
}
}

View file

@ -1,336 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.MatrixF;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import java.util.ArrayList;
import java.util.List;
/**
* This 2016-2017 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of robot on the FTC field.
* The code is structured as a LinearOpMode
*
* Vuforia uses the phone's camera to inspect its surroundings, and attempt to locate target images.
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* This example assumes a "diamond" field configuration where the red and blue alliance stations
* are adjacent on the corner of the field furthest from the audience.
* From the Audience perspective, the Red driver station is on the right.
* The two vision targets are located on the two walls closest to the audience, facing in.
* The Stones are on the RED side of the field, and the Chips are on the Blue side.
*
* A final calculation then uses the location of the camera on the robot to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Concept: Vuforia Navigation", group ="Concept")
@Disabled
public class ConceptVuforiaNavigation extends LinearOpMode {
public static final String TAG = "Vuforia Navigation Sample";
OpenGLMatrix lastLocation = null;
/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
VuforiaLocalizer vuforia;
@Override public void runOpMode() {
/*
* To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone);
* If no camera monitor is desired, use the parameterless constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// OR... Do Not Activate the Camera Monitor View, to save power
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a example of a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it in to your code on the next line, between the double quotes.
*/
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
/*
* We also indicate which camera on the RC we wish to use.
* Here we chose the back (HiRes) camera (for greater range), but
* for a competition robot, the front camera might be more convenient.
*/
parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
/**
* Instantiate the Vuforia engine
*/
vuforia = ClassFactory.getInstance().createVuforia(parameters);
/**
* Load the data sets for the trackable objects we wish to track. These particular data
* sets are stored in the 'assets' part of our application (you'll see them in the Android
* Studio 'Project' view over there on the left of the screen). You can make your own datasets
* with the Vuforia Target Manager: https://developer.vuforia.com/target-manager. PDFs for the
* example "StonesAndChips", datasets can be found in in this project in the
* documentation directory.
*/
VuforiaTrackables stonesAndChips = this.vuforia.loadTrackablesFromAsset("StonesAndChips");
VuforiaTrackable redTarget = stonesAndChips.get(0);
redTarget.setName("RedTarget"); // Stones
VuforiaTrackable blueTarget = stonesAndChips.get(1);
blueTarget.setName("BlueTarget"); // Chips
/** For convenience, gather together all the trackable objects in one easily-iterable collection */
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(stonesAndChips);
/**
* We use units of mm here because that's the recommended unit of measurement for the
* size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
* <ImageTarget name="stones" size="247 173"/>
* You don't *have to* use mm here, but the units here and the units used in the XML
* target configuration files *must* correspond for the math to work out correctly.
*/
float mmPerInch = 25.4f;
float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
float mmFTCFieldWidth = (12*12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
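// Worked arithmetic: mmFTCFieldWidth = (144 - 2) * 25.4 = 142 * 25.4 = 3606.8 mm.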
/**
* In order for localization to work, we need to tell the system where each target we
* wish to use for navigation resides on the field, and we need to specify where on the robot
* the phone resides. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math here involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* For the most part, you don't need to understand the details of the math of how transformation
* matrices work inside (as fascinating as that is, truly). Just remember these key points:
* <ol>
*
* <li>You can put two transformations together to produce a third that combines the effect of
* both of them. If, for example, you have a rotation transform R and a translation transform T,
* then the combined transformation matrix RT which does the rotation first and then the translation
* is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
* <em>reverse</em> of the chronological order in which they are applied.</li>
*
* <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
* class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
* float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
* {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
* Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
* float, float, float, float)}, are syntactic shorthands for creating a new transform and
* then immediately multiplying the receiver by it, which can be convenient at times.</li>
*
* <li>If you want to break open the black box of a transformation matrix to understand
* what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
* transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
* AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
* will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
*
* </ol>
*
* This example places the "stones" image on the perimeter wall to the Left
* of the Red Driver station wall. Similar to the Red Beacon Location on the Res-Q
*
* This example places the "chips" image on the perimeter wall to the Right
* of the Blue Driver station. Similar to the Blue Beacon Location on the Res-Q
*
* See the doc folder of this project for a description of the field Axis conventions.
*
* Initially the target is conceptually lying at the origin of the field's coordinate system
* (the center of the field), facing up.
*
* In this configuration, the target's coordinate system aligns with that of the field.
*
* In a real situation we'd also account for the vertical (Z) offset of the target,
* but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
*
* To place the Stones Target on the Red Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Then we rotate it 90 around the field's Z axis to face it away from the audience.
* - Finally, we translate it back along the X axis towards the red audience wall.
*/
OpenGLMatrix redTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the RED WALL. Our translation here
is a negative translation in X.*/
.translation(-mmFTCFieldWidth/2, 0, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 90, 0));
redTarget.setLocation(redTargetLocationOnField);
RobotLog.ii(TAG, "Red Target=%s", format(redTargetLocationOnField));
/*
* To place the Chips Target on the Blue Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Finally, we translate it along the Y axis towards the blue audience wall.
*/
OpenGLMatrix blueTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the Blue Audience wall.
Our translation here is a positive translation in Y.*/
.translation(0, mmFTCFieldWidth/2, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 0, 0));
blueTarget.setLocation(blueTargetLocationOnField);
RobotLog.ii(TAG, "Blue Target=%s", format(blueTargetLocationOnField));
/**
* Create a transformation matrix describing where the phone is on the robot. Here, we
* put the phone on the right hand side of the robot with the screen facing in (see our
* choice of BACK camera above) and in landscape mode. Starting from alignment between the
* robot's and phone's axes, this is a rotation of -90deg along the Y axis.
*
* When determining whether a rotation is positive or negative, consider yourself as looking
* down the (positive) axis of rotation from the positive towards the origin. Positive rotations
* are then CCW, and negative rotations CW. An example: consider looking down the positive Z
* axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
* plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
*/
OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix
.translation(mmBotWidth/2,0,0)
.multiplied(Orientation.getRotationMatrix(
AxesReference.EXTRINSIC, AxesOrder.YZY,
AngleUnit.DEGREES, -90, 0, 0));
RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));
/**
* Let the trackable listeners we care about know where the phone is. We know that each
* listener is a {@link VuforiaTrackableDefaultListener} and so can safely cast because
* we have not ourselves installed a listener of a different type.
*/
((VuforiaTrackableDefaultListener)redTarget.getListener()).setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
((VuforiaTrackableDefaultListener)blueTarget.getListener()).setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
/**
* A brief tutorial: here's how all the math is going to work:
*
* C = phoneLocationOnRobot maps phone coords -> robot coords
* P = tracker.getPose() maps image target coords -> phone coords
* L = redTargetLocationOnField maps image target coords -> field coords
*
* So
*
* C.inverted() maps robot coords -> phone coords
* P.inverted() maps phone coords -> imageTarget coords
*
* Putting that all together,
*
* L x P.inverted() x C.inverted() maps robot coords to field coords.
*
* @see VuforiaTrackableDefaultListener#getRobotLocation()
*/
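/*
 * A minimal sketch of that composition ('pose' here is a stand-in for the P matrix returned
 * by the listener; getRobotLocation() performs the equivalent computation internally):
 *
 *   OpenGLMatrix robotInField = redTargetLocationOnField         // L
 *       .multiplied(pose.inverted())                             // P.inverted()
 *       .multiplied(phoneLocationOnRobot.inverted());            // C.inverted()
 */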
/** Wait for the game to begin */
telemetry.addData(">", "Press Play to start tracking");
telemetry.update();
waitForStart();
/** Start tracking the data sets we care about. */
stonesAndChips.activate();
while (opModeIsActive()) {
for (VuforiaTrackable trackable : allTrackables) {
/**
* getUpdatedRobotLocation() will return null if no new information is available since
* the last time that call was made, or if the trackable is not currently visible.
* getRobotLocation() will return null if the trackable is not currently visible.
*/
telemetry.addData(trackable.getName(), ((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible() ? "Visible" : "Not Visible"); //
OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
}
/**
* Provide feedback as to where the robot was last located (if we know).
*/
if (lastLocation != null) {
// RobotLog.vv(TAG, "robot=%s", format(lastLocation));
telemetry.addData("Pos", format(lastLocation));
} else {
telemetry.addData("Pos", "Unknown");
}
telemetry.update();
}
}
/**
* A simple utility that extracts positioning information from a transformation matrix
* and formats it in a form palatable to a human being.
*/
String format(OpenGLMatrix transformationMatrix) {
return transformationMatrix.formatAsTransform();
}
}

View file

@ -1,461 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.graphics.Bitmap;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.RobotLog;
import com.qualcomm.robotcore.util.ThreadPool;
import com.vuforia.Frame;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.function.Consumer;
import org.firstinspires.ftc.robotcore.external.function.Continuation;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.MatrixF;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/**
* This 2016-2017 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of the robot on the FTC field.
* The code is structured as a LinearOpMode
*
* Vuforia uses the phone's camera to inspect its surroundings, and attempts to locate target images.
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* This example assumes a "diamond" field configuration where the red and blue alliance stations
* are adjacent on the corner of the field furthest from the audience.
* From the Audience perspective, the Red driver station is on the right.
* The two vision targets are located on the two walls closest to the audience, facing in.
* The Stones are on the RED side of the field, and the Chips are on the Blue side.
*
* A final calculation then uses the location of the camera on the robot to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Concept: Vuforia Nav Webcam", group ="Concept")
@Disabled
public class ConceptVuforiaNavigationWebcam extends LinearOpMode {
public static final String TAG = "Vuforia Navigation Sample";
OpenGLMatrix lastLocation = null;
/**
* @see #captureFrameToFile()
*/
int captureCounter = 0;
File captureDirectory = AppUtil.ROBOT_DATA_DIR;
/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
VuforiaLocalizer vuforia;
/**
* This is the webcam we are to use. As with other hardware devices such as motors and
* servos, this device is identified using the robot configuration tool in the FTC application.
*/
WebcamName webcamName;
@Override public void runOpMode() {
/*
* Retrieve the camera we are to use.
*/
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
/*
* To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone);
* If no camera monitor is desired, use the parameterless constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// OR... Do Not Activate the Camera Monitor View, to save power
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it into your code on the next line, between the double quotes.
*/
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
/**
* We also indicate which camera on the RC we wish to use.
*/
parameters.cameraName = webcamName;
/**
* Instantiate the Vuforia engine
*/
vuforia = ClassFactory.getInstance().createVuforia(parameters);
/**
* Because this opmode processes frames in order to write them to a file, we tell Vuforia
* that we want to ensure that certain frame formats are available in the {@link Frame}s we
* see.
*/
vuforia.enableConvertFrameToBitmap();
/** @see #captureFrameToFile() */
AppUtil.getInstance().ensureDirectoryExists(captureDirectory);
/**
* Load the data sets for the trackable objects we wish to track. These particular data
* sets are stored in the 'assets' part of our application (you'll see them in the Android
* Studio 'Project' view over there on the left of the screen). You can make your own datasets
* with the Vuforia Target Manager: https://developer.vuforia.com/target-manager. PDFs for the
* example "StonesAndChips", datasets can be found in in this project in the
* documentation directory.
*/
VuforiaTrackables stonesAndChips = vuforia.loadTrackablesFromAsset("StonesAndChips");
VuforiaTrackable redTarget = stonesAndChips.get(0);
redTarget.setName("RedTarget"); // Stones
VuforiaTrackable blueTarget = stonesAndChips.get(1);
blueTarget.setName("BlueTarget"); // Chips
/** For convenience, gather together all the trackable objects in one easily-iterable collection */
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(stonesAndChips);
/**
* We use units of mm here because those are the recommended units of measurement for the
* size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
* <ImageTarget name="stones" size="247 173"/>
* You don't *have to* use mm here, but the units here and the units used in the XML
* target configuration files *must* correspond for the math to work out correctly.
*/
float mmPerInch = 25.4f;
float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
float mmFTCFieldWidth = (12*12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
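// Worked numbers for the constants above: mmBotWidth = 18 * 25.4 = 457.2 mm, and
// mmFTCFieldWidth = (144 - 2) * 25.4 = 3606.8 mm.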
/**
* In order for localization to work, we need to tell the system where each target we
* wish to use for navigation resides on the field, and we need to specify where on the robot
* the camera resides. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* For the most part, you don't need to understand the details of the math of how transformation
* matrices work inside (as fascinating as that is, truly). Just remember these key points:
* <ol>
*
* <li>You can put two transformations together to produce a third that combines the effect of
* both of them. If, for example, you have a rotation transform R and a translation transform T,
* then the combined transformation matrix RT which does the rotation first and then the translation
* is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
* <em>reverse</em> of the chronological order in which they are applied.</li>
*
* <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
* class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
* float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
* {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
* Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
* float, float, float, float)}, are syntactic shorthands for creating a new transform and
* then immediately multiplying the receiver by it, which can be convenient at times.</li>
*
* <li>If you want to break open the black box of a transformation matrix to understand
* what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
* transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
* AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
* will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
*
* </ol>
*
* This example places the "stones" image on the perimeter wall to the Left
* of the Red Driver station wall. Similar to the Red Beacon Location on the Res-Q
*
* This example places the "chips" image on the perimeter wall to the Right
* of the Blue Driver station. Similar to the Blue Beacon Location on the Res-Q
*
* See the doc folder of this project for a description of the Field Coordinate System
* conventions.
*
* Initially the target is conceptually lying at the origin of the Field Coordinate System
* (the center of the field), facing up.
*
* In this configuration, the target's coordinate system aligns with that of the field.
*
* In a real situation we'd also account for the vertical (Z) offset of the target,
* but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
*
* To place the Stones Target on the Red Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Then we rotate it 90 around the field's Z axis to face it away from the audience.
* - Finally, we translate it back along the X axis towards the red audience wall.
*/
OpenGLMatrix redTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the RED WALL. Our translation here
is a negative translation in X.*/
.translation(-mmFTCFieldWidth/2, 0, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 90, 0));
redTarget.setLocationFtcFieldFromTarget(redTargetLocationOnField);
RobotLog.ii(TAG, "Red Target=%s", format(redTargetLocationOnField));
/*
* To place the Chips Target on the Blue Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Finally, we translate it along the Y axis towards the blue audience wall.
*/
OpenGLMatrix blueTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the Blue Audience wall.
Our translation here is a positive translation in Y.*/
.translation(0, mmFTCFieldWidth/2, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 0, 0));
blueTarget.setLocationFtcFieldFromTarget(blueTargetLocationOnField);
RobotLog.ii(TAG, "Blue Target=%s", format(blueTargetLocationOnField));
/**
* We also need to tell Vuforia where the <em>cameras</em> are relative to the robot.
*
* Just as there is a Field Coordinate System, so too there is a Robot Coordinate System.
* The two share many similarities. The origin of the Robot Coordinate System is wherever
* you choose to make it on the robot, but typically you'd choose somewhere in the middle
* of the robot. From that origin, the Y axis is horizontal and positive out towards the
* "front" of the robot (however you choose "front" to be defined), the X axis is horizontal
* and positive out towards the "right" of the robot (i.e.: 90deg horizontally clockwise from
* the positive Y axis), and the Z axis is vertical towards the sky.
*
* Similarly, for each camera there is a Camera Coordinate System. The origin of a Camera
* Coordinate System lies in the middle of the sensor inside of the camera. The Z axis is
* positive coming out of the lens of the camera in a direction perpendicular to the plane
* of the sensor. When looking at the face of the lens of the camera (down the positive Z
* axis), the X axis is positive off to the right in the plane of the sensor, and the Y axis
* is positive out the top of the lens in the plane of the sensor at 90 horizontally
* counter clockwise from the X axis.
*
* Next, there is a Phone Coordinate System (for robots that have phones, of course), though
* with the advent of Vuforia support for Webcams, this coordinate system is less significant
* than it was previously. The Phone Coordinate System is defined thusly: with the phone held
* flat in front of you in portrait mode (i.e. as it is when running the robot controller app)
* and you are staring straight at the face of the phone,
* * X is positive heading off to your right,
* * Y is positive heading up through the top edge of the phone, and
* * Z is pointing out of the screen, toward you.
* The origin of the Phone Coordinate System is at the origin of the Camera Coordinate System
* of the front-facing camera on the phone.
*
* Finally, it is worth noting that trackable Vuforia Image Targets have their <em>own</em>
* coordinate system (see {@link VuforiaTrackable}). This is sometimes referred to as the
* Target Coordinate System. In keeping with the above, when looking at the target in its
* natural orientation, in the Target Coordinate System
* * X is positive heading off to your right,
* * Y is positive heading up through the top edge of the target, and
* * Z is pointing out of the target, toward you.
*
* One can observe that the Camera Coordinate System of the front-facing camera on a phone
* coincides with the Phone Coordinate System. Further, when a phone is placed on its back
* at the origin of the Robot Coordinate System and aligned appropriately, those coordinate
* systems also coincide with the Robot Coordinate System. Got it?
*
* In this example here, we're going to assume that we put the camera on the right side
* of the robot (facing outwards, of course). To determine the transformation matrix that
* describes that location, first consider the camera as lying on its back at the origin
* of the Robot Coordinate System such that the Camera Coordinate System and Robot Coordinate
* System coincide. Then the transformation we need is
* * first a rotation of the camera by +90deg along the robot X axis,
* * then a rotation of the camera by +90deg along the robot Z axis, and
* * finally a translation of the camera to the side of the robot.
*
* When determining whether a rotation is positive or negative, consider yourself as looking
* down the (positive) axis of rotation from the positive end towards the origin. Positive rotations
* are then CCW, and negative rotations CW. An example: consider looking down the positive Z
* axis towards the origin. A positive rotation about Z (i.e.: a rotation parallel to the X-Y
* plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
*/
OpenGLMatrix robotFromCamera = OpenGLMatrix
.translation(mmBotWidth/2,0,0)
.multiplied(Orientation.getRotationMatrix(
AxesReference.EXTRINSIC, AxesOrder.XZY,
AngleUnit.DEGREES, 90, 90, 0));
RobotLog.ii(TAG, "camera=%s", format(robotFromCamera));
/**
* Let the trackable listeners we care about know where the camera is. We know that each
* listener is a {@link VuforiaTrackableDefaultListener} and can so safely cast because
* we have not ourselves installed a listener of a different type.
*/
((VuforiaTrackableDefaultListener)redTarget.getListener()).setCameraLocationOnRobot(parameters.cameraName, robotFromCamera);
((VuforiaTrackableDefaultListener)blueTarget.getListener()).setCameraLocationOnRobot(parameters.cameraName, robotFromCamera);
/**
* A brief tutorial: here's how all the math is going to work:
*
* C = robotFromCamera maps camera coords -> robot coords
* P = tracker.getPose() maps image target coords -> camera coords
* L = redTargetLocationOnField maps image target coords -> field coords
*
* So
*
* C.inverted() maps robot coords -> camera coords
* P.inverted() maps camera coords -> imageTarget coords
*
* Putting that all together,
*
* L x P.inverted() x C.inverted() maps robot coords to field coords.
*
* @see VuforiaTrackableDefaultListener#getRobotLocation()
*/
/** Wait for the game to begin */
telemetry.addData(">", "Press Play to start tracking");
telemetry.update();
waitForStart();
/** Start tracking the data sets we care about. */
stonesAndChips.activate();
boolean buttonPressed = false;
while (opModeIsActive()) {
if (gamepad1.a && !buttonPressed) {
captureFrameToFile();
}
buttonPressed = gamepad1.a;
for (VuforiaTrackable trackable : allTrackables) {
/**
* getUpdatedRobotLocation() will return null if no new information is available since
* the last time that call was made, or if the trackable is not currently visible.
* getRobotLocation() will return null if the trackable is not currently visible.
*/
telemetry.addData(trackable.getName(), ((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible() ? "Visible" : "Not Visible"); //
OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
}
/**
* Provide feedback as to where the robot was last located (if we know).
*/
if (lastLocation != null) {
// RobotLog.vv(TAG, "robot=%s", format(lastLocation));
telemetry.addData("Pos", format(lastLocation));
} else {
telemetry.addData("Pos", "Unknown");
}
telemetry.update();
}
}
/**
* A simple utility that extracts positioning information from a transformation matrix
* and formats it in a form palatable to a human being.
*/
String format(OpenGLMatrix transformationMatrix) {
return transformationMatrix.formatAsTransform();
}
/**
* Sample one frame from the Vuforia stream and write it to a .PNG image file on the robot
* controller in the /sdcard/FIRST/data directory. The images can be downloaded using Android
* Studio's Device File Explorer, ADB, or the Media Transfer Protocol (MTP) integration into
* Windows Explorer, among other means. The images can be useful during robot design and calibration
* in order to get a sense of what the camera is actually seeing and so assist in camera
* aiming and alignment.
*/
void captureFrameToFile() {
vuforia.getFrameOnce(Continuation.create(ThreadPool.getDefault(), new Consumer<Frame>()
{
@Override public void accept(Frame frame)
{
Bitmap bitmap = vuforia.convertFrameToBitmap(frame);
if (bitmap != null) {
File file = new File(captureDirectory, String.format(Locale.getDefault(), "VuforiaFrame-%d.png", captureCounter++));
try {
FileOutputStream outputStream = new FileOutputStream(file);
try {
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
} finally {
outputStream.close();
telemetry.log().add("captured %s", file.getName());
}
} catch (IOException e) {
RobotLog.ee(TAG, e, "exception in captureFrameToFile()");
}
}
}
}));
}
}

View file

@ -1,310 +0,0 @@
/* Copyright (c) 2020 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.os.Handler;
import androidx.annotation.NonNull;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.android.util.Size;
import org.firstinspires.ftc.robotcore.external.function.Consumer;
import org.firstinspires.ftc.robotcore.external.function.Continuation;
import org.firstinspires.ftc.robotcore.external.hardware.camera.Camera;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraCaptureRequest;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraCaptureSequenceId;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraCaptureSession;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraCharacteristics;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraException;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraFrame;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraManager;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.internal.collections.EvictingBlockingQueue;
import org.firstinspires.ftc.robotcore.internal.network.CallbackLooper;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import org.firstinspires.ftc.robotcore.internal.system.ContinuationSynchronizer;
import org.firstinspires.ftc.robotcore.internal.system.Deadline;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Locale;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
* This OpMode illustrates how to open a webcam and retrieve images from it. It requires a configuration
* containing a webcam with the default name ("Webcam 1"). When the opmode runs, pressing the 'A' button
* will cause a frame from the camera to be written to a file on the device, which can then be retrieved
* by various means (e.g.: Device File Explorer in Android Studio; plugging the device into a PC and
* using Media Transfer; ADB; etc)
*/
@TeleOp(name="Concept: Webcam", group ="Concept")
@Disabled
public class ConceptWebcam extends LinearOpMode {
//----------------------------------------------------------------------------------------------
// State
//----------------------------------------------------------------------------------------------
private static final String TAG = "Webcam Sample";
/** How long we are to wait to be granted permission to use the camera before giving up. Here,
* we wait indefinitely */
private static final int secondsPermissionTimeout = Integer.MAX_VALUE;
/** State regarding our interaction with the camera */
private CameraManager cameraManager;
private WebcamName cameraName;
private Camera camera;
private CameraCaptureSession cameraCaptureSession;
/** The queue into which all frames from the camera are placed as they become available.
* Frames which are not processed by the OpMode are automatically discarded. */
private EvictingBlockingQueue<Bitmap> frameQueue;
/** State regarding where and how to save frames when the 'A' button is pressed. */
private int captureCounter = 0;
private File captureDirectory = AppUtil.ROBOT_DATA_DIR;
/** A utility object that indicates where the asynchronous callbacks from the camera
* infrastructure are to run. In this OpMode, that's all hidden from you (but see {@link #startCamera}
* if you're curious): no knowledge of multi-threading is needed here. */
private Handler callbackHandler;
//----------------------------------------------------------------------------------------------
// Main OpMode entry
//----------------------------------------------------------------------------------------------
@Override public void runOpMode() {
callbackHandler = CallbackLooper.getDefault().getHandler();
cameraManager = ClassFactory.getInstance().getCameraManager();
cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
initializeFrameQueue(2);
AppUtil.getInstance().ensureDirectoryExists(captureDirectory);
try {
openCamera();
if (camera == null) return;
startCamera();
if (cameraCaptureSession == null) return;
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
telemetry.clear();
telemetry.addData(">", "Started...Press 'A' to capture frame");
boolean buttonPressSeen = false;
boolean captureWhenAvailable = false;
while (opModeIsActive()) {
boolean buttonIsPressed = gamepad1.a;
if (buttonIsPressed && !buttonPressSeen) {
captureWhenAvailable = true;
}
buttonPressSeen = buttonIsPressed;
if (captureWhenAvailable) {
Bitmap bmp = frameQueue.poll();
if (bmp != null) {
captureWhenAvailable = false;
onNewFrame(bmp);
}
}
telemetry.update();
}
} finally {
closeCamera();
}
}
/** Do something with the frame */
private void onNewFrame(Bitmap frame) {
saveBitmap(frame);
frame.recycle(); // not strictly necessary, but helpful
}
//----------------------------------------------------------------------------------------------
// Camera operations
//----------------------------------------------------------------------------------------------
private void initializeFrameQueue(int capacity) {
/** The frame queue will automatically throw away bitmap frames if they are not processed
* quickly by the OpMode. This avoids a buildup of frames in memory */
frameQueue = new EvictingBlockingQueue<Bitmap>(new ArrayBlockingQueue<Bitmap>(capacity));
frameQueue.setEvictAction(new Consumer<Bitmap>() {
@Override public void accept(Bitmap frame) {
// RobotLog.ii(TAG, "frame recycled w/o processing");
frame.recycle(); // not strictly necessary, but helpful
}
});
}
private void openCamera() {
if (camera != null) return; // be idempotent
Deadline deadline = new Deadline(secondsPermissionTimeout, TimeUnit.SECONDS);
camera = cameraManager.requestPermissionAndOpenCamera(deadline, cameraName, null);
if (camera == null) {
error("camera not found or permission to use not granted: %s", cameraName);
}
}
private void startCamera() {
if (cameraCaptureSession != null) return; // be idempotent
/** YUY2 is supported by all Webcams, per the USB Webcam standard: See "USB Device Class Definition
* for Video Devices: Uncompressed Payload, Table 2-1". Further, often this is the *only*
* image format supported by a camera */
final int imageFormat = ImageFormat.YUY2;
/** Verify that the image format is supported, and fetch size and desired frame rate if so */
CameraCharacteristics cameraCharacteristics = cameraName.getCameraCharacteristics();
if (!contains(cameraCharacteristics.getAndroidFormats(), imageFormat)) {
error("image format not supported");
return;
}
final Size size = cameraCharacteristics.getDefaultSize(imageFormat);
final int fps = cameraCharacteristics.getMaxFramesPerSecond(imageFormat, size);
/** Some of the logic below runs asynchronously on other threads. Use of the synchronizer
* here allows us to wait in this method until all that asynchrony completes before returning. */
final ContinuationSynchronizer<CameraCaptureSession> synchronizer = new ContinuationSynchronizer<>();
try {
/** Create a session in which requests to capture frames can be made */
camera.createCaptureSession(Continuation.create(callbackHandler, new CameraCaptureSession.StateCallbackDefault() {
@Override public void onConfigured(@NonNull CameraCaptureSession session) {
try {
/** The session is ready to go. Start requesting frames */
final CameraCaptureRequest captureRequest = camera.createCaptureRequest(imageFormat, size, fps);
session.startCapture(captureRequest,
new CameraCaptureSession.CaptureCallback() {
@Override public void onNewFrame(@NonNull CameraCaptureSession session, @NonNull CameraCaptureRequest request, @NonNull CameraFrame cameraFrame) {
/** A new frame is available. The frame data has <em>not</em> been copied for us, and we can only access it
* for the duration of the callback. So we copy here manually. */
Bitmap bmp = captureRequest.createEmptyBitmap();
cameraFrame.copyToBitmap(bmp);
frameQueue.offer(bmp);
}
},
Continuation.create(callbackHandler, new CameraCaptureSession.StatusCallback() {
@Override public void onCaptureSequenceCompleted(@NonNull CameraCaptureSession session, CameraCaptureSequenceId cameraCaptureSequenceId, long lastFrameNumber) {
RobotLog.ii(TAG, "capture sequence %s reports completed: lastFrame=%d", cameraCaptureSequenceId, lastFrameNumber);
}
})
);
synchronizer.finish(session);
} catch (CameraException|RuntimeException e) {
RobotLog.ee(TAG, e, "exception starting capture");
error("exception starting capture");
session.close();
synchronizer.finish(null);
}
}
}));
} catch (CameraException|RuntimeException e) {
RobotLog.ee(TAG, e, "exception starting camera");
error("exception starting camera");
synchronizer.finish(null);
}
/** Wait for all the asynchrony to complete */
try {
synchronizer.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
/** Retrieve the created session. This will be null on error. */
cameraCaptureSession = synchronizer.getValue();
}
private void stopCamera() {
if (cameraCaptureSession != null) {
cameraCaptureSession.stopCapture();
cameraCaptureSession.close();
cameraCaptureSession = null;
}
}
private void closeCamera() {
stopCamera();
if (camera != null) {
camera.close();
camera = null;
}
}
//----------------------------------------------------------------------------------------------
// Utilities
//----------------------------------------------------------------------------------------------
private void error(String msg) {
telemetry.log().add(msg);
telemetry.update();
}
private void error(String format, Object...args) {
telemetry.log().add(format, args);
telemetry.update();
}
private boolean contains(int[] array, int value) {
for (int i : array) {
if (i == value) return true;
}
return false;
}
private void saveBitmap(Bitmap bitmap) {
File file = new File(captureDirectory, String.format(Locale.getDefault(), "webcam-frame-%d.jpg", captureCounter++));
try {
try (FileOutputStream outputStream = new FileOutputStream(file)) {
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
telemetry.log().add("captured %s", file.getName());
}
} catch (IOException e) {
RobotLog.ee(TAG, e, "exception in saveBitmap()");
error("exception saving %s", file.getName());
}
}
}

View file

@ -1,105 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This is NOT an opmode.
*
* This class can be used to define all the specific hardware for a single robot.
* In this case that robot is a Pushbot.
* See PushbotTeleopTank_Iterative and others classes starting with "Pushbot" for usage examples.
*
* This hardware class assumes the following device names have been configured on the robot:
* Note: All names are lower case and some have single spaces between words.
*
* Motor channel: Left drive motor: "left_drive"
* Motor channel: Right drive motor: "right_drive"
* Motor channel: Manipulator drive motor: "left_arm"
* Servo channel: Servo to open left claw: "left_hand"
* Servo channel: Servo to open right claw: "right_hand"
*/
public class HardwarePushbot
{
/* Public OpMode members. */
public DcMotor leftDrive = null;
public DcMotor rightDrive = null;
public DcMotor leftArm = null;
public Servo leftClaw = null;
public Servo rightClaw = null;
public static final double MID_SERVO = 0.5 ;
public static final double ARM_UP_POWER = 0.45 ;
public static final double ARM_DOWN_POWER = -0.45 ;
/* local OpMode members. */
HardwareMap hwMap = null;
private ElapsedTime period = new ElapsedTime();
/* Constructor */
public HardwarePushbot(){
}
/* Initialize standard Hardware interfaces */
public void init(HardwareMap ahwMap) {
// Save reference to Hardware map
hwMap = ahwMap;
// Define and Initialize Motors
leftDrive = hwMap.get(DcMotor.class, "left_drive");
rightDrive = hwMap.get(DcMotor.class, "right_drive");
leftArm = hwMap.get(DcMotor.class, "left_arm");
leftDrive.setDirection(DcMotor.Direction.FORWARD); // Set to REVERSE if using AndyMark motors
rightDrive.setDirection(DcMotor.Direction.REVERSE);// Set to FORWARD if using AndyMark motors
// Set all motors to zero power
leftDrive.setPower(0);
rightDrive.setPower(0);
leftArm.setPower(0);
// Set all motors to run without encoders.
// May want to use RUN_USING_ENCODERS if encoders are installed.
leftDrive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
leftArm.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
// Define and initialize ALL installed servos.
leftClaw = hwMap.get(Servo.class, "left_hand");
rightClaw = hwMap.get(Servo.class, "right_hand");
leftClaw.setPosition(MID_SERVO);
rightClaw.setPosition(MID_SERVO);
}
}
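/*
 * Illustrative usage sketch (hypothetical OpMode, not part of the original file): a minimal
 * tank-drive TeleOp built on HardwarePushbot, in the spirit of the "Pushbot" samples referenced
 * in the header comment above. Fully-qualified names are used so the sketch is self-contained.
 */
@com.qualcomm.robotcore.eventloop.opmode.TeleOp(name="Pushbot: Usage Sketch", group="Pushbot")
class PushbotUsageSketch extends com.qualcomm.robotcore.eventloop.opmode.LinearOpMode {
    HardwarePushbot robot = new HardwarePushbot();   // the hardware class defined above
    @Override public void runOpMode() {
        robot.init(hardwareMap);                     // map and initialize all configured hardware
        waitForStart();
        while (opModeIsActive()) {
            // Tank drive: one stick per side; stick Y is negative when pushed forward.
            robot.leftDrive.setPower(-gamepad1.left_stick_y);
            robot.rightDrive.setPower(-gamepad1.right_stick_y);
        }
    }
}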

View file

@ -1,118 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.LightSensor;
/**
* This file illustrates the concept of driving up to a line and then stopping.
* It uses the common Pushbot hardware class to define the drive on the robot.
* The code is structured as a LinearOpMode
*
* The code shows using two different light sensors:
* The Primary sensor shown in this code is a legacy NXT Light sensor (called "sensor_light")
* Alternative "commented out" code uses a MR Optical Distance Sensor (called "sensor_ods")
* instead of the LEGO sensor. Chose to use one sensor or the other.
*
* Setting the correct WHITE_THRESHOLD value is key to stopping correctly.
* This should be set halfway between the light and dark values.
* These values can be read on the screen once the OpMode has been initialized (INIT), but before it is STARTED.
* Move the sensor on and off the white line and note the min and max readings.
* Edit this code to make WHITE_THRESHOLD halfway between the min and max.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@Autonomous(name="Pushbot: Auto Drive To Line", group="Pushbot")
@Disabled
public class PushbotAutoDriveToLine_Linear extends LinearOpMode {
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
LightSensor lightSensor; // Primary LEGO Light sensor,
// OpticalDistanceSensor lightSensor; // Alternative MR ODS sensor
static final double WHITE_THRESHOLD = 0.2; // spans between 0.1 - 0.5 from dark to light
static final double APPROACH_SPEED = 0.5;
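// Example (hypothetical readings): if the dark mat reads ~0.1 and the white line reads ~0.3,
// set WHITE_THRESHOLD halfway between them, i.e. ~0.2 (the value used above).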
@Override
public void runOpMode() {
/* Initialize the drive system variables.
* The init() method of the hardware class does all the work here
*/
robot.init(hardwareMap);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// get a reference to our Light Sensor object.
lightSensor = hardwareMap.lightSensor.get("sensor_light"); // Primary LEGO Light Sensor
// lightSensor = hardwareMap.opticalDistanceSensor.get("sensor_ods"); // Alternative MR ODS sensor.
// turn on LED of light sensor.
lightSensor.enableLed(true);
// Send telemetry message to signify robot waiting;
telemetry.addData("Status", "Ready to run"); //
telemetry.update();
// Wait for the game to start (driver presses PLAY)
// Abort this loop if the OpMode is started or stopped.
while (!(isStarted() || isStopRequested())) {
// Display the light level while we are waiting to start
telemetry.addData("Light Level", lightSensor.getLightDetected());
telemetry.update();
idle();
}
// Start the robot moving forward, and then begin looking for a white line.
robot.leftDrive.setPower(APPROACH_SPEED);
robot.rightDrive.setPower(APPROACH_SPEED);
// run until the white line is seen OR the driver presses STOP;
while (opModeIsActive() && (lightSensor.getLightDetected() < WHITE_THRESHOLD)) {
// Display the light level while we are looking for the line
telemetry.addData("Light Level", lightSensor.getLightDetected());
telemetry.update();
}
// Stop all motors
robot.leftDrive.setPower(0);
robot.rightDrive.setPower(0);
}
}

View file

@ -33,45 +33,53 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This file illustrates the concept of driving a path based on encoder counts.
* It uses the common Pushbot hardware class to define the drive on the robot.
* The code is structured as a LinearOpMode
*
* The code REQUIRES that you DO have encoders on the wheels,
* otherwise you would use: PushbotAutoDriveByTime;
* otherwise you would use: RobotAutoDriveByTime;
*
* This code ALSO requires that the drive Motors have been configured such that a positive
* power command moves them forwards, and causes the encoders to count UP.
* power command moves them forward, and causes the encoders to count UP.
*
* The desired path in this example is:
* - Drive forward for 48 inches
* - Spin right for 12 Inches
* - Drive Backwards for 24 inches
* - Drive Backward for 24 inches
* - Stop and close the claw.
*
* The code is written using a method called: encoderDrive(speed, leftInches, rightInches, timeoutS)
* that performs the actual movement.
* This methods assumes that each movement is relative to the last stopping place.
* This method assumes that each movement is relative to the last stopping place.
* There are other ways to perform encoder based moves, but this method is probably the simplest.
* This code uses the RUN_TO_POSITION mode to enable the Motor controllers to generate the run profile
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@Autonomous(name="Pushbot: Auto Drive By Encoder", group="Pushbot")
@Autonomous(name="Robot: Auto Drive By Encoder", group="Robot")
@Disabled
public class PushbotAutoDriveByEncoder_Linear extends LinearOpMode {
public class RobotAutoDriveByEncoder_Linear extends LinearOpMode {
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
private ElapsedTime runtime = new ElapsedTime();
// Calculate the COUNTS_PER_INCH for your specific drive train.
// Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV
// For external drive gearing, set DRIVE_GEAR_REDUCTION as needed.
// For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear.
// This is gearing DOWN for less speed and more torque.
// For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation.
static final double COUNTS_PER_MOTOR_REV = 1440 ; // eg: TETRIX Motor Encoder
static final double DRIVE_GEAR_REDUCTION = 2.0 ; // This is < 1.0 if geared UP
static final double DRIVE_GEAR_REDUCTION = 1.0 ; // No External Gearing.
static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference
static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) /
(WHEEL_DIAMETER_INCHES * 3.1415);
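// Worked example with the updated values above (1440 counts/rev, 1.0 gear reduction, 4.0 in wheels):
// COUNTS_PER_INCH = (1440 * 1.0) / (4.0 * 3.1415) ≈ 114.6 counts per inch.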
@ -81,26 +89,26 @@ public class PushbotAutoDriveByEncoder_Linear extends LinearOpMode {
@Override
public void runOpMode() {
/*
* Initialize the drive system variables.
* The init() method of the hardware class does all the work here
*/
robot.init(hardwareMap);
// Initialize the drive system variables.
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// Send telemetry message to signify robot waiting;
telemetry.addData("Status", "Resetting Encoders"); //
telemetry.update();
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
robot.leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Send telemetry message to indicate successful Encoder reset
telemetry.addData("Path0", "Starting at %7d :%7d",
robot.leftDrive.getCurrentPosition(),
robot.rightDrive.getCurrentPosition());
telemetry.addData("Starting at", "%7d :%7d",
leftDrive.getCurrentPosition(),
rightDrive.getCurrentPosition());
telemetry.update();
// Wait for the game to start (driver presses PLAY)
@ -112,12 +120,9 @@ public class PushbotAutoDriveByEncoder_Linear extends LinearOpMode {
encoderDrive(TURN_SPEED, 12, -12, 4.0); // S2: Turn Right 12 Inches with 4 Sec timeout
encoderDrive(DRIVE_SPEED, -24, -24, 4.0); // S3: Reverse 24 Inches with 4 Sec timeout
robot.leftClaw.setPosition(1.0); // S4: Stop and close the claw.
robot.rightClaw.setPosition(0.0);
sleep(1000); // pause for servos to move
telemetry.addData("Path", "Complete");
telemetry.update();
sleep(1000); // pause to display final telemetry message.
}
/*
@ -138,19 +143,19 @@ public class PushbotAutoDriveByEncoder_Linear extends LinearOpMode {
if (opModeIsActive()) {
// Determine new target position, and pass to motor controller
newLeftTarget = robot.leftDrive.getCurrentPosition() + (int)(leftInches * COUNTS_PER_INCH);
newRightTarget = robot.rightDrive.getCurrentPosition() + (int)(rightInches * COUNTS_PER_INCH);
robot.leftDrive.setTargetPosition(newLeftTarget);
robot.rightDrive.setTargetPosition(newRightTarget);
newLeftTarget = leftDrive.getCurrentPosition() + (int)(leftInches * COUNTS_PER_INCH);
newRightTarget = rightDrive.getCurrentPosition() + (int)(rightInches * COUNTS_PER_INCH);
leftDrive.setTargetPosition(newLeftTarget);
rightDrive.setTargetPosition(newRightTarget);
// Turn On RUN_TO_POSITION
robot.leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
// reset the timeout time and start motion.
runtime.reset();
robot.leftDrive.setPower(Math.abs(speed));
robot.rightDrive.setPower(Math.abs(speed));
leftDrive.setPower(Math.abs(speed));
rightDrive.setPower(Math.abs(speed));
// keep looping while we are still active, and there is time left, and both motors are running.
// Note: We use (isBusy() && isBusy()) in the loop test, which means that when EITHER motor hits
@ -160,25 +165,24 @@ public class PushbotAutoDriveByEncoder_Linear extends LinearOpMode {
// onto the next step, use (isBusy() || isBusy()) in the loop test.
while (opModeIsActive() &&
(runtime.seconds() < timeoutS) &&
(robot.leftDrive.isBusy() && robot.rightDrive.isBusy())) {
(leftDrive.isBusy() && rightDrive.isBusy())) {
// Display it for the driver.
telemetry.addData("Path1", "Running to %7d :%7d", newLeftTarget, newRightTarget);
telemetry.addData("Path2", "Running at %7d :%7d",
robot.leftDrive.getCurrentPosition(),
robot.rightDrive.getCurrentPosition());
telemetry.addData("Running to", " %7d :%7d", newLeftTarget, newRightTarget);
telemetry.addData("Currently at", " at %7d :%7d",
leftDrive.getCurrentPosition(), rightDrive.getCurrentPosition());
telemetry.update();
}
// Stop all motion;
robot.leftDrive.setPower(0);
robot.rightDrive.setPower(0);
leftDrive.setPower(0);
rightDrive.setPower(0);
// Turn off RUN_TO_POSITION
robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// sleep(250); // optional pause after each move
sleep(250); // optional pause after each move.
}
}
}

View file

@ -39,14 +39,13 @@ import com.qualcomm.robotcore.util.Range;
/**
* This file illustrates the concept of driving a path based on Gyro heading and encoder counts.
* It uses the common Pushbot hardware class to define the drive on the robot.
* The code is structured as a LinearOpMode
*
* The code REQUIRES that you DO have encoders on the wheels,
* otherwise you would use: PushbotAutoDriveByTime;
* otherwise you would use: RobotAutoDriveByTime;
*
* This code ALSO requires that you have a Modern Robotics I2C gyro with the name "gyro"
* otherwise you would use: PushbotAutoDriveByEncoder;
* otherwise you would use: RobotAutoDriveByEncoder;
*
* This code requires that the drive Motors have been configured such that a positive
* power command moves them forward, and causes the encoders to count UP.
@ -62,24 +61,33 @@ import com.qualcomm.robotcore.util.Range;
* the Gyro Calibration process, or whenever the program issues a resetZAxisIntegrator() call on the Gyro.
*
* The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis,
* which means that a Positive rotation is Counter Clock Wise, looking down on the field.
* which means that a Positive rotation is Counter Clockwise, looking down on the field.
* This is consistent with the FTC field coordinate conventions set out in the document:
* ftc_app\doc\tutorial\FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@Autonomous(name="Pushbot: Auto Drive By Gyro", group="Pushbot")
@Autonomous(name="Robot: Auto Drive By Gyro", group="Robot")
@Disabled
public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
ModernRoboticsI2cGyro gyro = null; // Additional Gyro device
private double robotHeading = 0;
// Calculate the COUNTS_PER_INCH for your specific drive train.
// Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV
// For external drive gearing, set DRIVE_GEAR_REDUCTION as needed.
// For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear.
// This is gearing DOWN for less speed and more torque.
// For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation.
static final double COUNTS_PER_MOTOR_REV = 1440 ; // eg: TETRIX Motor Encoder
static final double DRIVE_GEAR_REDUCTION = 2.0 ; // This is < 1.0 if geared UP
static final double DRIVE_GEAR_REDUCTION = 1.0 ; // No External Gearing.
static final double WHEEL_DIAMETER_INCHES = 4.0 ; // For figuring circumference
static final double COUNTS_PER_INCH = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) /
(WHEEL_DIAMETER_INCHES * 3.1415);
@ -97,16 +105,21 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
@Override
public void runOpMode() {
/*
* Initialize the standard drive system variables.
* The init() method of the hardware class does most of the work here
*/
robot.init(hardwareMap);
// Initialize the drive system variables.
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
gyro = (ModernRoboticsI2cGyro)hardwareMap.gyroSensor.get("gyro");
// Ensure the robot is stationary, then reset the encoders and calibrate the gyro.
robot.leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
// Send telemetry message to alert driver that we are calibrating;
telemetry.addData(">", "Calibrating Gyro"); //
@ -123,43 +136,46 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
telemetry.addData(">", "Robot Ready."); //
telemetry.update();
robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Wait for the game to start (Display Gyro value), and reset gyro before we move..
while (!isStarted()) {
telemetry.addData(">", "Robot Heading = %d", gyro.getIntegratedZValue());
// Wait for the game to start (Display Gyro value)
while (opModeInInit()) {
telemetry.addData(">", "Robot Heading = %4.0f", getHeading());
telemetry.update();
}
// Reset gyro before we move..
gyro.resetZAxisIntegrator();
getHeading();
// Step through each leg of the path,
// Note: Reverse movement is obtained by setting a negative distance (not speed)
// Put a hold after each turn
gyroDrive(DRIVE_SPEED, 48.0, 0.0); // Drive FWD 48 inches
gyroTurn( TURN_SPEED, -45.0); // Turn CCW to -45 Degrees
gyroHold( TURN_SPEED, -45.0, 0.5); // Hold -45 Deg heading for a 1/2 second
gyroDrive(DRIVE_SPEED, 48.0, 0.0); // Drive FWD 48 inches
gyroTurn( TURN_SPEED, -45.0); // Turn CW to -45 Degrees
gyroHold( TURN_SPEED, -45.0, 0.5); // Hold -45 Deg heading for a 1/2 second
gyroDrive(DRIVE_SPEED, 12.0, -45.0); // Drive FWD 12 inches at 45 degrees
gyroTurn( TURN_SPEED, 45.0); // Turn CW to 45 Degrees
gyroTurn( TURN_SPEED, 45.0); // Turn CCW to 45 Degrees
gyroHold( TURN_SPEED, 45.0, 0.5); // Hold 45 Deg heading for a 1/2 second
gyroTurn( TURN_SPEED, 0.0); // Turn CW to 0 Degrees
gyroTurn( TURN_SPEED, 0.0); // Turn CW to 0 Degrees
gyroHold( TURN_SPEED, 0.0, 1.0); // Hold 0 Deg heading for a 1 second
gyroDrive(DRIVE_SPEED,-48.0, 0.0); // Drive REV 48 inches
telemetry.addData("Path", "Complete");
telemetry.addData("Final Heading", "%5.0f", getHeading());
telemetry.update();
sleep(1000); // Pause to display last telemetry message.
}
/**
* Method to drive on a fixed compass bearing (angle), based on encoder counts.
* Move will stop if either of these conditions occurs:
* 1) Move gets to the desired position
* 2) Driver stops the opmode running.
*
* @param speed Target speed for forward motion. Should allow for _/- variance for adjusting heading
* @param distance Distance (in inches) to move from current position. Negative distance means move backwards.
* @param speed Target speed for forward motion. Should allow for +/- variance for adjusting heading
* @param distance Distance (in inches) to move from current position. Negative distance means move backward.
* @param angle Absolute Angle (in Degrees) relative to last gyro reset.
* 0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
* If a relative angle is required, add/subtract from current heading.
@ -182,24 +198,24 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
// Determine new target position, and pass to motor controller
moveCounts = (int)(distance * COUNTS_PER_INCH);
newLeftTarget = robot.leftDrive.getCurrentPosition() + moveCounts;
newRightTarget = robot.rightDrive.getCurrentPosition() + moveCounts;
newLeftTarget = leftDrive.getCurrentPosition() + moveCounts;
newRightTarget = rightDrive.getCurrentPosition() + moveCounts;
// Set Target and Turn On RUN_TO_POSITION
robot.leftDrive.setTargetPosition(newLeftTarget);
robot.rightDrive.setTargetPosition(newRightTarget);
leftDrive.setTargetPosition(newLeftTarget);
rightDrive.setTargetPosition(newRightTarget);
robot.leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
// start motion.
speed = Range.clip(Math.abs(speed), 0.0, 1.0);
robot.leftDrive.setPower(speed);
robot.rightDrive.setPower(speed);
leftDrive.setPower(speed);
rightDrive.setPower(speed);
// keep looping while we are still active, and BOTH motors are running.
while (opModeIsActive() &&
(robot.leftDrive.isBusy() && robot.rightDrive.isBusy())) {
(leftDrive.isBusy() && rightDrive.isBusy())) {
// adjust relative speed based on heading error.
error = getError(angle);
@ -220,25 +236,26 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
rightSpeed /= max;
}
robot.leftDrive.setPower(leftSpeed);
robot.rightDrive.setPower(rightSpeed);
leftDrive.setPower(leftSpeed);
rightDrive.setPower(rightSpeed);
// Display drive status for the driver.
telemetry.addData("Err/St", "%5.1f/%5.1f", error, steer);
telemetry.addData("Target", "%7d:%7d", newLeftTarget, newRightTarget);
telemetry.addData("Actual", "%7d:%7d", robot.leftDrive.getCurrentPosition(),
robot.rightDrive.getCurrentPosition());
telemetry.addData("Speed", "%5.2f:%5.2f", leftSpeed, rightSpeed);
telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", angle, robotHeading);
telemetry.addData("Error:Steer", "%5.1f:%5.1f", error, steer);
telemetry.addData("Target L:R", "%7d:%7d", newLeftTarget, newRightTarget);
telemetry.addData("Actual L:R", "%7d:%7d", leftDrive.getCurrentPosition(),
rightDrive.getCurrentPosition());
telemetry.addData("Speed L:R", "%5.2f:%5.2f", leftSpeed, rightSpeed);
telemetry.update();
}
// Stop all motion;
robot.leftDrive.setPower(0);
robot.rightDrive.setPower(0);
leftDrive.setPower(0);
rightDrive.setPower(0);
// Turn off RUN_TO_POSITION
robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
}
}
@ -285,8 +302,8 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
}
// Stop all motion;
robot.leftDrive.setPower(0);
robot.rightDrive.setPower(0);
leftDrive.setPower(0);
rightDrive.setPower(0);
}
/**
@ -322,13 +339,13 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
}
// Send desired speeds to motors.
robot.leftDrive.setPower(leftSpeed);
robot.rightDrive.setPower(rightSpeed);
leftDrive.setPower(leftSpeed);
rightDrive.setPower(rightSpeed);
// Display it for the driver.
telemetry.addData("Target", "%5.2f", angle);
telemetry.addData("Err/St", "%5.2f/%5.2f", error, steer);
telemetry.addData("Speed.", "%5.2f:%5.2f", leftSpeed, rightSpeed);
telemetry.addData("Target/Current", "%5.2f / %5.0f", angle, robotHeading);
telemetry.addData("Error/Steer", "%5.2f / %5.2f", error, steer);
telemetry.addData("Speed.", "%5.2f : %5.2f", leftSpeed, rightSpeed);
return onTarget;
}
@ -344,12 +361,20 @@ public class PushbotAutoDriveByGyro_Linear extends LinearOpMode {
double robotError;
// calculate error in -179 to +180 range
robotError = targetAngle - gyro.getIntegratedZValue();
robotError = targetAngle - getHeading();
while (robotError > 180) robotError -= 360;
while (robotError <= -180) robotError += 360;
return robotError;
}
/**
* read and save the current robot heading
*/
public double getHeading() {
robotHeading = (double)gyro.getIntegratedZValue();
return robotHeading;
}
/**
* returns desired steering force. +/- 1 range. +ve = steer left
* @param error Error angle in robot relative degrees

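The diff cuts off before the body of getSteer(), so here is a minimal standalone sketch of the error-wrapping and proportional-steering logic used above (wrapError and steerFor are hypothetical helper names, mirroring getError()/getSteer()):

static double wrapError(double targetAngle, double currentHeading) {
    double error = targetAngle - currentHeading;
    while (error > 180)   error -= 360;   // wrap into the (-180, +180] range
    while (error <= -180) error += 360;
    return error;
}

static double steerFor(double error, double pCoeff) {
    // desired steering force in the +/- 1 range, proportional to the heading error
    return Math.max(-1.0, Math.min(1.0, error * pCoeff));
}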
View file

@ -32,35 +32,36 @@ package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
/**
* This file illustrates the concept of driving a path based on time.
* It uses the common Pushbot hardware class to define the drive on the robot.
* The code is structured as a LinearOpMode
*
* The code assumes that you do NOT have encoders on the wheels,
* otherwise you would use: PushbotAutoDriveByEncoder;
* otherwise you would use: RobotAutoDriveByEncoder;
*
* The desired path in this example is:
* - Drive forward for 3 seconds
* - Spin right for 1.3 seconds
* - Drive Backwards for 1 Second
* - Stop and close the claw.
* - Drive Backward for 1 Second
*
* The code is written in a simple form with no optimizations.
* However, there are several ways that this type of sequence could be streamlined.
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Autonomous(name="Pushbot: Auto Drive By Time", group="Pushbot")
@Autonomous(name="Robot: Auto Drive By Time", group="Robot")
@Disabled
public class PushbotAutoDriveByTime_Linear extends LinearOpMode {
public class RobotAutoDriveByTime_Linear extends LinearOpMode {
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
private ElapsedTime runtime = new ElapsedTime();
@ -70,11 +71,15 @@ public class PushbotAutoDriveByTime_Linear extends LinearOpMode {
@Override
public void runOpMode() {
/*
* Initialize the drive system variables.
* The init() method of the hardware class does all the work here
*/
robot.init(hardwareMap);
// Initialize the drive system variables.
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// Send telemetry message to signify robot waiting;
telemetry.addData("Status", "Ready to run"); //
@ -86,37 +91,35 @@ public class PushbotAutoDriveByTime_Linear extends LinearOpMode {
// Step through each leg of the path, ensuring that the Auto mode has not been stopped along the way
// Step 1: Drive forward for 3 seconds
robot.leftDrive.setPower(FORWARD_SPEED);
robot.rightDrive.setPower(FORWARD_SPEED);
leftDrive.setPower(FORWARD_SPEED);
rightDrive.setPower(FORWARD_SPEED);
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < 3.0)) {
telemetry.addData("Path", "Leg 1: %2.5f S Elapsed", runtime.seconds());
telemetry.addData("Path", "Leg 1: %4.1f S Elapsed", runtime.seconds());
telemetry.update();
}
// Step 2: Spin right for 1.3 seconds
robot.leftDrive.setPower(TURN_SPEED);
robot.rightDrive.setPower(-TURN_SPEED);
leftDrive.setPower(TURN_SPEED);
rightDrive.setPower(-TURN_SPEED);
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < 1.3)) {
telemetry.addData("Path", "Leg 2: %2.5f S Elapsed", runtime.seconds());
telemetry.addData("Path", "Leg 2: %4.1f S Elapsed", runtime.seconds());
telemetry.update();
}
// Step 3: Drive Backwards for 1 Second
robot.leftDrive.setPower(-FORWARD_SPEED);
robot.rightDrive.setPower(-FORWARD_SPEED);
// Step 3: Drive Backward for 1 Second
leftDrive.setPower(-FORWARD_SPEED);
rightDrive.setPower(-FORWARD_SPEED);
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < 1.0)) {
telemetry.addData("Path", "Leg 3: %2.5f S Elapsed", runtime.seconds());
telemetry.addData("Path", "Leg 3: %4.1f S Elapsed", runtime.seconds());
telemetry.update();
}
// Step 4: Stop and close the claw.
robot.leftDrive.setPower(0);
robot.rightDrive.setPower(0);
robot.leftClaw.setPosition(1.0);
robot.rightClaw.setPosition(0.0);
// Step 4: Stop
leftDrive.setPower(0);
rightDrive.setPower(0);
telemetry.addData("Path", "Complete");
telemetry.update();

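Since the three legs above repeat the same power/timer/telemetry pattern, they could be factored into one helper. A sketch, assuming it lives inside this same LinearOpMode (driveForTime is a hypothetical name):

private void driveForTime(double leftPower, double rightPower, double seconds, String legName) {
    leftDrive.setPower(leftPower);
    rightDrive.setPower(rightPower);
    runtime.reset();
    while (opModeIsActive() && (runtime.seconds() < seconds)) {
        telemetry.addData("Path", "%s: %4.1f S Elapsed", legName, runtime.seconds());
        telemetry.update();
    }
}

For example, Leg 1 then becomes driveForTime(FORWARD_SPEED, FORWARD_SPEED, 3.0, "Leg 1");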
View file

@ -0,0 +1,144 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.LightSensor;
import com.qualcomm.robotcore.hardware.NormalizedColorSensor;
import com.qualcomm.robotcore.hardware.NormalizedRGBA;
import com.qualcomm.robotcore.hardware.SwitchableLight;
/**
* This file illustrates the concept of driving up to a line and then stopping.
* The code is structured as a LinearOpMode
*
* The Sensor used here can be a REV Color Sensor V2 or V3. Make sure the white LED is turned on.
* The sensor can be plugged into any I2C port, and must be named "sensor_color" in the active configuration.
*
* Depending on the height of your color sensor, you may want to set the sensor "gain".
* The higher the gain, the greater the reflected light reading will be.
* Use the SensorColor sample in this folder to determine the minimum gain value that provides an
* "Alpha" reading of 1.0 when you are on top of the white line. In this sample, we use a gain of 15
* which works well with a Rev V2 color sensor
*
* Setting the correct WHITE_THRESHOLD value is key to stopping correctly.
* This should be set halfway between the bare-tile, and white-line "Alpha" values.
* The reflected light value can be read on the screen once the OpMode has been INIT, but before it is STARTED.
* Move the sensor on and off the white line and note the min and max readings.
* Edit this code to make WHITE_THRESHOLD halfway between the min and max.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Autonomous(name="Robot: Auto Drive To Line", group="Robot")
@Disabled
public class RobotAutoDriveToLine_Linear extends LinearOpMode {
/* Declare OpMode members. */
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
/** The colorSensor field will contain a reference to our color sensor hardware object */
NormalizedColorSensor colorSensor;
static final double WHITE_THRESHOLD = 0.5; // spans between 0.0 - 1.0 from dark to light
static final double APPROACH_SPEED = 0.25;
@Override
public void runOpMode() {
// Initialize the drive system variables.
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Get a reference to our sensor object. It's recommended to use NormalizedColorSensor over
// ColorSensor, because NormalizedColorSensor consistently gives values between 0 and 1, while
// the values you get from ColorSensor are dependent on the specific sensor you're using.
colorSensor = hardwareMap.get(NormalizedColorSensor.class, "sensor_color");
// If necessary, turn ON the white LED (if there is no LED switch on the sensor)
if (colorSensor instanceof SwitchableLight) {
((SwitchableLight)colorSensor).enableLight(true);
}
// Some sensors allow you to set your light sensor gain for optimal sensitivity...
// See the SensorColor sample in this folder for how to determine the optimal gain.
// A gain of 15 causes a Rev Color Sensor V2 to produce an Alpha value of 1.0 at about 1.5" above the floor.
colorSensor.setGain(15);
// Wait for driver to press PLAY
// Abort this loop if started or stopped.
while (opModeInInit()) {
// Send telemetry message to signify robot waiting;
telemetry.addData("Status", "Ready to drive to white line."); //
// Display the light level while we are waiting to start
getBrightness();
}
// Start the robot moving forward, and then begin looking for a white line.
leftDrive.setPower(APPROACH_SPEED);
rightDrive.setPower(APPROACH_SPEED);
// run until the white line is seen OR the driver presses STOP;
while (opModeIsActive() && (getBrightness() < WHITE_THRESHOLD)) {
sleep(5);
}
// Stop all motors
leftDrive.setPower(0);
rightDrive.setPower(0);
}
// to obtain reflected light, read the normalized values from the color sensor. Return the Alpha channel.
double getBrightness() {
NormalizedRGBA colors = colorSensor.getNormalizedColors();
telemetry.addData("Light Level (0 to 1)", "%4.2f", colors.alpha);
telemetry.update();
return colors.alpha;
}
}

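The calibration procedure described in the header comment (read the min and max "Alpha" values, then set WHITE_THRESHOLD halfway between them) can also be done live during init. A sketch of such an init-loop, offered as a hypothetical replacement for the opModeInInit() loop above:

double minAlpha = 1.0;
double maxAlpha = 0.0;
while (opModeInInit()) {
    double alpha = colorSensor.getNormalizedColors().alpha;
    minAlpha = Math.min(minAlpha, alpha);               // track darkest reading (bare tile)
    maxAlpha = Math.max(maxAlpha, alpha);               // track brightest reading (white line)
    telemetry.addData("Alpha now", "%4.2f", alpha);
    telemetry.addData("Suggested WHITE_THRESHOLD", "%4.2f", (minAlpha + maxAlpha) / 2.0);
    telemetry.update();
}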
View file

@ -32,30 +32,39 @@ package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;
/**
* This OpMode uses the common Pushbot hardware class to define the devices on the robot.
* All device access is managed through the HardwarePushbot class.
* This particular OpMode executes a POV Game style Teleop for a direct drive robot
* The code is structured as a LinearOpMode
*
* This particular OpMode executes a POV Game style Teleop for a PushBot
* In this mode the left stick moves the robot FWD and back, the Right stick turns left and right.
* It raises and lowers the claw using the Gampad Y and A buttons respectively.
* It raises and lowers the arm using the Gamepad Y and A buttons respectively.
* It also opens and closes the claws slowly using the left and right Bumper buttons.
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@TeleOp(name="Pushbot: Teleop POV", group="Pushbot")
@TeleOp(name="Robot: Teleop POV", group="Robot")
@Disabled
public class PushbotTeleopPOV_Linear extends LinearOpMode {
public class RobotTeleopPOV_Linear extends LinearOpMode {
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
double clawOffset = 0; // Servo mid position
final double CLAW_SPEED = 0.02 ; // sets rate to move servo
public DcMotor leftDrive = null;
public DcMotor rightDrive = null;
public DcMotor leftArm = null;
public Servo leftClaw = null;
public Servo rightClaw = null;
double clawOffset = 0;
public static final double MID_SERVO = 0.5 ;
public static final double CLAW_SPEED = 0.02 ; // sets rate to move servo
public static final double ARM_UP_POWER = 0.45 ;
public static final double ARM_DOWN_POWER = -0.45 ;
@Override
public void runOpMode() {
@ -65,10 +74,26 @@ public class PushbotTeleopPOV_Linear extends LinearOpMode {
double turn;
double max;
/* Initialize the hardware variables.
* The init() method of the hardware class does all the work here
*/
robot.init(hardwareMap);
// Define and Initialize Motors
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
leftArm = hardwareMap.get(DcMotor.class, "left_arm");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Define and initialize ALL installed servos.
leftClaw = hardwareMap.get(Servo.class, "left_hand");
rightClaw = hardwareMap.get(Servo.class, "right_hand");
leftClaw.setPosition(MID_SERVO);
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData("Say", "Hello Driver"); //
@ -80,7 +105,7 @@ public class PushbotTeleopPOV_Linear extends LinearOpMode {
// run until the end of the match (driver presses STOP)
while (opModeIsActive()) {
// Run wheels in POV mode (note: The joystick goes negative when pushed forwards, so negate it)
// Run wheels in POV mode (note: The joystick goes negative when pushed forward, so negate it)
// In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right.
// This way it's also easy to just drive straight, or just turn.
drive = -gamepad1.left_stick_y;
@ -99,8 +124,8 @@ public class PushbotTeleopPOV_Linear extends LinearOpMode {
}
// Output the safe values to the motor drives.
robot.leftDrive.setPower(left);
robot.rightDrive.setPower(right);
leftDrive.setPower(left);
rightDrive.setPower(right);
// Use gamepad left & right Bumpers to open and close the claw
if (gamepad1.right_bumper)
@ -110,16 +135,16 @@ public class PushbotTeleopPOV_Linear extends LinearOpMode {
// Move both servos to new position. Assume servos are mirror image of each other.
clawOffset = Range.clip(clawOffset, -0.5, 0.5);
robot.leftClaw.setPosition(robot.MID_SERVO + clawOffset);
robot.rightClaw.setPosition(robot.MID_SERVO - clawOffset);
leftClaw.setPosition(MID_SERVO + clawOffset);
rightClaw.setPosition(MID_SERVO - clawOffset);
// Use gamepad buttons to move arm up (Y) and down (A)
if (gamepad1.y)
robot.leftArm.setPower(robot.ARM_UP_POWER);
leftArm.setPower(ARM_UP_POWER);
else if (gamepad1.a)
robot.leftArm.setPower(robot.ARM_DOWN_POWER);
leftArm.setPower(ARM_DOWN_POWER);
else
robot.leftArm.setPower(0.0);
leftArm.setPower(0.0);
// Send telemetry message to signify robot running;
telemetry.addData("claw", "Offset = %.2f", clawOffset);

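For reference, the POV mixing this OpMode performs in its loop reduces to a few lines: combine drive and turn, then scale both sides so neither exceeds 100% power. A condensed sketch using the same fields as the class above:

double drive = -gamepad1.left_stick_y;   // stick reads negative when pushed forward
double turn  =  gamepad1.right_stick_x;
double left  = drive + turn;
double right = drive - turn;
double max   = Math.max(Math.abs(left), Math.abs(right));
if (max > 1.0) {
    left  /= max;   // scale both sides equally so the left/right ratio is preserved
    right /= max;
}
leftDrive.setPower(left);
rightDrive.setPower(right);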
View file

@ -32,41 +32,66 @@ package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;
/**
* This file provides basic Telop driving for a Pushbot robot.
* This particular OpMode executes a Tank Drive control TeleOp for a direct drive robot
* The code is structured as an Iterative OpMode
*
* This OpMode uses the common Pushbot hardware class to define the devices on the robot.
* All device access is managed through the HardwarePushbot class.
*
* This particular OpMode executes a basic Tank Drive Teleop for a PushBot
* It raises and lowers the claw using the Gampad Y and A buttons respectively.
* In this mode, the left and right joysticks control the left and right motors respectively.
* Pushing a joystick forward will make the attached motor drive forward.
* It raises and lowers the claw using the Gamepad Y and A buttons respectively.
* It also opens and closes the claws slowly using the left and right Bumper buttons.
*
* Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name="Pushbot: Teleop Tank", group="Pushbot")
@TeleOp(name="Robot: Teleop Tank", group="Robot")
@Disabled
public class PushbotTeleopTank_Iterative extends OpMode{
public class RobotTeleopTank_Iterative extends OpMode{
/* Declare OpMode members. */
HardwarePushbot robot = new HardwarePushbot(); // use the class created to define a Pushbot's hardware
double clawOffset = 0.0 ; // Servo mid position
final double CLAW_SPEED = 0.02 ; // sets rate to move servo
public DcMotor leftDrive = null;
public DcMotor rightDrive = null;
public DcMotor leftArm = null;
public Servo leftClaw = null;
public Servo rightClaw = null;
double clawOffset = 0;
public static final double MID_SERVO = 0.5 ;
public static final double CLAW_SPEED = 0.02 ; // sets rate to move servo
public static final double ARM_UP_POWER = 0.50 ; // Run arm motor up at 50% power
public static final double ARM_DOWN_POWER = -0.25 ; // Run arm motor down at -25% power
/*
* Code to run ONCE when the driver hits INIT
*/
@Override
public void init() {
/* Initialize the hardware variables.
* The init() method of the hardware class does all the work here
*/
robot.init(hardwareMap);
// Define and Initialize Motors
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
leftArm = hardwareMap.get(DcMotor.class, "left_arm");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// Pushing the left and right sticks forward MUST make robot go forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
// leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Define and initialize ALL installed servos.
leftClaw = hardwareMap.get(Servo.class, "left_hand");
rightClaw = hardwareMap.get(Servo.class, "right_hand");
leftClaw.setPosition(MID_SERVO);
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData("Say", "Hello Driver"); //
@ -94,12 +119,12 @@ public class PushbotTeleopTank_Iterative extends OpMode{
double left;
double right;
// Run wheels in tank mode (note: The joystick goes negative when pushed forwards, so negate it)
// Run wheels in tank mode (note: The joystick goes negative when pushed forward, so negate it)
left = -gamepad1.left_stick_y;
right = -gamepad1.right_stick_y;
robot.leftDrive.setPower(left);
robot.rightDrive.setPower(right);
leftDrive.setPower(left);
rightDrive.setPower(right);
// Use gamepad left & right Bumpers to open and close the claw
if (gamepad1.right_bumper)
@ -109,16 +134,16 @@ public class PushbotTeleopTank_Iterative extends OpMode{
// Move both servos to new position. Assume servos are mirror image of each other.
clawOffset = Range.clip(clawOffset, -0.5, 0.5);
robot.leftClaw.setPosition(robot.MID_SERVO + clawOffset);
robot.rightClaw.setPosition(robot.MID_SERVO - clawOffset);
leftClaw.setPosition(MID_SERVO + clawOffset);
rightClaw.setPosition(MID_SERVO - clawOffset);
// Use gamepad buttons to move the arm up (Y) and down (A)
if (gamepad1.y)
robot.leftArm.setPower(robot.ARM_UP_POWER);
leftArm.setPower(ARM_UP_POWER);
else if (gamepad1.a)
robot.leftArm.setPower(robot.ARM_DOWN_POWER);
leftArm.setPower(ARM_DOWN_POWER);
else
robot.leftArm.setPower(0.0);
leftArm.setPower(0.0);
// Send telemetry message to signify robot running;
telemetry.addData("claw", "Offset = %.2f", clawOffset);

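For contrast with the LinearOpMode samples above, this file uses the Iterative OpMode lifecycle, where the SDK calls each phase method for you. A minimal skeleton (class and OpMode names are hypothetical, not part of this diff):

import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

@TeleOp(name="Skeleton: Iterative", group="Concept")
public class SkeletonIterative extends OpMode {
    @Override public void init()      { /* runs once when the driver presses INIT */ }
    @Override public void init_loop() { /* repeats between INIT and PLAY */ }
    @Override public void start()     { /* runs once when the driver presses PLAY */ }
    @Override public void loop()      { /* repeats until the driver presses STOP */ }
    @Override public void stop()      { /* runs once when the driver presses STOP */ }
}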
View file

@ -1,167 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.app.Activity;
import android.graphics.Color;
import android.view.View;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DeviceInterfaceModule;
import com.qualcomm.robotcore.hardware.DigitalChannel;
/*
*
* This is an example LinearOpMode that shows how to use
* the Adafruit RGB Sensor. It assumes that the I2C
* cable for the sensor is connected to an I2C port on the
* Core Device Interface Module.
*
* It also assumes that the LED pin of the sensor is connected
* to the digital signal pin of a digital port on the
* Core Device Interface Module.
*
* You can use the digital port to turn the sensor's onboard
* LED on or off.
*
* The op mode assumes that the Core Device Interface Module
* is configured with a name of "dim" and that the Adafruit color sensor
* is configured as an I2C device with a name of "sensor_color".
*
* It also assumes that the LED pin of the RGB sensor
* is connected to the signal pin of digital port #5 (zero indexed)
* of the Core Device Interface Module.
*
* You can use the X button on gamepad1 to toggle the LED on and off.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@TeleOp(name = "Sensor: AdafruitRGB", group = "Sensor")
@Disabled // Comment this out to add to the opmode list
public class SensorAdafruitRGB extends LinearOpMode {
ColorSensor sensorRGB;
DeviceInterfaceModule cdim;
// we assume that the LED pin of the RGB sensor is connected to
// digital port 5 (zero indexed).
static final int LED_CHANNEL = 5;
@Override
public void runOpMode() {
// hsvValues is an array that will hold the hue, saturation, and value information.
float hsvValues[] = {0F,0F,0F};
// values is a reference to the hsvValues array.
final float values[] = hsvValues;
// get a reference to the RelativeLayout so we can change the background
// color of the Robot Controller app to match the hue detected by the RGB sensor.
int relativeLayoutId = hardwareMap.appContext.getResources().getIdentifier("RelativeLayout", "id", hardwareMap.appContext.getPackageName());
final View relativeLayout = ((Activity) hardwareMap.appContext).findViewById(relativeLayoutId);
// bPrevState and bCurrState represent the previous and current state of the button.
boolean bPrevState = false;
boolean bCurrState = false;
// bLedOn represents the state of the LED.
boolean bLedOn = true;
// get a reference to our DeviceInterfaceModule object.
cdim = hardwareMap.deviceInterfaceModule.get("dim");
// set the digital channel to output mode.
// remember, the Adafruit sensor is actually two devices.
// It's an I2C sensor and it's also an LED that can be turned on or off.
cdim.setDigitalChannelMode(LED_CHANNEL, DigitalChannel.Mode.OUTPUT);
// get a reference to our ColorSensor object.
sensorRGB = hardwareMap.colorSensor.get("sensor_color");
// turn the LED on in the beginning, just so user will know that the sensor is active.
cdim.setDigitalChannelState(LED_CHANNEL, bLedOn);
// wait for the start button to be pressed.
waitForStart();
// loop and read the RGB data.
// Note we use opModeIsActive() as our loop condition because it is an interruptible method.
while (opModeIsActive()) {
// check the status of the x button on gamepad.
bCurrState = gamepad1.x;
// check for button-press state transitions.
if ((bCurrState == true) && (bCurrState != bPrevState)) {
// button is transitioning to a pressed state. Toggle the LED.
bLedOn = !bLedOn;
cdim.setDigitalChannelState(LED_CHANNEL, bLedOn);
}
// update previous state variable.
bPrevState = bCurrState;
// convert the RGB values to HSV values.
Color.RGBToHSV((sensorRGB.red() * 255) / 800, (sensorRGB.green() * 255) / 800, (sensorRGB.blue() * 255) / 800, hsvValues);
// send the info back to driver station using telemetry function.
telemetry.addData("LED", bLedOn ? "On" : "Off");
telemetry.addData("Clear", sensorRGB.alpha());
telemetry.addData("Red ", sensorRGB.red());
telemetry.addData("Green", sensorRGB.green());
telemetry.addData("Blue ", sensorRGB.blue());
telemetry.addData("Hue", hsvValues[0]);
// change the background color to match the color detected by the RGB sensor.
// pass a reference to the hue, saturation, and value array as an argument
// to the HSVToColor method.
relativeLayout.post(new Runnable() {
public void run() {
relativeLayout.setBackgroundColor(Color.HSVToColor(0xff, values));
}
});
telemetry.update();
}
// Set the panel back to the default color
relativeLayout.post(new Runnable() {
public void run() {
relativeLayout.setBackgroundColor(Color.WHITE);
}
});
}
}

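The removed sample's X-button handling is a general press-toggle idiom worth keeping: act on the rising edge of the button, not on its level. Reduced to its core (names follow the sample above):

boolean bPrevState = false;
boolean bLedOn     = true;
while (opModeIsActive()) {
    boolean bCurrState = gamepad1.x;
    if (bCurrState && !bPrevState) {   // button just transitioned to pressed
        bLedOn = !bLedOn;              // toggle exactly once per press
        // apply bLedOn to the hardware here, e.g. an LED channel
    }
    bPrevState = bCurrState;
}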
View file

@ -1,107 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DeviceInterfaceModule;
import com.qualcomm.robotcore.hardware.DigitalChannel;
/*
* This is an example LinearOpMode that shows how to use the digital inputs and outputs on
* the Modern Robotics Device Interface Module. In addition, it shows how to use the Red and Blue LEDs.
*
* This op mode assumes that there is a Device Interface Module attached, named 'dim'.
* On this DIM there is a digital input named 'digin' and an output named 'digout'
*
* To fully exercise this sample, connect pin 3 of the digin connector to pin 3 of the digout.
* Note: Pin 1 is indicated by the black stripe, so pin 3 is at the opposite end.
*
* The X button on the gamepad will be used to activate the digital output pin.
* The Red/Blue LED will be used to indicate the state of the digital input pin.
* Blue = false (0V), Red = true (5V)
* If the two pins are linked, the gamepad will change the LED color.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*/
@TeleOp(name = "Sensor: DIM DIO", group = "Sensor")
@Disabled
public class SensorDIO extends LinearOpMode {
final int BLUE_LED_CHANNEL = 0;
final int RED_LED_CHANNEL = 1;
@Override
public void runOpMode() {
boolean inputPin; // Input State
boolean outputPin; // Output State
DeviceInterfaceModule dim; // Device Object
DigitalChannel digIn; // Device Object
DigitalChannel digOut; // Device Object
// get a reference to a Modern Robotics DIM, and IO channels.
dim = hardwareMap.get(DeviceInterfaceModule.class, "dim"); // Use generic form of device mapping
digIn = hardwareMap.get(DigitalChannel.class, "digin"); // Use generic form of device mapping
digOut = hardwareMap.get(DigitalChannel.class, "digout"); // Use generic form of device mapping
digIn.setMode(DigitalChannel.Mode.INPUT); // Set the direction of each channel
digOut.setMode(DigitalChannel.Mode.OUTPUT);
// wait for the start button to be pressed.
telemetry.addData(">", "Press play, and then use the X button to set DigOut");
telemetry.update();
waitForStart();
while (opModeIsActive()) {
outputPin = gamepad1.x ; // Set the output pin based on x button
digOut.setState(outputPin);
inputPin = digIn.getState(); // Read the input pin
// Display input pin state on LEDs
if (inputPin) {
dim.setLED(RED_LED_CHANNEL, true);
dim.setLED(BLUE_LED_CHANNEL, false);
}
else {
dim.setLED(RED_LED_CHANNEL, false);
dim.setLED(BLUE_LED_CHANNEL, true);
}
telemetry.addData("Output", outputPin );
telemetry.addData("Input", inputPin );
telemetry.addData("LED", inputPin ? "Red" : "Blue" );
telemetry.update();
}
}
}

View file

@ -49,7 +49,7 @@ import com.qualcomm.robotcore.hardware.ColorSensor;
* You can use the X button on gamepad1 to toggle the LED on and off.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name = "Sensor: MR Color", group = "Sensor")
@Disabled

View file

@ -49,7 +49,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
* I2C channel and is configured with a name of "gyro".
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name = "Sensor: MR Gyro", group = "Sensor")
@Disabled
@ -83,7 +83,7 @@ public class SensorMRGyro extends LinearOpMode {
// A similar approach will work for the Gyroscope interface, if that's all you need.
// Start calibrating the gyro. This takes a few seconds and is worth performing
// during the initialization phase at the start of each opMode.
// during the initialization phase at the start of each OpMode.
telemetry.log().add("Gyro Calibrating. Do Not Move!");
modernRoboticsI2cGyro.calibrate();

View file

@ -27,15 +27,9 @@ Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended to drive a functioning robot; it simply shows the minimal code
required to read and display the sensor values.
Hardware: This is NOT an OpMode, but a helper class that is used to describe
one particular robot's hardware configuration: eg: For the K9 or Pushbot.
Look at any Pushbot sample to see how this can be used in an OpMode.
Teams can copy one of these to their team folder to create their own robot definition.
Pushbot: This is a Sample OpMode that uses the Pushbot robot hardware as a base.
It may be used to provide some standard baseline Pushbot OpModes, or
to demonstrate how a particular sensor or concept can be used directly on the
Pushbot chassis.
Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
It may be used to provide a common baseline driving OpMode, or
to demonstrate how a particular sensor or concept can be used to navigate.
Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
@ -50,8 +44,7 @@ Library: This is a class, or set of classes used to implement some strategy.
After the prefix, other conventions will apply:
* Sensor class names are constructed as: Sensor - Company - Type
* Hardware class names are constructed as: Hardware - Robot type
* Pushbot class names are constructed as: Pushbot - Mode - Action - OpModetype
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype
* Library class names are constructed as: Library - Topic - OpModetype

View file

@ -18,14 +18,14 @@ Sensor: This is a Sample OpMode that shows how to use a specific sensor.
required to read and display the sensor values.
Hardware: This is not an actual OpMode, but a helper class that is used to describe
one particular robot's hardware configuration: eg: For the K9 or Pushbot.
Look at any Pushbot sample to see how this can be used in an OpMode.
one particular robot's hardware configuration: eg: For the K9 or Robot.
Look at any Robot sample to see how this can be used in an OpMode.
Teams can copy one of these to create their own robot definition.
Pushbot: This is a Sample OpMode that uses the Pushbot robot hardware as a base.
It may be used to provide some standard baseline Pushbot opmodes, or
Robot: This is a Sample OpMode that uses the Robot robot hardware as a base.
It may be used to provide some standard baseline Robot opmodes, or
to demonstrate how a particular sensor or concept can be used directly on the
Pushbot chassis.
Robot chassis.
Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
@ -41,7 +41,7 @@ After the prefix, other conventions will apply:
* Sensor class names should constructed as: Sensor - Company - Type
* Hardware class names should be constructed as: Hardware - Robot type
* Pushbot class names should be constructed as: Pushbot - Mode - Action - OpModetype
* Robot class names should be constructed as: Robot - Mode - Action - OpModetype
* Concept class names should be constructed as: Concept - Topic - OpModetype
* Library class names should be constructed as: Library - Topic - OpModetype

View file

@ -40,7 +40,6 @@ import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbManager;
import android.net.wifi.WifiManager;
@ -89,6 +88,7 @@ import com.qualcomm.robotcore.hardware.configuration.LynxConstants;
import com.qualcomm.robotcore.hardware.configuration.Utility;
import com.qualcomm.robotcore.robot.Robot;
import com.qualcomm.robotcore.robot.RobotState;
import com.qualcomm.robotcore.util.ClockWarningSource;
import com.qualcomm.robotcore.util.Device;
import com.qualcomm.robotcore.util.Dimmer;
import com.qualcomm.robotcore.util.ImmersiveMode;
@ -99,8 +99,10 @@ import com.qualcomm.robotcore.wifi.NetworkConnectionFactory;
import com.qualcomm.robotcore.wifi.NetworkType;
import org.firstinspires.ftc.ftccommon.external.SoundPlayingRobotMonitor;
import org.firstinspires.ftc.ftccommon.internal.AnnotatedHooksClassFilter;
import org.firstinspires.ftc.ftccommon.internal.FtcRobotControllerWatchdogService;
import org.firstinspires.ftc.ftccommon.internal.ProgramAndManageActivity;
import org.firstinspires.ftc.onbotjava.ExternalLibraries;
import org.firstinspires.ftc.onbotjava.OnBotJavaHelperImpl;
import org.firstinspires.ftc.onbotjava.OnBotJavaProgrammingMode;
import org.firstinspires.ftc.robotcore.external.navigation.MotionDetection;
@ -112,6 +114,7 @@ import org.firstinspires.ftc.robotcore.internal.network.WifiDirectChannelChanger
import org.firstinspires.ftc.robotcore.internal.network.WifiMuteEvent;
import org.firstinspires.ftc.robotcore.internal.network.WifiMuteStateMachine;
import org.firstinspires.ftc.robotcore.internal.opmode.ClassManager;
import org.firstinspires.ftc.robotcore.internal.opmode.OnBotJavaHelper;
import org.firstinspires.ftc.robotcore.internal.system.AppAliveNotifier;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import org.firstinspires.ftc.robotcore.internal.system.Assert;
@ -122,7 +125,10 @@ import org.firstinspires.ftc.robotcore.internal.ui.UILocation;
import org.firstinspires.ftc.robotcore.internal.webserver.RobotControllerWebInfo;
import org.firstinspires.ftc.robotserver.internal.programmingmode.ProgrammingModeManager;
import org.firstinspires.inspection.RcInspectionActivity;
import org.threeten.bp.YearMonth;
import org.xmlpull.v1.XmlPullParserException;
import java.io.FileNotFoundException;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
@ -139,6 +145,8 @@ public class FtcRobotControllerActivity extends Activity
protected WifiManager.WifiLock wifiLock;
protected RobotConfigFileManager cfgFileMgr;
private OnBotJavaHelper onBotJavaHelper;
protected ProgrammingModeManager programmingModeManager;
protected UpdateUI.Callback callback;
@ -298,6 +306,18 @@ public class FtcRobotControllerActivity extends Activity
preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true);
preferencesHelper.getSharedPreferences().registerOnSharedPreferenceChangeListener(sharedPreferencesListener);
// Check if this RC app is from a later FTC season than what was installed previously
int ftcSeasonYearOfPreviouslyInstalledRc = preferencesHelper.readInt(getString(R.string.pref_ftc_season_year_of_current_rc), 0);
int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(YearMonth.now()).getValue();
if (ftcSeasonYearOfCurrentlyInstalledRc > ftcSeasonYearOfPreviouslyInstalledRc) {
preferencesHelper.writeIntPrefIfDifferent(getString(R.string.pref_ftc_season_year_of_current_rc), ftcSeasonYearOfCurrentlyInstalledRc);
// Since it's a new FTC season, we should reset certain settings back to their default values.
preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_2_4_ghz_band), true);
preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_obsolete_software), true);
preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_mismatched_app_versions), true);
preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_incorrect_clocks), true);
}
entireScreenLayout = (LinearLayout) findViewById(R.id.entire_screen);
buttonMenu = (ImageButton) findViewById(R.id.menu_buttons);
buttonMenu.setOnClickListener(new View.OnClickListener() {
@ -311,6 +331,8 @@ public class FtcRobotControllerActivity extends Activity
}
});
popupMenu.inflate(R.menu.ftc_robot_controller);
AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods(
FtcRobotControllerActivity.this, popupMenu.getMenu());
popupMenu.show();
}
});
@ -319,6 +341,9 @@ public class FtcRobotControllerActivity extends Activity
BlocksOpMode.setActivityAndWebView(this, (WebView) findViewById(R.id.webViewBlocksRuntime));
ExternalLibraries.getInstance().onCreate();
onBotJavaHelper = new OnBotJavaHelperImpl();
/*
* Paranoia as the ClassManagerFactory requires EXTERNAL_STORAGE permissions
* and we've seen on the DS where the finish() call above does not short-circuit
@ -326,7 +351,7 @@ public class FtcRobotControllerActivity extends Activity
* have permissions. So...
*/
if (permissionsValidated) {
ClassManager.getInstance().setOnBotJavaClassHelper(new OnBotJavaHelperImpl());
ClassManager.getInstance().setOnBotJavaClassHelper(onBotJavaHelper);
ClassManagerFactory.registerFilters();
ClassManagerFactory.processAllClasses();
}
@ -379,10 +404,12 @@ public class FtcRobotControllerActivity extends Activity
initWifiMute(true);
}
FtcAboutActivity.setBuildTimeFromBuildConfig(BuildConfig.BUILD_TIME);
FtcAboutActivity.setBuildTimeFromBuildConfig(BuildConfig.APP_BUILD_TIME);
// check to see if there is a preferred Wi-Fi to use.
checkPreferredChannel();
AnnotatedHooksClassFilter.getInstance().callOnCreateMethods(this);
}
protected UpdateUI createUpdateUI() {
@ -417,6 +444,9 @@ public class FtcRobotControllerActivity extends Activity
protected void onResume() {
super.onResume();
RobotLog.vv(TAG, "onResume()");
// In case the user just got back from fixing their clock, refresh ClockWarningSource
ClockWarningSource.getInstance().onPossibleRcClockUpdate();
}
@Override
@ -451,6 +481,8 @@ public class FtcRobotControllerActivity extends Activity
if (preferencesHelper != null) preferencesHelper.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(sharedPreferencesListener);
RobotLog.cancelWriteLogcatToDisk();
AnnotatedHooksClassFilter.getInstance().callOnDestroyMethods(this);
}
protected void bindToService() {
@ -481,18 +513,8 @@ public class FtcRobotControllerActivity extends Activity
}
protected void readNetworkType() {
// The code here used to defer to the value found in a configuration file
// to configure the network type. If the file was absent, then it initialized
// it with a default.
//
// However, bugs have been reported with that approach (empty config files, specifically).
// Moreover, the non-Wifi-Direct networking is end-of-life, so the simplest and most robust
// (e.g.: no one can screw things up by messing with the contents of the config file) fix is
// to do away with configuration file entirely.
//
// Control hubs are always running the access point model. Everything else, for the time
// being always runs the wifi direct model.
// being always runs the Wi-Fi Direct model.
if (Device.isRevControlHub() == true) {
networkType = NetworkType.RCWIRELESSAP;
} else {
@ -516,6 +538,7 @@ public class FtcRobotControllerActivity extends Activity
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.ftc_robot_controller, menu);
AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods(this, menu);
return true;
}
@ -660,6 +683,8 @@ public class FtcRobotControllerActivity extends Activity
controllerService = service;
updateUI.setControllerService(controllerService);
controllerService.setOnBotJavaHelper(onBotJavaHelper);
updateUIAndRequestRobotSetup();
programmingModeManager.setState(new FtcRobotControllerServiceState() {
@NonNull
@ -668,11 +693,20 @@ public class FtcRobotControllerActivity extends Activity
return service.getWebServer();
}
@Nullable
@Override
public OnBotJavaHelper getOnBotJavaHelper() {
return service.getOnBotJavaHelper();
}
@Override
public EventLoopManager getEventLoopManager() {
return service.getRobot().eventLoopManager;
}
});
AnnotatedHooksClassFilter.getInstance().callWebHandlerRegistrarMethods(this,
service.getWebServer().getWebHandlerManager());
}
private void updateUIAndRequestRobotSetup() {
@ -697,10 +731,15 @@ public class FtcRobotControllerActivity extends Activity
HardwareFactory hardwareFactory = new HardwareFactory(context);
try {
hardwareFactory.setXmlPullParser(file.getXml());
} catch (Resources.NotFoundException e) {
} catch (FileNotFoundException | XmlPullParserException e) {
RobotLog.ww(TAG, e, "Unable to set configuration file %s. Falling back on noConfig.", file.getName());
file = RobotConfigFile.noConfig(cfgFileMgr);
hardwareFactory.setXmlPullParser(file.getXml());
cfgFileMgr.setActiveConfigAndUpdateUI(false, file);
try {
hardwareFactory.setXmlPullParser(file.getXml());
cfgFileMgr.setActiveConfigAndUpdateUI(false, file);
} catch (FileNotFoundException | XmlPullParserException e1) {
RobotLog.ee(TAG, e1, "Failed to fall back on noConfig");
}
}
OpModeRegister userOpModeRegister = createOpModeRegister();
@ -712,6 +751,8 @@ public class FtcRobotControllerActivity extends Activity
passReceivedUsbAttachmentsToEventLoop();
AndroidBoard.showErrorIfUnknownControlHub();
AnnotatedHooksClassFilter.getInstance().callOnCreateEventLoopMethods(this, eventLoop);
}
protected OpModeRegister createOpModeRegister() {

View file

@ -31,36 +31,37 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto">
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:showAsAction="never"
app:showAsAction="never"
android:title="@string/settings_menu_item"/>
<item
android:id="@+id/action_restart_robot"
android:orderInCategory="200"
android:showAsAction="never"
app:showAsAction="never"
android:title="@string/restart_robot_menu_item"/>
<item
android:id="@+id/action_configure_robot"
android:orderInCategory="300"
android:showAsAction="never"
app:showAsAction="never"
android:title="@string/configure_robot_menu_item"/>
<item
android:id="@+id/action_program_and_manage"
android:orderInCategory="550"
android:showAsAction="never"
app:showAsAction="never"
android:title="@string/program_and_manage_menu_item"/>
<item
android:id="@+id/action_inspection_mode"
android:orderInCategory="600"
android:showAsAction="never"
android:title="@string/inspection_mode_menu_item"/>
android:id="@+id/action_inspection_mode"
android:orderInCategory="600"
app:showAsAction="never"
android:title="@string/inspection_mode_menu_item"/>
<item
android:id="@+id/action_about"

View file

@ -65,6 +65,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<item>@style/AppThemeTealRC</item>
</integer-array>
<string name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc</string>
<string name="packageName">@string/packageNameRobotController</string>
</resources>

View file

@ -37,7 +37,6 @@ https://developer.android.com/guide/topics/connectivity/usb/host
<!-- see also RobotUsbDevice.getUsbIdentifiers() -->
<resources>
<usb-device vendor-id="1027" product-id="24577" /> <!-- FT232 Modern Robotics: 0x0403/0x6001 -->
<usb-device vendor-id="1027" product-id="24597" /> <!-- FT232 Lynx: 0x0403/0x6015 -->
<!-- cameras -->

29
LICENSE Normal file
View file

@ -0,0 +1,29 @@
Copyright (c) 2014-2022 FIRST. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of FIRST nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

186
README.md
View file

@ -1,8 +1,6 @@
## NOTICE
This repository contains the public FTC SDK for the Ultimate Goal (2020-2021) competition season.
Formerly this software project was hosted [here](https://github.com/FIRST-Tech-Challenge/Skystone). Teams who are competing in the Ultimate Goal Challenge should use this [new FtcRobotController repository](https://github.com/FIRST-Tech-Challenge/FtcRobotController) instead of last season's (no longer updated) Skystone repository.
This repository contains the public FTC SDK for the Freight Frenzy (2021-2022) competition season.
## Welcome!
This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.
@ -40,7 +38,7 @@ Note that the online documentation is an "evergreen" document that is constantly
### Javadoc Reference Material
The Javadoc reference documentation for the FTC SDK is now available online. Click on the following link to view the FTC SDK Javadoc documentation as a live website:
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Javadoc Documentation](https://first-tech-challenge.github.io/FtcRobotController)
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Javadoc Documentation](https://javadoc.io/doc/org.firstinspires.ftc)
### Online User Forum
For technical questions regarding the Control System or the FTC SDK, please visit the FTC Technology forum:
@ -56,12 +54,151 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
# Release Information
## Version 7.2 (20220723-130006)
### Breaking Changes
* Updates the build tooling. For Android Studio users, this change requires Android Studio Chipmunk 2021.2.1.
* Removes support for devices that are not competition legal, including Modern Robotics Core Control Modules, the Matrix Controller, and HiTechnic/NXT controllers and sensors. Support remains for Modern Robotics I2C sensors.
### Enhancements
* Increases the height of the 3-dots Landscape menu touch area on the Driver Station, making it much easier to select.
* Adds `terminateOpModeNow()` method to allow OpModes to cleanly self-exit immediately.
* Adds `opModeInInit()` method to `LinearOpMode` to facilitate init-loops. Similar to `opModeIsActive()` but for the init phase (see the sketch after this list).
* Warns the user if a connected Logitech F310 gamepad is set to DirectInput mode.
* Allows SPARKmini motor controllers to react more quickly to speed changes.
* Hides the version number of an incorrectly installed sister app (i.e., DS installed on an RC device, or vice versa) on the inspection screen.
* Adds support for allowing the user to edit the comment for the runOpMode block.
* Adds the `parameterDefaultValues` field to `@ExportToBlocks`. This allows a Java method with an `@ExportToBlocks` annotation to specify default values for its method parameters when it is shown in the blocks editor.
* Makes LinearOpMode blocks more readable: the OpMode name is displayed on the runOpMode block, but not on the other LinearOpMode blocks.
* Adds support to TensorFlow Object Detection for using a different frame generator instead of Vuforia. Using Vuforia to pass the camera frame to TFOD is still supported.
* Removes usage of Renderscript.
* Fixes log spam on app startup of repeated stack traces relating to `"Failed resolution of: Landroid/net/wifi/p2p/WifiP2pManager$DeviceInfoListener"`.
* Allows disabling the Bluetooth radio from the inspection screen.
* Improves warning messages when I2C devices are not responding.
* Adds support for controlling the RGB LED present on PS4/Etpark gamepads from OpModes.
* Removes legacy Pushbot references from OpMode samples. Renames "Pushbot" samples to "Robot". Motor directions are reversed to be compatible with a direct-drive drive train.
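
A minimal sketch of the new init-loop and self-exit methods in a `LinearOpMode` (the OpMode name and the gamepad binding are hypothetical; only `opModeInInit()` and `terminateOpModeNow()` come from the notes above):

```java
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

@TeleOp(name = "InitLoopDemo") // hypothetical name
public class InitLoopDemo extends LinearOpMode {
    @Override
    public void runOpMode() {
        // opModeInInit() stays true from INIT until play is pressed,
        // so telemetry can refresh while waiting for the start signal.
        while (opModeInInit()) {
            telemetry.addData("Status", "initialized, waiting for start");
            telemetry.update();
        }

        while (opModeIsActive()) {
            if (gamepad1.back) {
                // Cleanly self-exits the OpMode immediately.
                terminateOpModeNow();
            }
            telemetry.addData("Status", "running");
            telemetry.update();
        }
    }
}
```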
### Bug fixes
* Fixes [issue #316](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/316) (MatrixF.inverted() returned an incorrectly-sized matrix for 1x1 and 2x2 matrices).
* Self Inspect now allows for Driver Station and Robot Controller compatibility between point releases.
* Fixes bug where, if the same `RumbleEffect` object instance was queued for multiple gamepads, both rumble commands could be sent to just one gamepad (see the sketch at the end of this section).
* Fixes bug in Driver Station where on the Driver Hub, if Advanced Gamepad Features was disabled and
an officially supported gamepad was connected, then opening the Advanced Gamepad Features or
Gamepad Type Overrides screens would cause the gamepad to be rebound by the custom USB driver even
though advanced gamepad features was disabled.
* Protects against (unlikely) null pointer exception in Vuforia Localizer.
* Hardens OnBotJava and Blocks saves to protect against save issues when disconnecting from Program and Manage.
* Fixes issue where the RC app would hang if a REV Hub I2C write failed because the previous I2C operation was still in progress. This hang most commonly occurred during REV 2M Distance Sensor initialization.
* Removes ConceptWebcam.java sample program. This sample is not compatible with OnBotJava.
* Fixes bug where using HTML tags in an `@ExportToBlocks` comment field prevented the blocks editor from loading.
* Fixes blocks editor so it doesn't ask you to save when you haven't modified anything.
* Fixes uploading a very large blocks project to the offline blocks editor.
* Fixes bug that caused blocks for DcMotorEx to be omitted from the blocks editor toolbox.
* Fixes [Blocks Programs Stripped of Blocks (due to using TensorFlow Label block)](https://ftcforum.firstinspires.org/forum/ftc-technology/blocks-programming/87035-blocks-programs-stripped-of-blocks)
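
For the `RumbleEffect` fix above, a sketch of queuing rumble effects; building one effect instance per gamepad also sidesteps the shared-instance bug on older app versions (the OpMode name and step values are illustrative):

```java
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Gamepad;

@TeleOp(name = "RumbleDemo") // hypothetical name
public class RumbleDemo extends LinearOpMode {
    @Override
    public void runOpMode() {
        // Build a separate effect instance for each gamepad rather than
        // queuing one shared instance across gamepads.
        Gamepad.RumbleEffect effectA = makeEffect();
        Gamepad.RumbleEffect effectB = makeEffect();

        waitForStart();
        gamepad1.runRumbleEffect(effectA);
        gamepad2.runRumbleEffect(effectB);
        while (opModeIsActive()) {
            idle();
        }
    }

    private Gamepad.RumbleEffect makeEffect() {
        return new Gamepad.RumbleEffect.Builder()
                .addStep(1.0, 1.0, 500) // both motors full power for 500 ms
                .addStep(0.0, 0.0, 250) // pause for 250 ms
                .addStep(0.5, 0.5, 500) // half power for 500 ms
                .build();
    }
}
```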
## Version 7.1 (20211223-120805)
* Fixes crash when calling `isPwmEnabled()` ([issue #223](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/233)).
* Fixes lint error ([issue #4](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/4)).
* Fixes Driver Station crash when attempting to use DualShock4 v1 gamepad with Advanced Gamepad Features enabled ([issue #173](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/173)).
* Fixes possible (but unlikely) Driver Station crash when connecting gamepads of any type.
* Fixes bug where Driver Station would use generic 20% deadzone for Xbox360 and Logitech F310 gamepads when Advanced Gamepad Features was disabled.
* Adds SimpleOmniDrive sample OpMode.
* Adds UVC white balance control API (see the sketch at the end of this section).
* Fixes [issue #259](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/259) Most blocks samples for TensorFlow can't be used for a different model.
* The blocks previously labeled TensorFlowObjectDetectionFreightFrenzy (from the subcategory named "Optimized for Freight Frenzy") and TensorFlowObjectDetectionCustomModel (from the subcategory named "Custom Model") have been replaced with blocks labeled TensorFlowObjectDetection. Blocks in existing opmodes will be automatically updated to the new blocks when opened in the blocks editor.
* Fixes [issue #260](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/260) Blocks can't call java method that has a VuforiaLocalizer parameter.
* Blocks now has a block labeled VuforiaFreightFrenzy.getVuforiaLocalizer for this.
* Adds a page to manage the TensorFlow Lite models in /sdcard/FIRST/tflitemodels. To get to the TFLite Models page:
* You can click on the link at the bottom of the Manage page.
* You can click on the link at the upper right of the Blocks project page.
* Fixes log spam when `isBusy()` is called on a motor not in RUN_TO_POSITION (RTP) mode.
* Hides the "RC Password" item on the inspection screen for phone-based Robot Controllers. (It is only applicable for Control Hubs).
* Adds channel 165 to Wi-Fi Direct channel selection menu in the settings screen. (165 was previously available through the web UI, but not locally in the app).
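
A sketch of the new white balance control; `vuforia` is assumed to be an initialized `VuforiaLocalizer`, and the `getCamera().getControl(...)` accessor and temperature value are this sketch's assumptions:

```java
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.WhiteBalanceControl;

// Sketch only: obtain the white balance control from the camera behind
// the Vuforia localizer (accessor assumed), then set a manual temperature.
WhiteBalanceControl wb = vuforia.getCamera().getControl(WhiteBalanceControl.class);
if (wb != null) {
    wb.setMode(WhiteBalanceControl.Mode.MANUAL); // switch off auto white balance
    wb.setWhiteBalanceTemperature(4600);         // color temperature in kelvin
}
```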
## Version 7.0 (20210915-141025)
### Enhancements and New Features
* Adds support for external libraries to OnBotJava and Blocks.
* Upload .jar and .aar files in OnBotJava.
* Known limitation - RobotController device must be running Android 7.0 or greater.
* Known limitation - .aar files with assets are not supported.
* External libraries can provide support for hardware devices by using the annotations in the `com.qualcomm.robotcore.hardware.configuration.annotations` package.
* External libraries can include .so files for native code.
* External libraries can be used from OnBotJava op modes.
* External libraries that use the following annotations can be used from Blocks op modes.
* org.firstinspires.ftc.robotcore.external.ExportClassToBlocks
* org.firstinspires.ftc.robotcore.external.ExportToBlocks
* External libraries that use the following annotations can add new hardware devices:
* com.qualcomm.robotcore.hardware.configuration.annotations.AnalogSensorType
* com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties
* com.qualcomm.robotcore.hardware.configuration.annotations.DigitalIoDeviceType
* com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType
* com.qualcomm.robotcore.hardware.configuration.annotations.MotorType
* com.qualcomm.robotcore.hardware.configuration.annotations.ServoType
* External libraries that use the following annotations can add new functionality to the Robot Controller:
* org.firstinspires.ftc.ftccommon.external.OnCreate
* org.firstinspires.ftc.ftccommon.external.OnCreateEventLoop
* org.firstinspires.ftc.ftccommon.external.OnCreateMenu
* org.firstinspires.ftc.ftccommon.external.OnDestroy
* org.firstinspires.ftc.ftccommon.external.WebHandlerRegistrar
* Adds support for REV Robotics Driver Hub.
* Adds fully custom userspace USB gamepad driver to Driver Station (see "Advanced Gamepad Features" menu in DS settings).
* Allows gamepads to work on devices without native Linux kernel support (e.g. some Romanian Motorola devices).
* Allows the DS to read the unique serial number of each gamepad, enabling auto-recovery of dropped gamepads even if two gamepads of the same model drop. *(NOTE: unfortunately this does not apply to Etpark gamepads, because they do not have a unique serial)*.
* Reading the unique serial number also provides the ability to configure the DS to assign gamepads to a certain position by default (so no need to do start+a/b at all).
* The LED ring on the Xbox360 gamepad and the RGB LED bar on the PS4 gamepad are used to indicate the driver position the gamepad is bound to.
* The rumble motors on the Xbox360, PS4, and Etpark gamepads can be controlled from OpModes.
* The 2-point touchpad on the PS4 gamepad can be read from OpModes (see the sketch at the end of this section).
* The "back" and "guide" buttons on the gamepad can now be safely bound to robot controls (Previously, on many devices, Android would intercept these buttons as home button presses and close the app).
* Advanced Gamepad features are enabled by default, but may be disabled through the settings menu in order to revert to gamepad support provided natively by Android.
* Improves accuracy of ping measurement.
* Fixes issue where the ping time showed as being higher than reality when initially connecting to or restarting the robot.
* To see the full improvement, you must update both the Robot Controller and Driver Station apps.
* Updates samples located at [/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples).
* Added ConceptGamepadRumble and ConceptGamepadTouchpad samples to illustrate the use of these new gamepad capabilities.
* Condensed existing Vuforia samples into just 2 samples (ConceptVuforiaFieldNavigation & ConceptVuforiaFieldNavigationWebcam) showing how to determine the robot's location on the field using Vuforia. These both use the current season's Target images.
* Added ConceptVuforiaDriveToTargetWebcam to illustrate an easy way to drive directly to any visible Vuforia target.
* Makes many improvements to the warning system and individual warnings.
* Warnings are now much more spaced out, so that they are easier to read.
* New warnings were added for conditions that should be resolved before competing.
* The mismatched apps warning now uses the major and minor app versions, not the version code.
* The warnings are automatically re-enabled when a Robot Controller app from a new FTC season is installed.
* Adds support for I2C transactions on the Expansion Hub / Control Hub without specifying a register address.
* See section 3 of the [TI I2C spec](https://www.ti.com/lit/an/slva704/slva704.pdf).
* Calling these new methods when using Modern Robotics hardware will result in an UnsupportedOperationException.
* Changes VuforiaLocalizer `close()` method to be public.
* Adds support for TensorFlow v2 object detection models.
* Reduces ambiguity of the Self Inspect language and graphics.
* OnBotJava now warns about potentially unintended file overwrites.
* Improves behavior of the Wi-Fi band and channel selector on the Manage webpage.
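
A minimal sketch of reading the PS4 touchpad from an OpMode (the OpMode name is hypothetical; the `touchpad_finger_*` fields are the ones illustrated by the ConceptGamepadTouchpad sample):

```java
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

@TeleOp(name = "TouchpadDemo") // hypothetical name
public class TouchpadDemo extends LinearOpMode {
    @Override
    public void runOpMode() {
        waitForStart();
        while (opModeIsActive()) {
            // Finger presence flags plus coordinates normalized to roughly [-1, 1].
            if (gamepad1.touchpad_finger_1) {
                telemetry.addData("Finger 1", "x=%.2f y=%.2f",
                        gamepad1.touchpad_finger_1_x, gamepad1.touchpad_finger_1_y);
            }
            if (gamepad1.touchpad_finger_2) {
                telemetry.addData("Finger 2", "x=%.2f y=%.2f",
                        gamepad1.touchpad_finger_2_x, gamepad1.touchpad_finger_2_y);
            }
            telemetry.update();
        }
    }
}
```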
### Bug fixes
* Fixes Robot Controller app crash on Android 9+ when a Driver Station connects.
* Fixes issue where an Op Mode was responsible for calling shutdown on the
TensorFlow TFObjectDetector. Now this is done automatically.
* Fixes Vuforia initialization blocks to allow the user to choose an AxesOrder. Updated relevant Blocks sample OpModes.
* Fixes [FtcRobotController issue #114](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/114)
LED blocks and Java class do not work.
* Fixes match logging for Op Modes that contain special characters in their names.
* Fixes Driver Station OpMode controls becoming unresponsive if the Driver Station was set to the landscape layout and an OnBotJava build was triggered while an OpMode was running.
* Fixes the Driver Station app closing itself when it is switched away from, or the screen is turned off.
* Fixes "black swirl of doom" (Infinite "configuring Wi-Fi Direct" message) on older devices.
* Updates the wiki comment on the OnBotJava intro page.
## Version 6.2 (20210218-074821)
### Enhancements
* Attempts to automatically fix the condition where a Control Hub's internal Expansion Hub is not
working by re-flashing its firmware
* Makes various improvements to the WiFi Direct pairing screen, especially in landscape mode
* Makes various improvements to the Wi-Fi Direct pairing screen, especially in landscape mode
* Makes the Robot Controller service no longer be categorically restarted when the main activity is brought to foreground
* (e.g. the service is no longer restarted simply by viewing the Self Inspect screen and pressing the back button)
* It is still restarted if the Settings menu or Configure Robot menu is opened
@ -75,7 +212,7 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
* Fixes issue where the Control Hub OS's watchdog would restart the Robot Controller app if
the Control Hub was not able to communicate with its internal Expansion Hub
* Fixes certain I2C devices not showing up in the appropriate `HardwareMap` fields (such as `hardwareMap.colorSensor`)
* Fixes issue where performing a WiFi factory reset on the Control Hub would not set the WiFi band to 2.4 GHz
* Fixes issue where performing a Wi-Fi factory reset on the Control Hub would not set the Wi-Fi band to 2.4 GHz
* Fixes issue where OnBotJava might fail to create a new file if the option to "Setup Code for Configured Hardware" was selected
* Fixes issue where performing certain operations after an Op Mode crashes would temporarily break Control/Expansion Hub communication
* Fixes issue where a Control Hub with a configured USB-connected Expansion Hub would not work if the Expansion Hub was missing at startup
@ -101,7 +238,7 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
* Introduces an automatic TeleOp preselection feature
* For details and usage guide, please see [this wiki entry](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Automatically-Loading-a-Driver-Controlled-Op-Mode)
* Shows icon next to OpMode name in the OpMode list dropdown on the Driver Station to indicate the source of the OpMode (i.e. the programming tool used to create it)
* Fixes issue where the Driver Station app would exit after displaying the Configuring WiFi Direct screen
* Fixes issue where the Driver Station app would exit after displaying the Configuring Wi-Fi Direct screen
* Fixes Blocks and OnBotJava prompts when accessed via the REV Hardware Client
## Version 6.0 (20200921-085816)
@ -155,7 +292,10 @@ Version 5.5 requires Android Studio 4.0 or later.
### New features
* Adds support for calling custom Java classes from Blocks OpModes (fixes [SkyStone issue #161](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/161)).
* Classes must be in the org.firstinspires.ftc.teamcode package.
* To have easy access to the opMode, hardwareMap, telemetry, gamepad1, and gamepad2, classes can extend org.firstinspires.ftc.robotcore.external.BlocksOpModeCompanion (see the sketch after this list).
* Methods must be public static and have no more than 21 parameters.
* Methods must be annotated with org.firstinspires.ftc.robotcore.external.ExportToBlocks.
* Parameters declared as OpMode, LinearOpMode, Telemetry, and HardwareMap are supported and the
argument is provided automatically, regardless of the order of the parameters. On the block,
the sockets for those parameters are automatically filled in.
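
A minimal sketch of such a callable class (the class name and method are hypothetical; the annotation fields shown are `comment`, `tooltip`, and `parameterLabels`):

```java
package org.firstinspires.ftc.teamcode;

import org.firstinspires.ftc.robotcore.external.BlocksOpModeCompanion;
import org.firstinspires.ftc.robotcore.external.ExportToBlocks;

// Extending BlocksOpModeCompanion exposes opMode, hardwareMap, telemetry,
// gamepad1, and gamepad2 as static fields.
public class TeamUtils extends BlocksOpModeCompanion {
    @ExportToBlocks(
        comment = "Clips a motor power to [-max, max].",
        tooltip = "Limit a power value",
        parameterLabels = {"power", "max"}
    )
    public static double clipPower(double power, double max) {
        return Math.max(-max, Math.min(max, power));
    }
}
```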
@ -175,10 +315,10 @@ Version 5.5 requires Android Studio 4.0 or later.
* Adds Blocks support for TensorFlow Object Detection with a custom model.
* Adds support for uploading a custom TensorFlow Object Detection model in the Manage page, which
is especially useful for Blocks and OnBotJava users.
* Shows new Control Hub blink codes when the WiFi band is switched using the Control Hub's button (only possible on Control Hub OS 1.1.2)
* Shows new Control Hub blink codes when the Wi-Fi band is switched using the Control Hub's button (only possible on Control Hub OS 1.1.2)
* Adds new warnings which can be disabled in the Advanced RC Settings
* Mismatched app versions warning
* Unnecessary 2.4 GHz WiFi usage warning
* Unnecessary 2.4 GHz Wi-Fi usage warning
* REV Hub is running outdated firmware (older than version 1.8.2)
* Adds support for Sony PS4 gamepad, and reworks how gamepads work on the Driver Station
* Removes preference which sets gamepad type based on driver position. Replaced with menu which allows specifying type for gamepads with unknown VID and PID
@ -223,16 +363,16 @@ Version 5.5 requires Android Studio 4.0 or later.
* Not all types of runaway conditions are stoppable, but if the user code attempts to talk to hardware during the runaway, the system should be able to capture it.
* Makes various tweaks to the Self Inspect screen
* Renames "OS version" entry to "Android version"
* Renames "WiFi Direct Name" to "WiFi Name"
* Renames "Wi-Fi Direct Name" to "Wi-Fi Name"
* Adds Control Hub OS version, when viewing the report of a Control Hub
* Hides the airplane mode entry, when viewing the report of a Control Hub
* Removes check for ZTE Speed Channel Changer
* Shows firmware version for **all** Expansion and Control Hubs
* Reworks network settings portion of Manage page
* All network settings are now applied with a single click
* The WiFi Direct channel of phone-based Robot Controllers can now be changed from the Manage page
* WiFi channels are filtered by band (2.4 vs 5 GHz) and whether they overlap with other channels
* The current WiFi channel is pre-selected on phone-based Robot Controllers, and Control Hubs running OS 1.1.2 or later.
* The Wi-Fi Direct channel of phone-based Robot Controllers can now be changed from the Manage page
* Wi-Fi channels are filtered by band (2.4 vs 5 GHz) and whether they overlap with other channels
* The current Wi-Fi channel is pre-selected on phone-based Robot Controllers, and Control Hubs running OS 1.1.2 or later.
* On Control Hubs running OS 1.1.2 or later, you can choose to have the system automatically select a channel on the 5 GHz band
* Improves OnBotJava
* New light and dark themes replace the old themes (chaos, github, chrome,...)
@ -244,7 +384,7 @@ Version 5.5 requires Android Studio 4.0 or later.
* Shows a warning message if a LinearOpMode exits prematurely due to failure to monitor for the start condition
* Improves error message shown when the Driver Station and Robot Controller are incompatible with each other
* Driver Station OpMode Control Panel now disabled while a Restart Robot is in progress
* Disables advanced settings related to WiFi direct when the Robot Controller is a Control Hub.
* Disables advanced settings related to Wi-Fi Direct when the Robot Controller is a Control Hub.
* Tint phone battery icons on Driver Station when low/critical.
* Uses names "Control Hub Portal" and "Control Hub" (when appropriate) in new configuration files
* Improve I2C read performance
@ -405,11 +545,11 @@ Known issues:
* Support for the REV Robotics Control Hub.
* Adds a Java preview pane to the Blocks editor.
* Adds a new offline export feature to the Blocks editor.
* Display wifi channel in Network circle on Driver Station.
* Display Wi-Fi channel in Network circle on Driver Station.
* Adds calibration for Logitech C270
* Updates build tooling and target SDK.
* Compliance with Google's permissions infrastructure (Required after build tooling update).
* Keep Alives to mitigate the Motorola wifi scanning problem. Telemetry substitute no longer necessary.
* Keep Alives to mitigate the Motorola Wi-Fi scanning problem. Telemetry substitute no longer necessary.
* Improves Vuforia error reporting.
* Fixes ftctechnh/ftc_app issues 621, 713.
* Miscellaneous bug fixes and improvements.
@ -548,7 +688,7 @@ Known issues:
- When user selects a wireless channel, this channel does not necessarily persist if the phone is power cycled.
+ Tech Team is hoping to eventually address this issue in a future release.
+ Issue has been present since apps were introduced (i.e., it is not new with the v4.0 release).
- Wireless channel is not currently displayed for WiFi Direct connections.
- Wireless channel is not currently displayed for Wi-Fi Direct connections.
* Miscellaneous
- The blink indication feature that shows which Expansion Hub is currently being configured does not work for a newly created configuration file.
@ -943,7 +1083,7 @@ Changes include:
* Added getCallbackCount() to I2cDevice.
* Added missing clearI2cPortActionFlag.
* Added code to create log messages while waiting for LinearOpMode shutdown.
* Fix so Wifi Direct Config activity will no longer launch multiple times.
* Fix so Wi-Fi Direct Config activity will no longer launch multiple times.
* Added the ability to specify an alternate i2c address in software for the Modern Robotics gyro.
## Release 16.02.09
@ -958,18 +1098,18 @@ Changes include:
* NXT light sensor output is now properly scaled. Note that teams might have to readjust their light threshold values in their op modes.
* On DS user interface, gamepad icon for a driver will disappear if the matching gamepad is disconnected or if that gamepad gets designated as a different driver.
* Robot Protocol (ROBOCOL) version number info is displayed in About screen on RC and DS apps.
* Incorporated a display filter on pairing screen to filter out devices that don't use the “<TEAM NUMBER>-” format. This filter can be turned off to show all WiFi Direct devices.
* Incorporated a display filter on pairing screen to filter out devices that don't use the “<TEAM NUMBER>-” format. This filter can be turned off to show all Wi-Fi Direct devices.
* Updated text in License file.
* Fixed formatting error in OpticalDistanceSensor.toString().
* Fixed issue with a blank (“”) device name that would disrupt WiFi Direct Pairing.
* Made a change so that the WiFi info and battery info can be displayed more quickly on the DS upon connecting to RC.
* Fixed issue with a blank (“”) device name that would disrupt Wi-Fi Direct Pairing.
* Made a change so that the Wi-Fi info and battery info can be displayed more quickly on the DS upon connecting to RC.
* Improved javadoc generation.
* Modified code to make it easier to support language localization in the future.
## Release 16.01.04
* Updated compileSdkVersion for apps
* Prevent Wifi from entering power saving mode
* Prevent Wi-Fi from entering power saving mode
* removed unused import from driver station
* Corrected "Dead zone" joystick code.
* LED.getDeviceName and .getConnectionInfo() return null
@ -984,7 +1124,7 @@ Changes include:
* added some log statements for thread life cycle.
* moved gamepad reset logic inside of initActiveOpMode() for robustness
* changes made to mitigate risk of race conditions on public methods.
* changes to try and flag when WiFi Direct name contains non-printable characters.
* changes to try and flag when Wi-Fi Direct name contains non-printable characters.
* fix to correct race condition between .run() and .close() in ReadWriteRunnableStandard.
* updated FTDI driver
* made ReadWriteRunnableStandard interface public.

View file

@ -15,6 +15,10 @@
apply from: '../build.common.gradle'
apply from: '../build.dependencies.gradle'
android {
namespace = 'org.firstinspires.ftc.teamcode'
}
dependencies {
implementation project(':FtcRobotController')
annotationProcessor files('lib/OpModeAnnotationProcessor.jar')

View file

@ -6,7 +6,6 @@
<!-- The package name here determines the package for your R class and your BuildConfig class -->
<manifest
package="org.firstinspires.ftc.teamcode"
xmlns:android="http://schemas.android.com/apk/res/android">
<application/>
</manifest>

View file

@ -55,6 +55,7 @@ android {
signingConfig signingConfigs.debug
applicationId 'com.qualcomm.ftcrobotcontroller'
minSdkVersion 23
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
/**
@ -99,7 +100,6 @@ android {
debug {
debuggable true
jniDebuggable true
renderscriptDebuggable true
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
}
@ -107,10 +107,13 @@ android {
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_7
targetCompatibility JavaVersion.VERSION_1_7
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
packagingOptions {
pickFirst '**/*.so'
}
sourceSets.main {
jni.srcDirs = []
jniLibs.srcDir rootProject.file('libs')

View file

@ -1,23 +1,22 @@
repositories {
mavenCentral()
google() // Needed for androidx
jcenter() // Needed for tensorflow-lite
flatDir {
dirs rootProject.file('libs')
}
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:6.2.1'
implementation 'org.firstinspires.ftc:Blocks:6.2.1'
implementation 'org.firstinspires.ftc:RobotCore:6.2.1'
implementation 'org.firstinspires.ftc:RobotServer:6.2.1'
implementation 'org.firstinspires.ftc:OnBotJava:6.2.1'
implementation 'org.firstinspires.ftc:Hardware:6.2.1'
implementation 'org.firstinspires.ftc:FtcCommon:6.2.1'
implementation 'org.firstinspires.ftc:tfod:1.0.2'
implementation 'org.tensorflow:tensorflow-lite:1.10.0'
implementation 'org.firstinspires.ftc:Inspection:7.2.0'
implementation 'org.firstinspires.ftc:Blocks:7.2.0'
implementation 'org.firstinspires.ftc:Tfod:7.2.0'
implementation 'org.firstinspires.ftc:RobotCore:7.2.0'
implementation 'org.firstinspires.ftc:RobotServer:7.2.0'
implementation 'org.firstinspires.ftc:OnBotJava:7.2.0'
implementation 'org.firstinspires.ftc:Hardware:7.2.0'
implementation 'org.firstinspires.ftc:FtcCommon:7.2.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.2.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'org.firstinspires.ftc:gameAssets-FreightFrenzy:1.0.0'
}

View file

@ -4,18 +4,13 @@
* It is extraordinarily rare that you will ever need to edit this file.
*/
configurations {
doc { transitive false }
}
buildscript {
repositories {
mavenCentral()
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:4.0.1'
classpath 'com.android.tools.build:gradle:7.2.0'
}
}
@ -25,7 +20,6 @@ allprojects {
repositories {
mavenCentral()
google()
jcenter()
}
}
@ -36,34 +30,3 @@ repositories {
dirs '../libs'
}
}
dependencies {
doc 'org.firstinspires.ftc:Hardware:6.2.0'
doc 'org.firstinspires.ftc:RobotCore:6.2.0'
doc 'org.firstinspires.ftc:FtcCommon:6.2.0'
doc 'org.firstinspires.ftc:OnBotJava:6.2.0'
doc 'org.firstinspires.ftc:Inspection:6.2.0'
}
task extractJavadoc {
doLast {
def componentIds = configurations.doc.incoming.resolutionResult.allDependencies.collect { it.selected.id }
def result = dependencies.createArtifactResolutionQuery()
.forComponents(componentIds)
.withArtifacts(JvmLibrary, SourcesArtifact, JavadocArtifact)
.execute()
for (component in result.resolvedComponents) {
component.getArtifacts(JavadocArtifact).each { artifact ->
def version = artifact.identifier.componentIdentifier.version
def libName = artifact.identifier.componentIdentifier.moduleIdentifier.name
copy {
from zipTree(artifact.file)
into "docs/$version/$libName/"
}
}
}
}
}

Binary file not shown.

View file

@ -1,6 +1,5 @@
#Fri Jul 24 14:30:03 PDT 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip

0
gradlew vendored Normal file → Executable file
View file

Binary file not shown.