diff --git a/build.gradle b/build.gradle index 92388aa..ff21459 100644 --- a/build.gradle +++ b/build.gradle @@ -1,14 +1,14 @@ // Top-level build file where you can add configuration options common to all sub-projects/modules. buildscript { - ext.ftc_version = '4.3' + ext.ftc_version = '5.2' repositories { google() jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:3.3.0' + classpath 'com.android.tools.build:gradle:3.5.0' classpath 'com.github.dcendents:android-maven-gradle-plugin:2.1' // NOTE: Do not place your application dependencies here; they belong // in the individual module build.gradle files diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index dc9a468..9e0e0eb 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ -#Mon Oct 30 19:28:07 CDT 2017 +#Thu Sep 26 15:11:49 CDT 2019 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip diff --git a/library/build.gradle b/library/build.gradle index c6d7298..f5c4081 100644 --- a/library/build.gradle +++ b/library/build.gradle @@ -10,8 +10,8 @@ android { targetSdkVersion 19 testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" - versionCode 30 - versionName "$ftc_version.1" + versionCode 31 + versionName "$ftc_version.0" ndk { abiFilters "armeabi-v7a" @@ -37,6 +37,8 @@ dependencies { implementation "org.first.ftc:analytics:$ftc_version" implementation "org.first.ftc:wireless-p2p:$ftc_version" implementation "org.first.ftc:vuforia-incomplete:$ftc_version" + implementation "org.first.ftc:onbotjava:$ftc_version" + implementation "org.first.ftc:robotserver:$ftc_version" // Repackaged dependencies implementation 
"com.github.modular-ftc:robotcore-repackaged:$ftc_version.0" diff --git a/library/src/main/AndroidManifest.xml b/library/src/main/AndroidManifest.xml index a230902..349fc5b 100644 --- a/library/src/main/AndroidManifest.xml +++ b/library/src/main/AndroidManifest.xml @@ -2,8 +2,8 @@ + android:versionCode="34" + android:versionName="5.2"> @@ -14,7 +14,25 @@ android:label="@string/app_name" android:theme="@style/AppThemeRedRC" > + + + + + + + + + + + + - - - - - @@ -35,10 +48,10 @@ android:name="android.hardware.usb.action.USB_DEVICE_ATTACHED" android:resource="@xml/device_filter" /> + + android:value="true" /> diff --git a/library/src/main/assets/Skystone.dat b/library/src/main/assets/Skystone.dat new file mode 100644 index 0000000..595f8dd Binary files /dev/null and b/library/src/main/assets/Skystone.dat differ diff --git a/library/src/main/assets/Skystone.tflite b/library/src/main/assets/Skystone.tflite new file mode 100644 index 0000000..fe89cd7 Binary files /dev/null and b/library/src/main/assets/Skystone.tflite differ diff --git a/library/src/main/assets/Skystone.xml b/library/src/main/assets/Skystone.xml new file mode 100644 index 0000000..c4a988b --- /dev/null +++ b/library/src/main/assets/Skystone.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java index b81b7de..1b1ecc3 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptRevSPARKMini.java @@ -39,16 +39,16 @@ /** * - * This OpMode executes a basic Tank Drive Teleop for a two wheeled robot using two REV SPARK Minis. - * To use this example, connect two REV SPARK Minis into servo ports on the Expansion Hub. 
On the - * robot configuration, use the drop down list under 'Servos' to select 'REV SPARK Mini Controller' + * This OpMode executes a basic Tank Drive Teleop for a two wheeled robot using two REV SPARKminis. + * To use this example, connect two REV SPARKminis into servo ports on the Expansion Hub. On the + * robot configuration, use the drop down list under 'Servos' to select 'REV SPARKmini Controller' * and name them 'left_drive' and 'right_drive'. * * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list */ -@TeleOp(name="REV SPARK Mini Simple Drive Example", group="Concept") +@TeleOp(name="REV SPARKmini Simple Drive Example", group="Concept") @Disabled public class ConceptRevSPARKMini extends LinearOpMode { diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java index ce389ce..8e9c899 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptScanServo.java @@ -40,7 +40,7 @@ * INCREMENT sets how much to increase/decrease the servo position each cycle * CYCLE_MS sets the update period. * - * This code assumes a Servo configured with the name "left claw" as is found on a pushbot. + * This code assumes a Servo configured with the name "left_hand" as is found on a pushbot. * * NOTE: When any servo position is set, ALL attached servos are activated, so ensure that any other * connected servos are able to move freely before running this test. 
diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java new file mode 100644 index 0000000..e8f3dde --- /dev/null +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptSoundsSKYSTONE.java @@ -0,0 +1,123 @@ +/* Copyright (c) 2018 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. + * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.content.Context; + +import com.qualcomm.ftccommon.SoundPlayer; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; + +/** + * This file demonstrates how to play one of the several SKYSTONE/Star Wars sounds loaded into the SDK. + * It does this by creating a simple "chooser" controlled by the gamepad Up Down buttons. + * This code also prevents sounds from stacking up by setting a "playing" flag, which is cleared when the sound finishes playing. + * + * Use Android Studios to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list + * + * Operation: + * Use the DPAD to change the selected sound, and the Right Bumper to play it. 
+ */ + +@TeleOp(name="SKYSTONE Sounds", group="Concept") +@Disabled +public class ConceptSoundsSKYSTONE extends LinearOpMode { + + // List of available sound resources + String sounds[] = {"ss_alarm", "ss_bb8_down", "ss_bb8_up", "ss_darth_vader", "ss_fly_by", + "ss_mf_fail", "ss_laser", "ss_laser_burst", "ss_light_saber", "ss_light_saber_long", "ss_light_saber_short", + "ss_light_speed", "ss_mine", "ss_power_up", "ss_r2d2_up", "ss_roger_roger", "ss_siren", "ss_wookie" }; + boolean soundPlaying = false; + + @Override + public void runOpMode() { + + // Variables for choosing from the available sounds + int soundIndex = 0; + int soundID = -1; + boolean was_dpad_up = false; + boolean was_dpad_down = false; + + Context myApp = hardwareMap.appContext; + + // create a sound parameter that holds the desired player parameters. + SoundPlayer.PlaySoundParams params = new SoundPlayer.PlaySoundParams(); + params.loopControl = 0; + params.waitForNonLoopingSoundsToFinish = true; + + // In this sample, we will skip waiting for the user to press play, and start displaying sound choices right away + while (!isStopRequested()) { + + // Look for DPAD presses to change the selection + if (gamepad1.dpad_down && !was_dpad_down) { + // Go to next sound (with list wrap) and display it + soundIndex = (soundIndex + 1) % sounds.length; + } + + if (gamepad1.dpad_up && !was_dpad_up) { + // Go to previous sound (with list wrap) and display it + soundIndex = (soundIndex + sounds.length - 1) % sounds.length; + } + + // Look for trigger to see if we should play sound + // Only start a new sound if we are currently not playing one. + if (gamepad1.right_bumper && !soundPlaying) { + + // Determine Resource IDs for the sounds you want to play, and make sure it's valid. + if ((soundID = myApp.getResources().getIdentifier(sounds[soundIndex], "raw", myApp.getPackageName())) != 0){ + + // Signal that the sound is now playing. 
+ soundPlaying = true; + + // Start playing, and also Create a callback that will clear the playing flag when the sound is complete. + SoundPlayer.getInstance().startPlaying(myApp, soundID, params, null, + new Runnable() { + public void run() { + soundPlaying = false; + }} ); + } + } + + // Remember the last state of the dpad to detect changes. + was_dpad_up = gamepad1.dpad_up; + was_dpad_down = gamepad1.dpad_down; + + // Display the current sound choice, and the playing status. + telemetry.addData("", "Use DPAD up/down to choose sound."); + telemetry.addData("", "Press Right Bumper to play sound."); + telemetry.addData("", ""); + telemetry.addData("Sound >", sounds[soundIndex]); + telemetry.addData("Status >", soundPlaying ? "Playing" : "Stopped"); + telemetry.update(); + } + } +} diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java index 1b7c9ef..7d41580 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetection.java @@ -1,4 +1,4 @@ -/* Copyright (c) 2018 FIRST. All rights reserved. +/* Copyright (c) 2019 FIRST. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted (subject to the limitations in the disclaimer below) provided that @@ -40,8 +40,8 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition; /** - * This 2018-2019 OpMode illustrates the basics of using the TensorFlow Object Detection API to - * determine the position of the gold and silver minerals. + * This 2019-2020 OpMode illustrates the basics of using the TensorFlow Object Detection API to + * determine the position of the Skystone game elements. 
* * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. @@ -52,9 +52,9 @@ @TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept") @Disabled public class ConceptTensorFlowObjectDetection extends LinearOpMode { - private static final String TFOD_MODEL_ASSET = "RoverRuckus.tflite"; - private static final String LABEL_GOLD_MINERAL = "Gold Mineral"; - private static final String LABEL_SILVER_MINERAL = "Silver Mineral"; + private static final String TFOD_MODEL_ASSET = "Skystone.tflite"; + private static final String LABEL_FIRST_ELEMENT = "Stone"; + private static final String LABEL_SECOND_ELEMENT = "Skystone"; /* * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which @@ -68,7 +68,8 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode { * Once you've obtained a license key, copy the string from the Vuforia web site * and paste it in to your code on the next line, between the double quotes. */ - private static final String VUFORIA_KEY = " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; /** * {@link #vuforia} is the variable we will use to store our instance of the Vuforia @@ -77,7 +78,7 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode { private VuforiaLocalizer vuforia; /** - * {@link #tfod} is the variable we will use to store our instance of the Tensor Flow Object + * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object * Detection engine. */ private TFObjectDetector tfod; @@ -94,17 +95,20 @@ public void runOpMode() { telemetry.addData("Sorry!", "This device is not compatible with TFOD"); } + /** + * Activate TensorFlow Object Detection before we wait for the start command. 
+ * Do it here so that the Camera Stream window will have the TensorFlow annotations visible. + **/ + if (tfod != null) { + tfod.activate(); + } + /** Wait for the game to begin */ - telemetry.addData(">", "Press Play to start tracking"); + telemetry.addData(">", "Press Play to start op mode"); telemetry.update(); waitForStart(); if (opModeIsActive()) { - /** Activate Tensor Flow Object Detection. */ - if (tfod != null) { - tfod.activate(); - } - while (opModeIsActive()) { if (tfod != null) { // getUpdatedRecognitions() will return null if no new information is available since @@ -112,28 +116,15 @@ public void runOpMode() { List updatedRecognitions = tfod.getUpdatedRecognitions(); if (updatedRecognitions != null) { telemetry.addData("# Object Detected", updatedRecognitions.size()); - if (updatedRecognitions.size() == 3) { - int goldMineralX = -1; - int silverMineral1X = -1; - int silverMineral2X = -1; - for (Recognition recognition : updatedRecognitions) { - if (recognition.getLabel().equals(LABEL_GOLD_MINERAL)) { - goldMineralX = (int) recognition.getLeft(); - } else if (silverMineral1X == -1) { - silverMineral1X = (int) recognition.getLeft(); - } else { - silverMineral2X = (int) recognition.getLeft(); - } - } - if (goldMineralX != -1 && silverMineral1X != -1 && silverMineral2X != -1) { - if (goldMineralX < silverMineral1X && goldMineralX < silverMineral2X) { - telemetry.addData("Gold Mineral Position", "Left"); - } else if (goldMineralX > silverMineral1X && goldMineralX > silverMineral2X) { - telemetry.addData("Gold Mineral Position", "Right"); - } else { - telemetry.addData("Gold Mineral Position", "Center"); - } - } + + // step through the list of recognitions and display boundary info. 
+ int i = 0; + for (Recognition recognition : updatedRecognitions) { + telemetry.addData(String.format("label (%d)", i), recognition.getLabel()); + telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f", + recognition.getLeft(), recognition.getTop()); + telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f", + recognition.getRight(), recognition.getBottom()); } telemetry.update(); } @@ -161,17 +152,18 @@ private void initVuforia() { // Instantiate the Vuforia engine vuforia = ClassFactory.getInstance().createVuforia(parameters); - // Loading trackables is not necessary for the Tensor Flow Object Detection engine. + // Loading trackables is not necessary for the TensorFlow Object Detection engine. } /** - * Initialize the Tensor Flow Object Detection engine. + * Initialize the TensorFlow Object Detection engine. */ private void initTfod() { int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier( "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName()); TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId); + tfodParameters.minimumConfidence = 0.8; tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); - tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_GOLD_MINERAL, LABEL_SILVER_MINERAL); + tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT); } } diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java index f68eed1..b8df806 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptTensorFlowObjectDetectionWebcam.java @@ -1,4 +1,4 @@ -/* 
Copyright (c) 2018 FIRST. All rights reserved. +/* Copyright (c) 2019 FIRST. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted (subject to the limitations in the disclaimer below) provided that @@ -40,8 +40,8 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition; /** - * This 2018-2019 OpMode illustrates the basics of using the TensorFlow Object Detection API to - * determine the position of the gold and silver minerals. + * This 2019-2020 OpMode illustrates the basics of using the TensorFlow Object Detection API to + * determine the position of the Skystone game elements. * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. @@ -52,9 +52,9 @@ @TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept") @Disabled public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode { - private static final String TFOD_MODEL_ASSET = "RoverRuckus.tflite"; - private static final String LABEL_GOLD_MINERAL = "Gold Mineral"; - private static final String LABEL_SILVER_MINERAL = "Silver Mineral"; + private static final String TFOD_MODEL_ASSET = "Skystone.tflite"; + private static final String LABEL_FIRST_ELEMENT = "Stone"; + private static final String LABEL_SECOND_ELEMENT = "Skystone"; /* * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which @@ -68,7 +68,8 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode { * Once you've obtained a license key, copy the string from the Vuforia web site * and paste it in to your code on the next line, between the double quotes. 
*/ - private static final String VUFORIA_KEY = " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; /** * {@link #vuforia} is the variable we will use to store our instance of the Vuforia @@ -77,7 +78,7 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode { private VuforiaLocalizer vuforia; /** - * {@link #tfod} is the variable we will use to store our instance of the Tensor Flow Object + * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object * Detection engine. */ private TFObjectDetector tfod; @@ -94,17 +95,20 @@ public void runOpMode() { telemetry.addData("Sorry!", "This device is not compatible with TFOD"); } + /** + * Activate TensorFlow Object Detection before we wait for the start command. + * Do it here so that the Camera Stream window will have the TensorFlow annotations visible. + **/ + if (tfod != null) { + tfod.activate(); + } + /** Wait for the game to begin */ - telemetry.addData(">", "Press Play to start tracking"); + telemetry.addData(">", "Press Play to start op mode"); telemetry.update(); waitForStart(); if (opModeIsActive()) { - /** Activate Tensor Flow Object Detection. 
*/ - if (tfod != null) { - tfod.activate(); - } - while (opModeIsActive()) { if (tfod != null) { // getUpdatedRecognitions() will return null if no new information is available since @@ -112,28 +116,14 @@ public void runOpMode() { List updatedRecognitions = tfod.getUpdatedRecognitions(); if (updatedRecognitions != null) { telemetry.addData("# Object Detected", updatedRecognitions.size()); - if (updatedRecognitions.size() == 3) { - int goldMineralX = -1; - int silverMineral1X = -1; - int silverMineral2X = -1; - for (Recognition recognition : updatedRecognitions) { - if (recognition.getLabel().equals(LABEL_GOLD_MINERAL)) { - goldMineralX = (int) recognition.getLeft(); - } else if (silverMineral1X == -1) { - silverMineral1X = (int) recognition.getLeft(); - } else { - silverMineral2X = (int) recognition.getLeft(); - } - } - if (goldMineralX != -1 && silverMineral1X != -1 && silverMineral2X != -1) { - if (goldMineralX < silverMineral1X && goldMineralX < silverMineral2X) { - telemetry.addData("Gold Mineral Position", "Left"); - } else if (goldMineralX > silverMineral1X && goldMineralX > silverMineral2X) { - telemetry.addData("Gold Mineral Position", "Right"); - } else { - telemetry.addData("Gold Mineral Position", "Center"); - } - } + // step through the list of recognitions and display boundary info. + int i = 0; + for (Recognition recognition : updatedRecognitions) { + telemetry.addData(String.format("label (%d)", i), recognition.getLabel()); + telemetry.addData(String.format(" left,top (%d)", i), "%.03f , %.03f", + recognition.getLeft(), recognition.getTop()); + telemetry.addData(String.format(" right,bottom (%d)", i), "%.03f , %.03f", + recognition.getRight(), recognition.getBottom()); } telemetry.update(); } @@ -161,17 +151,18 @@ private void initVuforia() { // Instantiate the Vuforia engine vuforia = ClassFactory.getInstance().createVuforia(parameters); - // Loading trackables is not necessary for the Tensor Flow Object Detection engine. 
+ // Loading trackables is not necessary for the TensorFlow Object Detection engine. } /** - * Initialize the Tensor Flow Object Detection engine. + * Initialize the TensorFlow Object Detection engine. */ private void initTfod() { int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier( "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName()); TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId); - tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); - tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_GOLD_MINERAL, LABEL_SILVER_MINERAL); + tfodParameters.minimumConfidence = 0.8; + tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia); + tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT); } } diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaNavRoverRuckus.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigation.java similarity index 50% rename from library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaNavRoverRuckus.java rename to library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigation.java index eb21cf7..7277716 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaNavRoverRuckus.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigation.java @@ -1,4 +1,4 @@ -/* Copyright (c) 2018 FIRST. All rights reserved. +/* Copyright (c) 2019 FIRST. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without modification, * are permitted (subject to the limitations in the disclaimer below) provided that @@ -29,11 +29,12 @@ package org.firstinspires.ftc.robotcontroller.external.samples; +import com.qualcomm.robotcore.eventloop.opmode.Disabled; import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; import com.qualcomm.robotcore.eventloop.opmode.TeleOp; -import com.qualcomm.robotcore.eventloop.opmode.Disabled; import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; import org.firstinspires.ftc.robotcore.external.matrices.VectorF; import org.firstinspires.ftc.robotcore.external.navigation.Orientation; @@ -42,47 +43,37 @@ import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; +import java.util.ArrayList; +import java.util.List; + import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES; import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ; import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX; import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC; import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK; -import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.FRONT; - -import java.util.ArrayList; -import java.util.List; - /** - * This 2018-2019 OpMode illustrates the basics of using the Vuforia localizer to determine - * positioning and orientation of robot on the FTC field. 
+ * This 2019-2020 OpMode illustrates the basics of using the Vuforia localizer to determine + * positioning and orientation of robot on the SKYSTONE FTC field. * The code is structured as a LinearOpMode * - * Vuforia uses the phone's camera to inspect it's surroundings, and attempt to locate target images. - * * When images are located, Vuforia is able to determine the position and orientation of the - * image relative to the camera. This sample code than combines that information with a + * image relative to the camera. This sample code then combines that information with a * knowledge of where the target images are on the field, to determine the location of the camera. * - * This example assumes a "square" field configuration where the red and blue alliance stations - * are on opposite walls of each other. - * * From the Audience perspective, the Red Alliance station is on the right and the * Blue Alliance Station is on the left. - * The four vision targets are located in the center of each of the perimeter walls with - * the images facing inwards towards the robots: - * - BlueRover is the Mars Rover image target on the wall closest to the blue alliance - * - RedFootprint is the Lunar Footprint target on the wall closest to the red alliance - * - FrontCraters is the Lunar Craters image target on the wall closest to the audience - * - BackSpace is the Deep Space image target on the wall farthest from the audience + * Eight perimeter targets are distributed evenly around the four perimeter walls + * Four Bridge targets are located on the bridge uprights. + * Refer to the Field Setup manual for more specific location details * * A final calculation then uses the location of the camera on the robot to determine the * robot's location and orientation on the field. 
* * @see VuforiaLocalizer * @see VuforiaTrackableDefaultListener - * see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf + * see skystone/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. @@ -91,9 +82,19 @@ * is explained below. */ -@TeleOp(name="Concept: Vuforia Rover Nav", group ="Concept") + +@TeleOp(name="SKYSTONE Vuforia Nav", group ="Concept") @Disabled -public class ConceptVuforiaNavRoverRuckus extends LinearOpMode { +public class ConceptVuforiaSkyStoneNavigation extends LinearOpMode { + + // IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted: + // 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side) + // 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape) + // + // NOTE: If you are running on a CONTROL HUB, with only one USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false; + // + private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK; + private static final boolean PHONE_IS_PORTRAIT = false ; /* * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which @@ -107,59 +108,87 @@ public class ConceptVuforiaNavRoverRuckus extends LinearOpMode { * Once you've obtained a license key, copy the string from the Vuforia web site * and paste it in to your code on the next line, between the double quotes. */ - private static final String VUFORIA_KEY = " -- YOUR NEW VUFORIA KEY GOES HERE --- "; + private static final String VUFORIA_KEY = + " -- YOUR NEW VUFORIA KEY GOES HERE --- "; // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension. 
// We will define some constants and conversions here private static final float mmPerInch = 25.4f; - private static final float mmFTCFieldWidth = (12*6) * mmPerInch; // the width of the FTC field (from the center point to the outer panels) private static final float mmTargetHeight = (6) * mmPerInch; // the height of the center of the target image above the floor - // Select which camera you want use. The FRONT camera is the one on the same side as the screen. - // Valid choices are: BACK or FRONT - private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK; + // Constant for Stone Target + private static final float stoneZ = 2.00f * mmPerInch; + + // Constants for the center support targets + private static final float bridgeZ = 6.42f * mmPerInch; + private static final float bridgeY = 23 * mmPerInch; + private static final float bridgeX = 5.18f * mmPerInch; + private static final float bridgeRotY = 59; // Units are degrees + private static final float bridgeRotZ = 180; + + // Constants for perimeter targets + private static final float halfField = 72 * mmPerInch; + private static final float quadField = 36 * mmPerInch; + // Class Members private OpenGLMatrix lastLocation = null; + private VuforiaLocalizer vuforia = null; private boolean targetVisible = false; - - /** - * {@link #vuforia} is the variable we will use to store our instance of the Vuforia - * localization engine. - */ - VuforiaLocalizer vuforia; + private float phoneXRotate = 0; + private float phoneYRotate = 0; + private float phoneZRotate = 0; @Override public void runOpMode() { /* * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. * We can pass Vuforia the handle to a camera preview resource (on the RC phone); - * If no camera monitor is desired, use the parameterless constructor instead (commented out below). + * If no camera monitor is desired, use the parameter-less constructor instead (commented out below). 
*/ int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); - parameters.vuforiaLicenseKey = VUFORIA_KEY ; + parameters.vuforiaLicenseKey = VUFORIA_KEY; parameters.cameraDirection = CAMERA_CHOICE; // Instantiate the Vuforia engine vuforia = ClassFactory.getInstance().createVuforia(parameters); - // Load the data sets that for the trackable objects. These particular data + // Load the data sets for the trackable objects. These particular data // sets are stored in the 'assets' part of our application. - VuforiaTrackables targetsRoverRuckus = this.vuforia.loadTrackablesFromAsset("RoverRuckus"); - VuforiaTrackable blueRover = targetsRoverRuckus.get(0); - blueRover.setName("Blue-Rover"); - VuforiaTrackable redFootprint = targetsRoverRuckus.get(1); - redFootprint.setName("Red-Footprint"); - VuforiaTrackable frontCraters = targetsRoverRuckus.get(2); - frontCraters.setName("Front-Craters"); - VuforiaTrackable backSpace = targetsRoverRuckus.get(3); - backSpace.setName("Back-Space"); + VuforiaTrackables targetsSkyStone = this.vuforia.loadTrackablesFromAsset("Skystone"); + + VuforiaTrackable stoneTarget = targetsSkyStone.get(0); + stoneTarget.setName("Stone Target"); + VuforiaTrackable blueRearBridge = targetsSkyStone.get(1); + blueRearBridge.setName("Blue Rear Bridge"); + VuforiaTrackable redRearBridge = targetsSkyStone.get(2); + redRearBridge.setName("Red Rear Bridge"); + VuforiaTrackable redFrontBridge = targetsSkyStone.get(3); + redFrontBridge.setName("Red Front Bridge"); + VuforiaTrackable blueFrontBridge = targetsSkyStone.get(4); + blueFrontBridge.setName("Blue Front Bridge"); + VuforiaTrackable red1 = targetsSkyStone.get(5); + red1.setName("Red Perimeter 1"); + VuforiaTrackable red2 = 
targetsSkyStone.get(6); + red2.setName("Red Perimeter 2"); + VuforiaTrackable front1 = targetsSkyStone.get(7); + front1.setName("Front Perimeter 1"); + VuforiaTrackable front2 = targetsSkyStone.get(8); + front2.setName("Front Perimeter 2"); + VuforiaTrackable blue1 = targetsSkyStone.get(9); + blue1.setName("Blue Perimeter 1"); + VuforiaTrackable blue2 = targetsSkyStone.get(10); + blue2.setName("Blue Perimeter 2"); + VuforiaTrackable rear1 = targetsSkyStone.get(11); + rear1.setName("Rear Perimeter 1"); + VuforiaTrackable rear2 = targetsSkyStone.get(12); + rear2.setName("Rear Perimeter 2"); // For convenience, gather together all the trackable objects in one easily-iterable collection */ List allTrackables = new ArrayList(); - allTrackables.addAll(targetsRoverRuckus); + allTrackables.addAll(targetsSkyStone); /** * In order for localization to work, we need to tell the system where each target is on the field, and @@ -175,106 +204,124 @@ public class ConceptVuforiaNavRoverRuckus extends LinearOpMode { * where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station) * - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor) * - * This Rover Ruckus sample places a specific target in the middle of each perimeter wall. - * * Before being transformed, each target image is conceptually located at the origin of the field's * coordinate system (the center of the field), facing up. */ - /** - * To place the BlueRover target in the middle of the blue perimeter wall: - * - First we rotate it 90 around the field's X axis to flip it upright. - * - Then, we translate it along the Y axis to the blue perimeter wall. 
- */ - OpenGLMatrix blueRoverLocationOnField = OpenGLMatrix - .translation(0, mmFTCFieldWidth, mmTargetHeight) - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)); - blueRover.setLocation(blueRoverLocationOnField); - - /** - * To place the RedFootprint target in the middle of the red perimeter wall: - * - First we rotate it 90 around the field's X axis to flip it upright. - * - Second, we rotate it 180 around the field's Z axis so the image is flat against the red perimeter wall - * and facing inwards to the center of the field. - * - Then, we translate it along the negative Y axis to the red perimeter wall. - */ - OpenGLMatrix redFootprintLocationOnField = OpenGLMatrix - .translation(0, -mmFTCFieldWidth, mmTargetHeight) - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)); - redFootprint.setLocation(redFootprintLocationOnField); - - /** - * To place the FrontCraters target in the middle of the front perimeter wall: - * - First we rotate it 90 around the field's X axis to flip it upright. - * - Second, we rotate it 90 around the field's Z axis so the image is flat against the front wall - * and facing inwards to the center of the field. - * - Then, we translate it along the negative X axis to the front perimeter wall. - */ - OpenGLMatrix frontCratersLocationOnField = OpenGLMatrix - .translation(-mmFTCFieldWidth, 0, mmTargetHeight) - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , 90)); - frontCraters.setLocation(frontCratersLocationOnField); - - /** - * To place the BackSpace target in the middle of the back perimeter wall: - * - First we rotate it 90 around the field's X axis to flip it upright. - * - Second, we rotate it -90 around the field's Z axis so the image is flat against the back wall - * and facing inwards to the center of the field. - * - Then, we translate it along the X axis to the back perimeter wall. 
- */ - OpenGLMatrix backSpaceLocationOnField = OpenGLMatrix - .translation(mmFTCFieldWidth, 0, mmTargetHeight) - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)); - backSpace.setLocation(backSpaceLocationOnField); + // Set the position of the Stone Target. Since it's not fixed in position, assume it's at the field origin. + // Rotated it to to face forward, and raised it to sit on the ground correctly. + // This can be used for generic target-centric approach algorithms + stoneTarget.setLocation(OpenGLMatrix + .translation(0, 0, stoneZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); + + //Set the position of the bridge support targets with relation to origin (center of field) + blueFrontBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, bridgeRotZ))); + + blueRearBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, bridgeRotZ))); + + redFrontBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, -bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, 0))); + + redRearBridge.setLocation(OpenGLMatrix + .translation(bridgeX, -bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, 0))); + + //Set the position of the perimeter targets with relation to origin (center of field) + red1.setLocation(OpenGLMatrix + .translation(quadField, -halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180))); + + red2.setLocation(OpenGLMatrix + .translation(-quadField, -halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180))); + + front1.setLocation(OpenGLMatrix + .translation(-halfField, -quadField, 
mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , 90))); + + front2.setLocation(OpenGLMatrix + .translation(-halfField, quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90))); + + blue1.setLocation(OpenGLMatrix + .translation(-quadField, halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0))); + + blue2.setLocation(OpenGLMatrix + .translation(quadField, halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0))); + + rear1.setLocation(OpenGLMatrix + .translation(halfField, quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , -90))); + + rear2.setLocation(OpenGLMatrix + .translation(halfField, -quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); + + // + // Create a transformation matrix describing where the phone is on the robot. + // + // NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option. + // Lock it into Portrait for these numbers to work. + // + // Info: The coordinate frame for the robot looks the same as the field. + // The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. + // Z is UP on the robot. This equates to a bearing angle of Zero degrees. + // + // The phone starts out lying flat, with the screen facing Up and with the physical top of the phone + // pointing to the LEFT side of the Robot. + // The two examples below assume that the camera is facing forward out the front of the robot. + + // We need to rotate the camera around it's long axis to bring the correct camera forward. + if (CAMERA_CHOICE == BACK) { + phoneYRotate = -90; + } else { + phoneYRotate = 90; + } - /** - * Create a transformation matrix describing where the phone is on the robot. 
- * - * The coordinate frame for the robot looks the same as the field. - * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. - * Z is UP on the robot. This equates to a bearing angle of Zero degrees. - * - * The phone starts out lying flat, with the screen facing Up and with the physical top of the phone - * pointing to the LEFT side of the Robot. It's very important when you test this code that the top of the - * camera is pointing to the left side of the robot. The rotation angles don't work if you flip the phone. - * - * If using the rear (High Res) camera: - * We need to rotate the camera around it's long axis to bring the rear camera forward. - * This requires a negative 90 degree rotation on the Y axis - * - * If using the Front (Low Res) camera - * We need to rotate the camera around it's long axis to bring the FRONT camera forward. - * This requires a Positive 90 degree rotation on the Y axis - * - * Next, translate the camera lens to where it is on the robot. - * In this example, it is centered (left to right), but 110 mm forward of the middle of the robot, and 200 mm above ground level. - */ + // Rotate the phone vertical about the X axis if it's in portrait mode + if (PHONE_IS_PORTRAIT) { + phoneXRotate = 90 ; + } - final int CAMERA_FORWARD_DISPLACEMENT = 110; // eg: Camera is 110 mm in front of robot center - final int CAMERA_VERTICAL_DISPLACEMENT = 200; // eg: Camera is 200 mm above ground - final int CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line + // Next, translate the camera lens to where it is on the robot. + // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level. 
+ final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot center + final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground + final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line - OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix - .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) - .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, - CAMERA_CHOICE == FRONT ? 90 : -90, 0, 0)); + OpenGLMatrix robotFromCamera = OpenGLMatrix + .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate)); /** Let all the trackable listeners know where the phone is. */ - for (VuforiaTrackable trackable : allTrackables) - { - ((VuforiaTrackableDefaultListener)trackable.getListener()).setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection); + for (VuforiaTrackable trackable : allTrackables) { + ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection); } - /** Wait for the game to begin */ - telemetry.addData(">", "Press Play to start tracking"); - telemetry.update(); - waitForStart(); + // WARNING: + // In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed. + // This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample. + // CONSEQUENTLY do not put any driving commands in this loop. + // To restore the normal opmode structure, just un-comment the following line: + + // waitForStart(); - /** Start tracking the data sets we care about. 
*/ - targetsRoverRuckus.activate(); - while (opModeIsActive()) { + // Note: To use the remote camera preview: + // AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream" + // Tap the preview window to receive a fresh image. - // check all the trackable target to see which one (if any) is visible. + targetsSkyStone.activate(); + while (!isStopRequested()) { + + // check all the trackable targets to see which one (if any) is visible. targetVisible = false; for (VuforiaTrackable trackable : allTrackables) { if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) { @@ -307,5 +354,8 @@ public class ConceptVuforiaNavRoverRuckus extends LinearOpMode { } telemetry.update(); } + + // Disable Tracking when we are done; + targetsSkyStone.deactivate(); } } diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigationWebcam.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigationWebcam.java new file mode 100644 index 0000000..8f66a6d --- /dev/null +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVuforiaSkyStoneNavigationWebcam.java @@ -0,0 +1,371 @@ +/* Copyright (c) 2019 FIRST. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted (subject to the limitations in the disclaimer below) provided that + * the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, this list + * of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this + * list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. 
+ * + * Neither the name of FIRST nor the names of its contributors may be used to endorse or + * promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS + * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.ClassFactory; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; +import org.firstinspires.ftc.robotcore.external.matrices.VectorF; +import org.firstinspires.ftc.robotcore.external.navigation.Orientation; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; +import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; + +import java.util.ArrayList; +import java.util.List; + +import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX; +import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC; +import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK; + +/** + * This 2019-2020 OpMode illustrates the basics of using the Vuforia localizer to determine + * positioning and orientation of robot on the SKYSTONE FTC field. + * The code is structured as a LinearOpMode + * + * When images are located, Vuforia is able to determine the position and orientation of the + * image relative to the camera. This sample code then combines that information with a + * knowledge of where the target images are on the field, to determine the location of the camera. 
+ * + * From the Audience perspective, the Red Alliance station is on the right and the + * Blue Alliance Station is on the left. + + * Eight perimeter targets are distributed evenly around the four perimeter walls + * Four Bridge targets are located on the bridge uprights. + * Refer to the Field Setup manual for more specific location details + * + * A final calculation then uses the location of the camera on the robot to determine the + * robot's location and orientation on the field. + * + * @see VuforiaLocalizer + * @see VuforiaTrackableDefaultListener + * see skystone/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. + * + * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as + * is explained below. + */ + +@TeleOp(name="SKYSTONE Vuforia Nav Webcam", group ="Concept") +@Disabled +public class ConceptVuforiaSkyStoneNavigationWebcam extends LinearOpMode { + + // IMPORTANT: If you are using a USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false; + private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK; + private static final boolean PHONE_IS_PORTRAIT = false ; + + /* + * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which + * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function. + * A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer + * web site at https://developer.vuforia.com/license-manager. + * + * Vuforia license keys are always 380 characters long, and look as if they contain mostly + * random data. As an example, here is a example of a fragment of a valid key: + * ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ... 
+ * Once you've obtained a license key, copy the string from the Vuforia web site + * and paste it in to your code on the next line, between the double quotes. + */ + private static final String VUFORIA_KEY = + " --- YOUR NEW VUFORIA KEY GOES HERE --- "; + + // Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension. + // We will define some constants and conversions here + private static final float mmPerInch = 25.4f; + private static final float mmTargetHeight = (6) * mmPerInch; // the height of the center of the target image above the floor + + // Constant for Stone Target + private static final float stoneZ = 2.00f * mmPerInch; + + // Constants for the center support targets + private static final float bridgeZ = 6.42f * mmPerInch; + private static final float bridgeY = 23 * mmPerInch; + private static final float bridgeX = 5.18f * mmPerInch; + private static final float bridgeRotY = 59; // Units are degrees + private static final float bridgeRotZ = 180; + + // Constants for perimeter targets + private static final float halfField = 72 * mmPerInch; + private static final float quadField = 36 * mmPerInch; + + // Class Members + private OpenGLMatrix lastLocation = null; + private VuforiaLocalizer vuforia = null; + + /** + * This is the webcam we are to use. As with other hardware devices such as motors and + * servos, this device is identified using the robot configuration tool in the FTC application. + */ + WebcamName webcamName = null; + + private boolean targetVisible = false; + private float phoneXRotate = 0; + private float phoneYRotate = 0; + private float phoneZRotate = 0; + + @Override public void runOpMode() { + /* + * Retrieve the camera we are to use. + */ + webcamName = hardwareMap.get(WebcamName.class, "Webcam 1"); + + /* + * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine. 
+ * We can pass Vuforia the handle to a camera preview resource (on the RC phone); + * If no camera monitor is desired, use the parameter-less constructor instead (commented out below). + */ + int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName()); + VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId); + + // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(); + + parameters.vuforiaLicenseKey = VUFORIA_KEY; + + /** + * We also indicate which camera on the RC we wish to use. + */ + parameters.cameraName = webcamName; + + // Instantiate the Vuforia engine + vuforia = ClassFactory.getInstance().createVuforia(parameters); + + // Load the data sets for the trackable objects. These particular data + // sets are stored in the 'assets' part of our application. + VuforiaTrackables targetsSkyStone = this.vuforia.loadTrackablesFromAsset("Skystone"); + + VuforiaTrackable stoneTarget = targetsSkyStone.get(0); + stoneTarget.setName("Stone Target"); + VuforiaTrackable blueRearBridge = targetsSkyStone.get(1); + blueRearBridge.setName("Blue Rear Bridge"); + VuforiaTrackable redRearBridge = targetsSkyStone.get(2); + redRearBridge.setName("Red Rear Bridge"); + VuforiaTrackable redFrontBridge = targetsSkyStone.get(3); + redFrontBridge.setName("Red Front Bridge"); + VuforiaTrackable blueFrontBridge = targetsSkyStone.get(4); + blueFrontBridge.setName("Blue Front Bridge"); + VuforiaTrackable red1 = targetsSkyStone.get(5); + red1.setName("Red Perimeter 1"); + VuforiaTrackable red2 = targetsSkyStone.get(6); + red2.setName("Red Perimeter 2"); + VuforiaTrackable front1 = targetsSkyStone.get(7); + front1.setName("Front Perimeter 1"); + VuforiaTrackable front2 = targetsSkyStone.get(8); + front2.setName("Front Perimeter 2"); + VuforiaTrackable blue1 = targetsSkyStone.get(9); + blue1.setName("Blue Perimeter 1"); + VuforiaTrackable blue2 = 
targetsSkyStone.get(10); + blue2.setName("Blue Perimeter 2"); + VuforiaTrackable rear1 = targetsSkyStone.get(11); + rear1.setName("Rear Perimeter 1"); + VuforiaTrackable rear2 = targetsSkyStone.get(12); + rear2.setName("Rear Perimeter 2"); + + // For convenience, gather together all the trackable objects in one easily-iterable collection */ + List allTrackables = new ArrayList(); + allTrackables.addAll(targetsSkyStone); + + /** + * In order for localization to work, we need to tell the system where each target is on the field, and + * where the phone resides on the robot. These specifications are in the form of transformation matrices. + * Transformation matrices are a central, important concept in the math here involved in localization. + * See Transformation Matrix + * for detailed information. Commonly, you'll encounter transformation matrices as instances + * of the {@link OpenGLMatrix} class. + * + * If you are standing in the Red Alliance Station looking towards the center of the field, + * - The X axis runs from your left to the right. (positive from the center to the right) + * - The Y axis runs from the Red Alliance Station towards the other side of the field + * where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station) + * - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor) + * + * Before being transformed, each target image is conceptually located at the origin of the field's + * coordinate system (the center of the field), facing up. + */ + + // Set the position of the Stone Target. Since it's not fixed in position, assume it's at the field origin. + // Rotated it to to face forward, and raised it to sit on the ground correctly. 
+ // This can be used for generic target-centric approach algorithms + stoneTarget.setLocation(OpenGLMatrix + .translation(0, 0, stoneZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); + + //Set the position of the bridge support targets with relation to origin (center of field) + blueFrontBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, bridgeRotZ))); + + blueRearBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, bridgeRotZ))); + + redFrontBridge.setLocation(OpenGLMatrix + .translation(-bridgeX, -bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, -bridgeRotY, 0))); + + redRearBridge.setLocation(OpenGLMatrix + .translation(bridgeX, -bridgeY, bridgeZ) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 0, bridgeRotY, 0))); + + //Set the position of the perimeter targets with relation to origin (center of field) + red1.setLocation(OpenGLMatrix + .translation(quadField, -halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180))); + + red2.setLocation(OpenGLMatrix + .translation(-quadField, -halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180))); + + front1.setLocation(OpenGLMatrix + .translation(-halfField, -quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , 90))); + + front2.setLocation(OpenGLMatrix + .translation(-halfField, quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90))); + + blue1.setLocation(OpenGLMatrix + .translation(-quadField, halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0))); + + 
blue2.setLocation(OpenGLMatrix + .translation(quadField, halfField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0))); + + rear1.setLocation(OpenGLMatrix + .translation(halfField, quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0 , -90))); + + rear2.setLocation(OpenGLMatrix + .translation(halfField, -quadField, mmTargetHeight) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90))); + + // + // Create a transformation matrix describing where the phone is on the robot. + // + // NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option. + // Lock it into Portrait for these numbers to work. + // + // Info: The coordinate frame for the robot looks the same as the field. + // The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis. + // Z is UP on the robot. This equates to a bearing angle of Zero degrees. + // + // The phone starts out lying flat, with the screen facing Up and with the physical top of the phone + // pointing to the LEFT side of the Robot. + // The two examples below assume that the camera is facing forward out the front of the robot. + + // We need to rotate the camera around it's long axis to bring the correct camera forward. + if (CAMERA_CHOICE == BACK) { + phoneYRotate = -90; + } else { + phoneYRotate = 90; + } + + // Rotate the phone vertical about the X axis if it's in portrait mode + if (PHONE_IS_PORTRAIT) { + phoneXRotate = 90 ; + } + + // Next, translate the camera lens to where it is on the robot. + // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level. 
+ final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center + final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground + final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line + + OpenGLMatrix robotFromCamera = OpenGLMatrix + .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT) + .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate)); + + /** Let all the trackable listeners know where the phone is. */ + for (VuforiaTrackable trackable : allTrackables) { + ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection); + } + + // WARNING: + // In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed. + // This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample. + // CONSEQUENTLY do not put any driving commands in this loop. + // To restore the normal opmode structure, just un-comment the following line: + + // waitForStart(); + + // Note: To use the remote camera preview: + // AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream" + // Tap the preview window to receive a fresh image. + + targetsSkyStone.activate(); + while (!isStopRequested()) { + + // check all the trackable targets to see which one (if any) is visible. + targetVisible = false; + for (VuforiaTrackable trackable : allTrackables) { + if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) { + telemetry.addData("Visible Target", trackable.getName()); + targetVisible = true; + + // getUpdatedRobotLocation() will return null if no new information is available since + // the last time that call was made, or if the trackable is not currently visible. 
+ OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation(); + if (robotLocationTransform != null) { + lastLocation = robotLocationTransform; + } + break; + } + } + + // Provide feedback as to where the robot is located (if we know). + if (targetVisible) { + // express position (translation) of robot in inches. + VectorF translation = lastLocation.getTranslation(); + telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f", + translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch); + + // express the rotation of the robot in degrees. + Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES); + telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle); + } + else { + telemetry.addData("Visible Target", "none"); + } + telemetry.update(); + } + + // Disable Tracking when we are done; + targetsSkyStone.deactivate(); + } +} diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java index 4b1811d..5f86743 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorColor.java @@ -43,7 +43,7 @@ /* * This is an example LinearOpMode that shows how to use a color sensor in a generic * way, insensitive which particular make or model of color sensor is used. The opmode - * assumes that the color sensor is configured with a name of "color sensor". + * assumes that the color sensor is configured with a name of "sensor_color". * * If the color sensor has a light which is controllable, you can use the X button on * the gamepad to toggle the light on and off. 
diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java index 6bb6480..b367924 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorDigitalTouch.java @@ -38,7 +38,7 @@ * This is an example LinearOpMode that shows how to use * a REV Robotics Touch Sensor. * - * It assumes that the touch sensor is configured with a name of "digitalTouch". + * It assumes that the touch sensor is configured with a name of "sensor_digital". * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list. diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java index 880bd84..30bb377 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREV2mDistance.java @@ -21,7 +21,7 @@ are permitted (subject to the limitations in the disclaimer below) provided that NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESSFOR A PARTICULAR PURPOSE +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREVColorDistance.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREVColorDistance.java index 37910ca..ab12d78 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREVColorDistance.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/SensorREVColorDistance.java @@ -58,10 +58,22 @@ public class SensorREVColorDistance extends LinearOpMode { /** * Note that the REV Robotics Color-Distance incorporates two sensors into one device. - * It has a light/distance (range) sensor. It also has an RGB color sensor. - * The light/distance sensor saturates at around 2" (5cm). This means that targets that are 2" + * It has an IR proximity sensor which is used to calculate distance and an RGB color sensor. + * + * There will be some variation in the values measured depending on whether you are using a + * V3 color sensor versus the older V2 and V1 sensors, as the V3 is based around a different chip. + * + * For V1/V2, the light/distance sensor saturates at around 2" (5cm). This means that targets that are 2" * or closer will display the same value for distance/light detected. + * + * For V3, the distance sensor as configured can handle distances between 0.25" (~0.6cm) and 6" (~15cm). + * Any target closer than 0.25" will display as 0.25" and any target farther than 6" will display as 6". + * + * Note that the distance sensor function of both chips is built around an IR proximity sensor, which is + * sensitive to ambient light and the reflectivity of the surface against which you are measuring.
If + * very accurate distance is required you should consider calibrating the raw optical values read from the + * chip to your exact situation. + * * Although you configure a single REV Robotics Color-Distance sensor in your configuration file, * you can treat the sensor as two separate sensors that share the same name in your op mode. * @@ -70,7 +82,7 @@ public class SensorREVColorDistance extends LinearOpMode { * color of the screen to match the detected color. * * In this example, we also use the distance sensor to display the distance - * to the target object. Note that the distance sensor saturates at around 2" (5 cm). + * to the target object. * */ ColorSensor sensorColor; diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java index a8c68fd..9488570 100644 --- a/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/FtcRobotControllerActivity.java @@ -43,7 +43,6 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE import android.hardware.usb.UsbDevice; import android.hardware.usb.UsbManager; import android.net.wifi.WifiManager; -import android.os.Build; import android.os.Bundle; import android.os.IBinder; import android.preference.PreferenceManager; @@ -89,10 +88,13 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE import com.qualcomm.robotcore.eventloop.opmode.OpModeRegister; import com.qualcomm.robotcore.hardware.configuration.LynxConstants; import com.qualcomm.robotcore.hardware.configuration.Utility; +import com.qualcomm.robotcore.robot.Robot; +import com.qualcomm.robotcore.robot.RobotState; import com.qualcomm.robotcore.util.Device; import com.qualcomm.robotcore.util.Dimmer; import 
com.qualcomm.robotcore.util.ImmersiveMode; import com.qualcomm.robotcore.util.RobotLog; +import com.qualcomm.robotcore.util.WebServer; import com.qualcomm.robotcore.wifi.NetworkConnection; import com.qualcomm.robotcore.wifi.NetworkConnectionFactory; import com.qualcomm.robotcore.wifi.NetworkType; @@ -100,15 +102,17 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE import org.firstinspires.ftc.ftccommon.external.SoundPlayingRobotMonitor; import org.firstinspires.ftc.ftccommon.internal.FtcRobotControllerWatchdogService; import org.firstinspires.ftc.ftccommon.internal.ProgramAndManageActivity; +import org.firstinspires.ftc.onbotjava.OnBotJavaHelperImpl; +import org.firstinspires.ftc.onbotjava.OnBotJavaProgrammingMode; import org.firstinspires.ftc.robotcore.external.navigation.MotionDetection; -import org.firstinspires.ftc.robotcore.internal.hardware.DragonboardLynxDragonboardIsPresentPin; -import org.firstinspires.ftc.robotcore.internal.network.DeviceNameManager; +import org.firstinspires.ftc.robotcore.internal.hardware.android.AndroidBoard; import org.firstinspires.ftc.robotcore.internal.network.DeviceNameManagerFactory; -import org.firstinspires.ftc.robotcore.internal.network.WifiDirectDeviceNameManager; import org.firstinspires.ftc.robotcore.internal.network.PreferenceRemoterRC; import org.firstinspires.ftc.robotcore.internal.network.StartResult; +import org.firstinspires.ftc.robotcore.internal.network.WifiDirectChannelChanger; import org.firstinspires.ftc.robotcore.internal.network.WifiMuteEvent; import org.firstinspires.ftc.robotcore.internal.network.WifiMuteStateMachine; +import org.firstinspires.ftc.robotcore.internal.opmode.ClassManager; import org.firstinspires.ftc.robotcore.internal.system.AppUtil; import org.firstinspires.ftc.robotcore.internal.system.Assert; import org.firstinspires.ftc.robotcore.internal.system.PreferencesHelper; @@ -117,7 +121,7 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE 
USE import org.firstinspires.ftc.robotcore.internal.ui.ThemedActivity; import org.firstinspires.ftc.robotcore.internal.ui.UILocation; import org.firstinspires.ftc.robotcore.internal.webserver.RobotControllerWebInfo; -import org.firstinspires.ftc.robotcore.internal.webserver.WebServer; +import org.firstinspires.ftc.robotserver.internal.programmingmode.ProgrammingModeManager; import org.firstinspires.inspection.RcInspectionActivity; import java.util.Queue; @@ -135,7 +139,7 @@ public class FtcRobotControllerActivity extends Activity protected WifiManager.WifiLock wifiLock; protected RobotConfigFileManager cfgFileMgr; - protected ProgrammingWebHandlers programmingWebHandlers; + protected ProgrammingModeManager programmingModeManager; protected ProgrammingModeController programmingModeController; protected UpdateUI.Callback callback; @@ -168,6 +172,10 @@ public class FtcRobotControllerActivity extends Activity protected WifiMuteStateMachine wifiMuteStateMachine; protected MotionDetection motionDetection; + private static boolean permissionsValidated = false; + + private WifiDirectChannelChanger wifiDirectChannelChanger; + protected class RobotRestarter implements Restarter { public void requestRestart() { @@ -176,6 +184,7 @@ public void requestRestart() { } + protected boolean serviceShouldUnbind = false; protected ServiceConnection connection = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder service) { @@ -227,30 +236,57 @@ protected void passReceivedUsbAttachmentsToEventLoop() { } } + /** + * There are cases where a permission may be revoked and the system restart will restart the + * FtcRobotControllerActivity, instead of the launch activity. Detect when that happens, and throw + * the device back to the permission validator activity. 
+ */ + protected boolean enforcePermissionValidator() { + if (!permissionsValidated) { + RobotLog.vv(TAG, "Redirecting to permission validator"); + Intent permissionValidatorIntent = new Intent(AppUtil.getDefContext(), PermissionValidatorWrapper.class); + startActivity(permissionValidatorIntent); + finish(); + return true; + } else { + RobotLog.vv(TAG, "Permissions validated already"); + return false; + } + } + + public static void setPermissionsValidated() { + permissionsValidated = true; + } + @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); + + if (enforcePermissionValidator()) { + return; + } + RobotLog.onApplicationStart(); // robustify against onCreate() following onDestroy() but using the same app instance, which apparently does happen RobotLog.vv(TAG, "onCreate()"); ThemedActivity.appAppThemeToActivity(getTag(), this); // do this way instead of inherit to help AppInventor // Oddly, sometimes after a crash & restart the root activity will be something unexpected, like from the before crash? We don't yet understand RobotLog.vv(TAG, "rootActivity is of class %s", AppUtil.getInstance().getRootActivity().getClass().getSimpleName()); - Assert.assertTrue(FtcRobotControllerWatchdogService.isFtcRobotControllerActivity(AppUtil.getInstance().getRootActivity())); + RobotLog.vv(TAG, "launchActivity is of class %s", FtcRobotControllerWatchdogService.launchActivity()); + Assert.assertTrue(FtcRobotControllerWatchdogService.isLaunchActivity(AppUtil.getInstance().getRootActivity())); Assert.assertTrue(AppUtil.getInstance().isRobotController()); // Quick check: should we pretend we're not here, and so allow the Lynx to operate as // a stand-alone USB-connected module? 
if (LynxConstants.isRevControlHub()) { - if (LynxConstants.disableDragonboard()) { + if (LynxConstants.shouldDisableAndroidBoard()) { // Double-sure check that the Lynx Module can operate over USB, etc, then get out of Dodge RobotLog.vv(TAG, "disabling Dragonboard and exiting robot controller"); - DragonboardLynxDragonboardIsPresentPin.getInstance().setState(false); + AndroidBoard.getInstance().getAndroidBoardIsPresentPin().setState(false); AppUtil.getInstance().finishRootActivityAndExitApp(); - } - else { + } else { // Double-sure check that we can talk to the DB over the serial TTY - DragonboardLynxDragonboardIsPresentPin.getInstance().setState(true); + AndroidBoard.getInstance().getAndroidBoardIsPresentPin().setState(true); } } @@ -291,8 +327,18 @@ public boolean onMenuItemClick(MenuItem item) { BlocksOpMode.setActivityAndWebView(this, (WebView) findViewById(R.id.webViewBlocksRuntime)); - ClassManagerFactory.registerFilters(); - ClassManagerFactory.processAllClasses(); + /* + * Paranoia as the ClassManagerFactory requires EXTERNAL_STORAGE permissions + * and we've seen on the DS where the finish() call above does not short-circuit + * the onCreate() call for the activity and then we crash here because we don't + * have permissions. So... 
+ */ + if (permissionsValidated) { + ClassManager.getInstance().setOnBotJavaClassHelper(new OnBotJavaHelperImpl()); + ClassManagerFactory.registerFilters(); + ClassManagerFactory.processAllClasses(); + } + cfgFileMgr = new RobotConfigFileManager(this); // Clean up 'dirty' status after a possible crash @@ -313,9 +359,11 @@ public boolean onMenuItemClick(MenuItem item) { dimmer = new Dimmer(this); dimmer.longBright(); - programmingWebHandlers = new ProgrammingWebHandlers(); + programmingModeManager = new ProgrammingModeManager(); + programmingModeManager.register(new ProgrammingWebHandlers()); + programmingModeManager.register(new OnBotJavaProgrammingMode()); programmingModeController = new ProgrammingModeControllerImpl( - this, (TextView) findViewById(R.id.textRemoteProgrammingMode), programmingWebHandlers); + this, (TextView) findViewById(R.id.textRemoteProgrammingMode), programmingModeManager); updateUI = createUpdateUI(); callback = createUICallback(updateUI); @@ -334,6 +382,7 @@ public boolean onMenuItemClick(MenuItem item) { bindToService(); logPackageVersions(); logDeviceSerialNumber(); + AndroidBoard.getInstance().logAndroidBoardInfo(); RobotLog.logDeviceInfo(); if (preferencesHelper.readBoolean(getString(R.string.pref_wifi_automute), false)) { @@ -368,6 +417,9 @@ protected void onStart() { cfgFileMgr.getActiveConfigAndUpdateUI(); + // check to see if there is a preferred Wi-Fi to use. + checkPreferredChannel(); + entireScreenLayout.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { @@ -414,9 +466,9 @@ protected void onDestroy() { unbindFromService(); // If the app manually (?) is stopped, then we don't need the auto-starting function (?) 
ServiceController.stopService(FtcRobotControllerWatchdogService.class); - wifiLock.release(); + if (wifiLock != null) wifiLock.release(); + if (preferencesHelper != null) preferencesHelper.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(sharedPreferencesListener); - preferencesHelper.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(sharedPreferencesListener); RobotLog.cancelWriteLogcatToDisk(); } @@ -424,12 +476,13 @@ protected void bindToService() { readNetworkType(); Intent intent = new Intent(this, FtcRobotControllerService.class); intent.putExtra(NetworkConnectionFactory.NETWORK_CONNECTION_TYPE, networkType); - bindService(intent, connection, Context.BIND_AUTO_CREATE); + serviceShouldUnbind = bindService(intent, connection, Context.BIND_AUTO_CREATE); } protected void unbindFromService() { - if (controllerService != null) { + if (serviceShouldUnbind) { unbindService(connection); + serviceShouldUnbind = false; } } @@ -443,7 +496,7 @@ protected void logPackageVersions() { } protected void logDeviceSerialNumber() { - RobotLog.ii(TAG, "410c serial number: " + Build.SERIAL); + RobotLog.ii(TAG, "Android device serial number: " + Device.getSerialNumberOrUnknown()); } protected void readNetworkType() { @@ -492,6 +545,26 @@ public boolean onCreateOptionsMenu(Menu menu) { return true; } + private boolean isRobotRunning() { + if (controllerService == null) { + return false; + } + + Robot robot = controllerService.getRobot(); + + if ((robot == null) || (robot.eventLoopManager == null)) { + return false; + } + + RobotState robotState = robot.eventLoopManager.state; + + if (robotState != RobotState.RUNNING) { + return false; + } else { + return true; + } + } + @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); @@ -500,20 +573,24 @@ public boolean onOptionsItemSelected(MenuItem item) { if (cfgFileMgr.getActiveConfig().isNoConfig()) { // Tell the user they must configure the robot before starting 
programming mode. // TODO: as we are no longer truly 'modal' this warning should be adapted - AppUtil.getInstance().showToast(UILocation.BOTH, context, context.getString(R.string.toastConfigureRobotBeforeProgrammingMode)); + AppUtil.getInstance().showToast(UILocation.BOTH, context.getString(R.string.toastConfigureRobotBeforeProgrammingMode)); } else { Intent programmingModeIntent = new Intent(AppUtil.getDefContext(), ProgrammingModeActivity.class); programmingModeIntent.putExtra( LaunchActivityConstantsList.PROGRAMMING_MODE_ACTIVITY_PROGRAMMING_WEB_HANDLERS, - new LocalByRefIntentExtraHolder(programmingWebHandlers)); + new LocalByRefIntentExtraHolder(programmingModeManager)); startActivity(programmingModeIntent); } return true; } else if (id == R.id.action_program_and_manage) { - Intent programmingModeIntent = new Intent(AppUtil.getDefContext(), ProgramAndManageActivity.class); - RobotControllerWebInfo webInfo = programmingWebHandlers.getWebServer().getConnectionInformation(); - programmingModeIntent.putExtra(LaunchActivityConstantsList.RC_WEB_INFO, webInfo.toJson()); - startActivity(programmingModeIntent); + if (isRobotRunning()) { + Intent programmingModeIntent = new Intent(AppUtil.getDefContext(), ProgramAndManageActivity.class); + RobotControllerWebInfo webInfo = programmingModeManager.getWebServer().getConnectionInformation(); + programmingModeIntent.putExtra(LaunchActivityConstantsList.RC_WEB_INFO, webInfo.toJson()); + startActivity(programmingModeIntent); + } else { + AppUtil.getInstance().showToast(UILocation.ONLY_LOCAL, context.getString(R.string.toastWifiUpBeforeProgrammingMode)); + } } else if (id == R.id.action_inspection_mode) { Intent inspectionModeIntent = new Intent(AppUtil.getDefContext(), RcInspectionActivity.class); startActivity(inspectionModeIntent); @@ -526,7 +603,7 @@ else if (id == R.id.action_blocks) { } else if (id == R.id.action_restart_robot) { dimmer.handleDimTimer(); - AppUtil.getInstance().showToast(UILocation.BOTH, context, 
context.getString(R.string.toastRestartingRobot)); + AppUtil.getInstance().showToast(UILocation.BOTH, context.getString(R.string.toastRestartingRobot)); requestRobotRestart(); return true; } @@ -590,7 +667,7 @@ private void updateMonitorLayout(Configuration configuration) { protected void onActivityResult(int request, int result, Intent intent) { if (request == REQUEST_CONFIG_WIFI_CHANNEL) { if (result == RESULT_OK) { - AppUtil.getInstance().showToast(UILocation.BOTH, context, context.getString(R.string.toastWifiConfigurationComplete)); + AppUtil.getInstance().showToast(UILocation.BOTH, context.getString(R.string.toastWifiConfigurationComplete)); } } // was some historical confusion about launch codes here, so we err safely @@ -606,7 +683,7 @@ public void onServiceBind(final FtcRobotControllerService service) { updateUI.setControllerService(controllerService); updateUIAndRequestRobotSetup(); - programmingWebHandlers.setState(new FtcRobotControllerServiceState() { + programmingModeManager.setState(new FtcRobotControllerServiceState() { @NonNull @Override public WebServer getWebServer() { @@ -656,6 +733,7 @@ private void requestRobotSetup(@Nullable Runnable runOnComplete) { controllerService.setupRobot(eventLoop, idleLoop, runOnComplete); passReceivedUsbAttachmentsToEventLoop(); + AndroidBoard.showErrorIfUnknownControlHub(); } protected OpModeRegister createOpModeRegister() { @@ -683,6 +761,24 @@ private void showRestartRobotCompleteToast(@StringRes int resid) { AppUtil.getInstance().showToast(UILocation.BOTH, AppUtil.getDefContext().getString(resid)); } + private void checkPreferredChannel() { + // For P2P network, check to see what preferred channel is. + if (networkType == NetworkType.WIFIDIRECT) { + int prefChannel = preferencesHelper.readInt(getString(com.qualcomm.ftccommon.R.string.pref_wifip2p_channel), -1); + if (prefChannel == -1) { + prefChannel = 0; + RobotLog.vv(TAG, "pref_wifip2p_channel: No preferred channel defined. 
Will use a default value of %d", prefChannel); + } else { + RobotLog.vv(TAG, "pref_wifip2p_channel: Found existing preferred channel (%d).", prefChannel); + } + + // attempt to set the preferred channel. + RobotLog.vv(TAG, "pref_wifip2p_channel: attempting to set preferred channel..."); + wifiDirectChannelChanger = new WifiDirectChannelChanger(); + wifiDirectChannelChanger.changeToChannel(prefChannel); + } + } + protected void hittingMenuButtonBrightensScreen() { ActionBar actionBar = getActionBar(); if (actionBar != null) { diff --git a/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java b/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java new file mode 100644 index 0000000..1ae71b0 --- /dev/null +++ b/library/src/main/java/org/firstinspires/ftc/robotcontroller/internal/PermissionValidatorWrapper.java @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2018 Craig MacFarlane + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, are permitted + * (subject to the limitations in the disclaimer below) provided that the following conditions are + * met: + * + * Redistributions of source code must retain the above copyright notice, this list of conditions + * and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions + * and the following disclaimer in the documentation and/or other materials provided with the + * distribution. + * + * Neither the name of Craig MacFarlane nor the names of its contributors may be used to + * endorse or promote products derived from this software without specific prior written permission. + * + * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. 
THIS + * SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, + * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +package org.firstinspires.ftc.robotcontroller.internal; + +import android.Manifest; +import android.os.Bundle; + +import com.qualcomm.ftcrobotcontroller.R; + +import org.firstinspires.ftc.robotcore.internal.system.Misc; +import org.firstinspires.ftc.robotcore.internal.system.PermissionValidatorActivity; + +import java.util.ArrayList; +import java.util.List; + +public class PermissionValidatorWrapper extends PermissionValidatorActivity { + + private final String TAG = "PermissionValidatorWrapper"; + + /* + * The list of dangerous permissions the robot controller needs. 
+ */ + protected List robotControllerPermissions = new ArrayList() {{ + add(Manifest.permission.WRITE_EXTERNAL_STORAGE); + add(Manifest.permission.READ_EXTERNAL_STORAGE); + add(Manifest.permission.CAMERA); + add(Manifest.permission.ACCESS_COARSE_LOCATION); + }}; + + private final static Class startApplication = FtcRobotControllerActivity.class; + + public String mapPermissionToExplanation(final String permission) { + if (permission.equals(Manifest.permission.WRITE_EXTERNAL_STORAGE)) { + return Misc.formatForUser(R.string.permRcWriteExternalStorageExplain); + } else if (permission.equals(Manifest.permission.READ_EXTERNAL_STORAGE)) { + return Misc.formatForUser(R.string.permRcReadExternalStorageExplain); + } else if (permission.equals(Manifest.permission.CAMERA)) { + return Misc.formatForUser(R.string.permRcCameraExplain); + } else if (permission.equals(Manifest.permission.ACCESS_COARSE_LOCATION)) { + return Misc.formatForUser(R.string.permAccessCoarseLocationExplain); + } + return Misc.formatForUser(R.string.permGenericExplain); + } + + @Override + protected void onCreate(Bundle savedInstanceState) + { + super.onCreate(savedInstanceState); + + permissions = robotControllerPermissions; + } + + protected Class onStartApplication() + { + FtcRobotControllerActivity.setPermissionsValidated(); + return startApplication; + } +} diff --git a/library/src/main/res/values/strings.xml b/library/src/main/res/values/strings.xml index d148531..76aed05 100644 --- a/library/src/main/res/values/strings.xml +++ b/library/src/main/res/values/strings.xml @@ -55,6 +55,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Configuration Complete Restarting Robot You must Configure Robot before starting Programming Mode. + The Robot Controller must be fully up and running before starting Programming Mode. Is Wifi turned on in settings?