package org.usfirst.frc.team3695.robot;

import edu.wpi.cscore.CvSink;
import edu.wpi.cscore.CvSource;
import edu.wpi.cscore.UsbCamera;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.IterativeRobot;
import org.opencv.core.Core;
import org.opencv.core.CvException;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.usfirst.frc.team3695.robot.Constants.VisionConstants;
import org.usfirst.frc.team3695.robot.enumeration.Bot;

import java.util.ArrayList;

/**
 * Contains methods used for anything vision
 */
public class Vision extends IterativeRobot {

    // Two cameras for double FOV
    private UsbCamera cameraLeft;
    private UsbCamera cameraRight;

    private UsbCamera cameraScrew;
    private UsbCamera cameraFrame;

    private Mat failImage;

    public Vision() {
        Size camSize = new Size(VisionConstants.CAM_WIDTH, VisionConstants.CAM_HEIGHT);
        failImage = Mat.zeros(camSize, 0);
    }

    // Places the vision in a separate thread from everything else as recommended by FIRST.
    public void startConcatCameraThread() {
        new Thread(this::concatCameraStream).start();
    }

    public void startScrewCameraThread() {
        new Thread(this::screwCameraStream).start();
    }

    public void startFrameCameraThread() {
        new Thread(this::frameCameraStream).start();
    }

    private void screwCameraStream() {
        cameraScrew = CameraServer.getInstance().startAutomaticCapture("Screw", VisionConstants.SCREW_ID);

        CvSink cvsinkScrew = new CvSink("screwSink");
        cvsinkScrew.setSource(cameraScrew);
        cvsinkScrew.setEnabled(true);

        Mat streamImages = new Mat();

        CvSource outputScrew = CameraServer.getInstance().putVideo("Screw", VisionConstants.CAM_WIDTH, VisionConstants.CAM_HEIGHT);
        while (!Thread.interrupted()) {
            try {
                cvsinkScrew.grabFrame(streamImages);
                if ((Robot.bot == Bot.TEUFELSKIND && Constants.TEUFELSKIND.SCREW_CAM_FLIP)
                        || (Robot.bot == Bot.OOF && Constants.OOF.SCREW_CAM_FLIP)) {
                    Core.rotate(streamImages, streamImages, Core.ROTATE_180);
                }
                outputScrew.putFrame(streamImages);
            } catch (CvException cameraFail) {
                DriverStation.reportWarning("Screw Camera: " + cameraFail.toString(), false);
                outputScrew.putFrame(failImage);
            }
        }
    }

    private void frameCameraStream() {
        cameraFrame = CameraServer.getInstance().startAutomaticCapture("Frame", VisionConstants.HOOK_ID);

        CvSink cvsinkFrame = new CvSink("frameSink");
        cvsinkFrame.setSource(cameraFrame);
        cvsinkFrame.setEnabled(true);

        Mat streamImages = new Mat();

        CvSource outputFrame = CameraServer.getInstance().putVideo("Frame", VisionConstants.CAM_WIDTH, VisionConstants.CAM_HEIGHT);
        while (!Thread.interrupted()) {
            try {
                cvsinkFrame.grabFrame(streamImages);
                if ((Robot.bot == Bot.TEUFELSKIND && Constants.TEUFELSKIND.FRAME_CAM_FLIP)
                        || (Robot.bot == Bot.OOF && Constants.OOF.FRAME_CAM_FLIP)) {
                    Core.rotate(streamImages, streamImages, Core.ROTATE_180);
                }
                outputFrame.putFrame(streamImages);
            } catch (CvException cameraFail) {
                DriverStation.reportWarning("Frame Camera: " + cameraFail.toString(), false);
                outputFrame.putFrame(failImage);
            }
        }
    }

    /**
     * Start both the left and right camera streams and combine them into a single one which is then pushed
     * to an output stream titled Concat.
     * This method should only be used for starting the camera stream.
     */
    private void concatCameraStream() {
        cameraLeft = CameraServer.getInstance().startAutomaticCapture("Left", VisionConstants.LEFT_ID);
        cameraRight = CameraServer.getInstance().startAutomaticCapture("Right", VisionConstants.RIGHT_ID);

        // Dummy sinks to keep camera connections open.
        CvSink cvsinkLeft = new CvSink("leftSink");
        cvsinkLeft.setSource(cameraLeft);
        cvsinkLeft.setEnabled(true);
        CvSink cvsinkRight = new CvSink("rightSink");
        cvsinkRight.setSource(cameraRight);
        cvsinkRight.setEnabled(true);

        // Matrices to store each image from the cameras.
        Mat leftSource = new Mat();
        Mat rightSource = new Mat();

        // The ArrayList of left and right sources is needed for the hconcat method used to combine the streams
        ArrayList<Mat> sources = new ArrayList<>();
        sources.add(leftSource);
        sources.add(rightSource);

        // Concatenation of both matrices
        Mat concat = new Mat();

        // Puts the combined video on the SmartDashboard (I think)
        // The width is multiplied by 2 as the dimensions of the stream will have a width two times that of a single webcam
        CvSource outputStream = CameraServer.getInstance().putVideo("Concat", 2 * VisionConstants.CAM_WIDTH, VisionConstants.CAM_HEIGHT);

        while (!Thread.interrupted()) {
            try {
                // Provide each mat with the current frame
                cvsinkLeft.grabFrame(leftSource);
                cvsinkRight.grabFrame(rightSource);
                // Combine the frames into a single mat in the Output and stream the image.
                Core.hconcat(sources, concat);
                outputStream.putFrame(concat);
            } catch (CvException cameraFail) {
                DriverStation.reportWarning("Concat Cameras: " + cameraFail.toString(), false);
                outputStream.putFrame(failImage);
            }
        }
    }
}
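For reference, the three start methods above are intended to be called once from the main robot class. The following is a minimal sketch of such a call site, not the team's actual Robot class: the static bot field is implied by the Robot.bot references above, but its type and initialization here are assumed.

public class Robot extends IterativeRobot {

    // Assumed: the real Robot class exposes a static Bot field (Vision reads Robot.bot);
    // the value assigned here is purely illustrative.
    public static Bot bot = Bot.TEUFELSKIND;

    private Vision vision;

    @Override
    public void robotInit() {
        vision = new Vision();
        // Each call spawns its own long-running camera thread,
        // so robotInit() returns immediately after starting them.
        vision.startConcatCameraThread();
        vision.startScrewCameraThread();
        vision.startFrameCameraThread();
    }
}

Because each streaming loop runs until its thread is interrupted, starting the threads in robotInit() does not block the periodic robot methods.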
When instantiating a new Exception, you can set another Exception as its cause.
See the Oracle documentation on Throwables.
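Applied to the catch blocks in the Vision code above, a minimal sketch might look like the following (the wrapper exception and its message are illustrative, not part of the original code):

} catch (CvException cameraFail) {
    // The two-argument Throwable constructor records cameraFail as the cause,
    // so wrapped.getCause() returns the original CvException and its stack
    // trace is printed beneath the new exception's trace.
    Exception wrapped = new Exception("Screw camera stream failed", cameraFail);
    wrapped.printStackTrace();
    outputScrew.putFrame(failImage);
}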
Usage example
Complete Example: