forked from PHScodemaster/FRC4949
-
Notifications
You must be signed in to change notification settings - Fork 0
/
2015 Vision Color
240 lines (208 loc) · 11.5 KB
/
2015 Vision Color
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
package org.usfirst.frc.team4949.robot;
import java.lang.Math;
import java.util.Comparator;
import java.util.Vector;
import com.ni.vision.NIVision;
import com.ni.vision.NIVision.Image;
import com.ni.vision.NIVision.ImageType;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.SampleRobot;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
* Example of finding yellow totes based on color.
* This example utilizes an image file, which you need to copy to the roboRIO
* To use a camera you will have to integrate the appropriate camera details with this example.
* To use a USB camera instead, see the SimpleVision and AdvancedVision examples for details
* on using the USB camera. To use an Axis Camera, see the AxisCamera example for details on
* using an Axis Camera.
*
* Sample images can be found here: http://wp.wpi.edu/wpilib/2015/01/16/sample-images-for-vision-projects/
*/
public class Robot extends SampleRobot {

    /**
     * Measurements of a single binary-image particle as reported by the NIVision
     * particle-analysis calls. Orders DESCENDING by Area so that after
     * {@code particles.sort(null)} the largest particle is at index 0, which is
     * what autonomous() scores.
     */
    public class ParticleReport implements Comparator<ParticleReport>, Comparable<ParticleReport> {
        double PercentAreaToImageArea;
        double Area;
        double ConvexHullArea;
        double BoundingRectLeft;
        double BoundingRectTop;
        double BoundingRectRight;
        double BoundingRectBottom;

        /**
         * Descending-area natural order. Uses Double.compare rather than
         * {@code (int)(r.Area - this.Area)}: the cast truncates toward zero, so
         * two particles whose areas differ by less than 1.0 would incorrectly
         * compare as equal, and very large differences could overflow int.
         */
        public int compareTo(ParticleReport r) {
            return Double.compare(r.Area, this.Area);
        }

        /**
         * Comparator view, kept consistent with compareTo (descending area).
         * The original returned ASCENDING order here, contradicting compareTo;
         * sorting behavior would have depended on which path the sort used.
         */
        public int compare(ParticleReport r1, ParticleReport r2) {
            return Double.compare(r2.Area, r1.Area);
        }
    };

    /** Scores (0-100) for the tests used to decide whether a particle is a tote. */
    public class Scores {
        double Trapezoid;
        double LongAspect;
        double ShortAspect;
        double AreaToConvexHullArea;
    };

    // Images
    Image frame;       // RGB source image read from disk each loop iteration
    Image binaryFrame; // U8 mask produced by the HSV color threshold
    int imaqError;     // last error code returned by imaqParticleFilter4

    // Constants (tunable from the SmartDashboard at runtime)
    NIVision.Range TOTE_HUE_RANGE = new NIVision.Range(24, 49);  // Default hue range for yellow tote
    NIVision.Range TOTE_SAT_RANGE = new NIVision.Range(67, 255); // Default saturation range for yellow tote
    NIVision.Range TOTE_VAL_RANGE = new NIVision.Range(49, 255); // Default value range for yellow tote
    double AREA_MINIMUM = 0.5; // Default minimum particle area, as a percentage of total image area
    double LONG_RATIO = 2.22;  // Tote long side = 26.9 / tote height = 12.1 = 2.22
    double SHORT_RATIO = 1.4;  // Tote short side = 16.9 / tote height = 12.1 = 1.4
    double SCORE_MIN = 75.0;   // Minimum score to be considered a tote
    double VIEW_ANGLE = 49.4;  // View angle of camera, set to Axis M1011 by default; 64 for M1013, 51.7 for 206, 52 for HD3000 square, 60 for HD3000 640x480

    NIVision.ParticleFilterCriteria2 criteria[] = new NIVision.ParticleFilterCriteria2[1];
    NIVision.ParticleFilterOptions2 filterOptions = new NIVision.ParticleFilterOptions2(0, 0, 1, 1);
    Scores scores = new Scores();

    /**
     * Allocates the working images, sets up the area filter criterion, and
     * seeds the SmartDashboard with default threshold values so the tuning
     * fields appear immediately.
     */
    public void robotInit() {
        // create images
        frame = NIVision.imaqCreateImage(ImageType.IMAGE_RGB, 0);
        binaryFrame = NIVision.imaqCreateImage(ImageType.IMAGE_U8, 0);
        criteria[0] = new NIVision.ParticleFilterCriteria2(NIVision.MeasurementType.MT_AREA_BY_IMAGE_AREA, AREA_MINIMUM, 100.0, 0, 0);

        // Put default values to SmartDashboard so fields will appear
        SmartDashboard.putNumber("Tote hue min", TOTE_HUE_RANGE.minValue);
        SmartDashboard.putNumber("Tote hue max", TOTE_HUE_RANGE.maxValue);
        SmartDashboard.putNumber("Tote sat min", TOTE_SAT_RANGE.minValue);
        SmartDashboard.putNumber("Tote sat max", TOTE_SAT_RANGE.maxValue);
        SmartDashboard.putNumber("Tote val min", TOTE_VAL_RANGE.minValue);
        SmartDashboard.putNumber("Tote val max", TOTE_VAL_RANGE.maxValue);
        SmartDashboard.putNumber("Area min %", AREA_MINIMUM);
    }

    /**
     * Vision loop: reads a sample image from disk, thresholds for yellow in
     * HSV space, filters out small particles, then scores the LARGEST particle
     * against the tote geometry tests and publishes the results to the
     * SmartDashboard.
     */
    public void autonomous() {
        while (isAutonomous() && isEnabled()) {
            // Read file in from disk. For this example to run you need to copy image20.jpg from the SampleImages folder to the
            // directory shown below using FTP or SFTP: http://wpilib.screenstepslive.com/s/4485/m/24166/l/282299-roborio-ftp
            NIVision.imaqReadFile(frame, "/home/lvuser/SampleImages/image20.jpg");

            // Update threshold values from SmartDashboard. For performance reasons it is recommended to remove this after calibration is finished.
            TOTE_HUE_RANGE.minValue = (int)SmartDashboard.getNumber("Tote hue min", TOTE_HUE_RANGE.minValue);
            TOTE_HUE_RANGE.maxValue = (int)SmartDashboard.getNumber("Tote hue max", TOTE_HUE_RANGE.maxValue);
            TOTE_SAT_RANGE.minValue = (int)SmartDashboard.getNumber("Tote sat min", TOTE_SAT_RANGE.minValue);
            TOTE_SAT_RANGE.maxValue = (int)SmartDashboard.getNumber("Tote sat max", TOTE_SAT_RANGE.maxValue);
            TOTE_VAL_RANGE.minValue = (int)SmartDashboard.getNumber("Tote val min", TOTE_VAL_RANGE.minValue);
            TOTE_VAL_RANGE.maxValue = (int)SmartDashboard.getNumber("Tote val max", TOTE_VAL_RANGE.maxValue);

            // Threshold the image looking for yellow (tote color)
            NIVision.imaqColorThreshold(binaryFrame, frame, 255, NIVision.ColorMode.HSV, TOTE_HUE_RANGE, TOTE_SAT_RANGE, TOTE_VAL_RANGE);

            // Send particle count to dashboard
            int numParticles = NIVision.imaqCountParticles(binaryFrame, 1);
            SmartDashboard.putNumber("Masked particles", numParticles);

            // Send masked image to dashboard to assist in tweaking mask.
            CameraServer.getInstance().setImage(binaryFrame);

            // Filter out small particles
            float areaMin = (float)SmartDashboard.getNumber("Area min %", AREA_MINIMUM);
            criteria[0].lower = areaMin;
            imaqError = NIVision.imaqParticleFilter4(binaryFrame, binaryFrame, criteria, filterOptions, null);

            // Send particle count after filtering to dashboard
            numParticles = NIVision.imaqCountParticles(binaryFrame, 1);
            SmartDashboard.putNumber("Filtered particles", numParticles);

            if (numParticles > 0) {
                // Measure particles and sort by particle size (descending; see ParticleReport.compareTo)
                Vector<ParticleReport> particles = new Vector<ParticleReport>();
                for (int particleIndex = 0; particleIndex < numParticles; particleIndex++) {
                    ParticleReport par = new ParticleReport();
                    par.PercentAreaToImageArea = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_AREA_BY_IMAGE_AREA);
                    par.Area = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_AREA);
                    par.ConvexHullArea = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_CONVEX_HULL_AREA);
                    par.BoundingRectTop = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_BOUNDING_RECT_TOP);
                    par.BoundingRectLeft = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_BOUNDING_RECT_LEFT);
                    par.BoundingRectBottom = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_BOUNDING_RECT_BOTTOM);
                    par.BoundingRectRight = NIVision.imaqMeasureParticle(binaryFrame, particleIndex, 0, NIVision.MeasurementType.MT_BOUNDING_RECT_RIGHT);
                    particles.add(par);
                }
                particles.sort(null); // null comparator -> natural order (ParticleReport.compareTo, largest first)

                // This example only scores the largest particle. Extending to score all particles and choosing the desired one is left as an exercise
                // for the reader. Note that the long and short side scores expect a single tote and will not work for a stack of 2 or more totes.
                // Modification of the code to accommodate 2 or more stacked totes is left as an exercise for the reader.
                scores.Trapezoid = TrapezoidScore(particles.elementAt(0));
                SmartDashboard.putNumber("Trapezoid", scores.Trapezoid);
                scores.LongAspect = LongSideScore(particles.elementAt(0));
                SmartDashboard.putNumber("Long Aspect", scores.LongAspect);
                scores.ShortAspect = ShortSideScore(particles.elementAt(0));
                SmartDashboard.putNumber("Short Aspect", scores.ShortAspect);
                scores.AreaToConvexHullArea = ConvexHullAreaScore(particles.elementAt(0));
                SmartDashboard.putNumber("Convex Hull Area", scores.AreaToConvexHullArea);

                boolean isTote = scores.Trapezoid > SCORE_MIN && (scores.LongAspect > SCORE_MIN || scores.ShortAspect > SCORE_MIN) && scores.AreaToConvexHullArea > SCORE_MIN;
                boolean isLong = scores.LongAspect > scores.ShortAspect;

                // Send distance and tote status to dashboard. The bounding rect, particularly the horizontal center (left - right) may be useful for rotating/driving towards a tote
                SmartDashboard.putBoolean("IsTote", isTote);
                SmartDashboard.putNumber("Distance", computeDistance(binaryFrame, particles.elementAt(0), isLong));
            } else {
                SmartDashboard.putBoolean("IsTote", false);
            }

            Timer.delay(0.005); // wait for a motor update time
        }
    }

    /** Teleop does no vision work in this sample; just idles at the motor update rate. */
    public void operatorControl() {
        while (isOperatorControl() && isEnabled()) {
            Timer.delay(0.005); // wait for a motor update time
        }
    }

    /**
     * Comparator function for sorting particles. Returns true if particle 1 is larger.
     * NOTE(review): not referenced anywhere in this file — the sort above uses
     * ParticleReport.compareTo. Kept for interface compatibility.
     */
    static boolean CompareParticleSizes(ParticleReport particle1, ParticleReport particle2) {
        // we want descending sort order
        return particle1.PercentAreaToImageArea > particle2.PercentAreaToImageArea;
    }

    /**
     * Converts a ratio with ideal value of 1 to a score. The resulting function is piecewise
     * linear going from (0,0) to (1,100) to (2,0) and is 0 for all inputs outside the range 0-2.
     */
    double ratioToScore(double ratio) {
        return (Math.max(0, Math.min(100 * (1 - Math.abs(1 - ratio)), 100)));
    }

    /**
     * Method to score convex hull area. This scores how "complete" the particle is.
     * Particles with large holes will score worse than a filled-in shape.
     */
    double ConvexHullAreaScore(ParticleReport report) {
        return ratioToScore((report.Area / report.ConvexHullArea) * 1.18);
    }

    /**
     * Method to score if the particle appears to be a trapezoid. Compares the convex hull
     * (filled in) area to the area of the bounding box. The expectation is that the convex
     * hull area is about 95.4% of the bounding box area for an ideal tote.
     */
    double TrapezoidScore(ParticleReport report) {
        return ratioToScore(report.ConvexHullArea / ((report.BoundingRectRight - report.BoundingRectLeft) * (report.BoundingRectBottom - report.BoundingRectTop) * .954));
    }

    /**
     * Method to score if the aspect ratio of the particle appears to match the long side of a tote.
     */
    double LongSideScore(ParticleReport report) {
        return ratioToScore(((report.BoundingRectRight - report.BoundingRectLeft) / (report.BoundingRectBottom - report.BoundingRectTop)) / LONG_RATIO);
    }

    /**
     * Method to score if the aspect ratio of the particle appears to match the short side of a tote.
     */
    double ShortSideScore(ParticleReport report) {
        return ratioToScore(((report.BoundingRectRight - report.BoundingRectLeft) / (report.BoundingRectBottom - report.BoundingRectTop)) / SHORT_RATIO);
    }

    /**
     * Computes the estimated distance to a target using the width of the particle in the image.
     * For more information and graphics showing the math behind this approach see the Vision
     * Processing section of the ScreenStepsLive documentation.
     *
     * @param image  The image to use for measuring the particle estimated rectangle
     * @param report The Particle Analysis Report for the particle
     * @param isLong Boolean indicating if the target is believed to be the long side of a tote
     * @return The estimated distance to the target in feet.
     */
    double computeDistance(Image image, ParticleReport report, boolean isLong) {
        double normalizedWidth, targetWidth;
        NIVision.GetImageSizeResult size;

        size = NIVision.imaqGetImageSize(image);
        normalizedWidth = 2 * (report.BoundingRectRight - report.BoundingRectLeft) / size.width;
        // Tote long side is 26.9 in (see LONG_RATIO derivation above); the original
        // used 26.0 here, inconsistent with that documented dimension.
        targetWidth = isLong ? 26.9 : 16.9;

        return targetWidth / (normalizedWidth * 12 * Math.tan(VIEW_ANGLE * Math.PI / (180 * 2)));
    }
}