Skip to content

Commit

Permalink
[photon-core] 2D Detection data accuracy (#896)
Browse files Browse the repository at this point in the history
Use calibration data for 2d target info when available (principal point, FOV)

Correct perspective distortion in 2d yaw/pitch info
  • Loading branch information
amquake authored Oct 15, 2023
1 parent 760de0f commit c8c9e77
Show file tree
Hide file tree
Showing 5 changed files with 200 additions and 96 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public class FrameStaticProperties {
* Instantiates a new Frame static properties.
*
* @param mode The Video Mode of the camera.
* @param fov The fov of the image.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) {
this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal);
Expand All @@ -48,9 +48,9 @@ public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoeffi
/**
* Instantiates a new Frame static properties.
*
* @param imageWidth The width of the image.
* @param imageHeight The width of the image.
* @param fov The fov of the image.
* @param imageWidth The width of the image in pixels.
* @param imageHeight The height of the image in pixels.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(
int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) {
Expand All @@ -61,30 +61,47 @@ public FrameStaticProperties(

imageArea = this.imageWidth * this.imageHeight;

// Todo -- if we have calibration, use it's center point?
centerX = ((double) this.imageWidth / 2) - 0.5;
centerY = ((double) this.imageHeight / 2) - 0.5;
centerPoint = new Point(centerX, centerY);

// TODO if we have calibration use it here instead
// pinhole model calculations
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) {
// Use calibration data
var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat();
centerX = camIntrinsics.get(0, 2)[0];
centerY = camIntrinsics.get(1, 2)[0];
centerPoint = new Point(centerX, centerY);
horizontalFocalLength = camIntrinsics.get(0, 0)[0];
verticalFocalLength = camIntrinsics.get(1, 1)[0];
} else {
// No calibration data. Calculate from user provided diagonal FOV
centerX = (this.imageWidth / 2.0) - 0.5;
centerY = (this.imageHeight / 2.0) - 0.5;
centerPoint = new Point(centerX, centerY);

horizontalFocalLength = this.imageWidth / (2 * Math.tan(horizVertViews.getFirst() / 2));
verticalFocalLength = this.imageHeight / (2 * Math.tan(horizVertViews.getSecond() / 2));
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
double horizFOV = Math.toRadians(horizVertViews.getFirst());
double vertFOV = Math.toRadians(horizVertViews.getSecond());
horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0);
verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0);
}
}

/**
* Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size.
*
* @param diagonalFoV Diagonal FOV in degrees
* @param imageWidth Image width in pixels
* @param imageHeight Image height in pixels
* @return Horizontal and vertical FOV in degrees
*/
public static DoubleCouple calculateHorizontalVerticalFoV(
double diagonalFoV, int imageWidth, int imageHeight) {
double diagonalView = Math.toRadians(diagonalFoV);
diagonalFoV = Math.toRadians(diagonalFoV);
double diagonalAspect = Math.hypot(imageWidth, imageHeight);

double horizontalView =
Math.atan(Math.tan(diagonalView / 2) * (imageWidth / diagonalAspect)) * 2;
double verticalView =
Math.atan(Math.tan(diagonalView / 2) * (imageHeight / diagonalAspect)) * 2;
Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2;
double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2;

return new DoubleCouple(horizontalView, verticalView);
return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,29 @@
import org.photonvision.vision.opencv.DualOffsetValues;

public class TargetCalculations {
public static double calculateYaw(
double offsetCenterX, double targetCenterX, double horizontalFocalLength) {
return Math.toDegrees(Math.atan((offsetCenterX - targetCenterX) / horizontalFocalLength));
}

public static double calculatePitch(
double offsetCenterY, double targetCenterY, double verticalFocalLength) {
return -Math.toDegrees(Math.atan((offsetCenterY - targetCenterY) / verticalFocalLength));
/**
 * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together
 * to account for perspective distortion. Yaw is positive right, and pitch is positive up.
 *
 * @param offsetCenterX The X value of the offset principal point (cx) in pixels
 * @param targetCenterX The X value of the target's center point in pixels
 * @param horizontalFocalLength The horizontal focal length (fx) in pixels
 * @param offsetCenterY The Y value of the offset principal point (cy) in pixels
 * @param targetCenterY The Y value of the target's center point in pixels
 * @param verticalFocalLength The vertical focal length (fy) in pixels
 * @return The yaw and pitch from the principal axis to the target center, in degrees.
 */
public static DoubleCouple calculateYawPitch(
    double offsetCenterX,
    double targetCenterX,
    double horizontalFocalLength,
    double offsetCenterY,
    double targetCenterY,
    double verticalFocalLength) {
  // Horizontal angle from the principal axis to the target (positive to the right).
  double yawRad = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength);
  // The effective vertical focal length lengthens as the target moves horizontally off-axis;
  // dividing fy by cos(yaw) applies this perspective correction before computing pitch.
  double effectiveVerticalFocalLength = verticalFocalLength / Math.cos(yawRad);
  double pitchRad = Math.atan((offsetCenterY - targetCenterY) / effectiveVerticalFocalLength);
  return new DoubleCouple(Math.toDegrees(yawRad), Math.toDegrees(pitchRad));
}

public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,13 +73,16 @@ public TrackedTarget(
TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
tagDetection.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
tagDetection.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch =
TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x,
tagDetection.getCenterX(),
params.horizontalFocalLength,
params.cameraCenterPoint.y,
tagDetection.getCenterY(),
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();
var bestPose = new Transform3d();
var altPose = new Transform3d();

Expand Down Expand Up @@ -138,13 +141,16 @@ public TrackedTarget(
public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
result.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
result.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch =
TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x,
result.getCenterX(),
params.horizontalFocalLength,
params.cameraCenterPoint.y,
result.getCenterY(),
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

double[] xCorners = result.getxCorners();
double[] yCorners = result.getyCorners();
Expand Down Expand Up @@ -246,7 +252,7 @@ public MatOfPoint2f getApproximateBoundingPolygon() {
}

public void calculateValues(TargetCalculationParameters params) {
// this MUST happen in this exact order!
// this MUST happen in this exact order! (TODO: document why)
m_targetOffsetPoint =
TargetCalculations.calculateTargetOffsetPoint(
params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect());
Expand All @@ -257,13 +263,18 @@ public void calculateValues(TargetCalculationParameters params) {
params.dualOffsetValues,
params.robotOffsetPointMode);

// order of this stuff doesn't matter though
m_pitch =
TargetCalculations.calculatePitch(
m_targetOffsetPoint.y, m_robotOffsetPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
m_targetOffsetPoint.x, m_robotOffsetPoint.x, params.horizontalFocalLength);
// order of this stuff doesn't matter though
var yawPitch =
TargetCalculations.calculateYawPitch(
m_robotOffsetPoint.x,
m_targetOffsetPoint.x,
params.horizontalFocalLength,
m_robotOffsetPoint.y,
m_targetOffsetPoint.y,
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100;

m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect());
Expand Down
Loading

0 comments on commit c8c9e77

Please sign in to comment.