[INTERNAL]
F = 1 # [m] focal length
SX = 0.001225 # [m] pixel size in X direction
SY = 0.00122 # [m] pixel size in Y direction
X0 = 319.5 # [pixel] X-coordinate of principal point from top-left (0,0)
Y0 = 239.5 # [pixel] Y-coordinate of principal point from top-left (0,0)
[EXTERNAL]
B = 0.3 # [m] width of baseline of stereo camera rig
LATPOS = 0 # [m] lateral position of rectified images (virtual camera)
HEIGHT = 0 # [m] height of rectified images (virtual camera)
DISTANCE = 0 # [m] distance of rectified images (virtual camera)
TILT = 0 # [rad] tilt angle
YAW = 0 # [rad] yaw angle
ROLL = 0 # [rad] roll angle
# Notes:
#
# In a stereo camera system the internal parameters for both cameras are the same.
#
# The camera model is right handed.
# The X axis is the lateral distance (positive to the right)
# The Y axis is the height (positive pointing up)
# The Z axis is in the depth direction (positive in the direction of driving)
#
# The world to camera transformation is performed by first a translation
# (latpos, height, distance) followed by a rotation (tilt, yaw, roll).
#
# The angle directions are:
# tilt > 0 <=> looking down
# yaw > 0 <=> looking right
# roll > 0 <=> rolling clockwise
#
#
# How to calculate "real-world" co-ordinates from the image + disparity
# ---------------------------------------------------------------------
# On the sensor the projected point in metres is:
# (x,y) = (F * X/Z + x0, F * Y/Z + y0)
# where (X,Y,Z) is the world point, (x,y) is the image point
# and ALL units are in meters
#
# Since each direction is scaled by the size of the pixel this gives us
# (u,v) = (x/SX , y/SY) = (F/SX * X/Z + x0/SX, F/SY * Y/Z + y0/SY)
# Provided that x0/SX = X0 and y0/SY = Y0, and now taking into account disparity d:
# (u,v,d) = (F/SX * X/Z + X0 , F/SY * Y/Z + Y0 , F/SX * B/Z)
#
# The inverse of this is:
# (X, Y, Z) = B / d * (u - X0, -SY/SX * (v - Y0) , F / SX)
# (the minus sign on the Y component accounts for the image v axis pointing
# down from the top-left origin while the camera Y axis points up)