Preliminary mapping and undistort code
liukeskywalker98 committed May 4, 2022
1 parent 0b33dea commit 1a87b8a
Showing 5 changed files with 141 additions and 47 deletions.
16 changes: 13 additions & 3 deletions Mapping.py
@@ -7,6 +7,7 @@
# import m2dp
from getPointCloud import getPointCloudPolarInd
from utils import getRotationMatrix
from motionDistortion import MotionDistortionSolver

# Thresholds
ROT_THRESHOLD = 0.2 # radians
@@ -20,7 +21,7 @@
class Keyframe():

def __init__(self, globalPose: np.ndarray, featurePointsLocal: np.ndarray,
radarPolarImg: np.ndarray) -> None:
radarPolarImg: np.ndarray, velocity: np.ndarray) -> None:
'''
@brief Keyframe class. Contains pose, feature points and point cloud information
@param[in] globalPose (3 x 1) Pose information [x, y, th] in global coordinates,
@@ -31,11 +32,12 @@ def __init__(self, globalPose: np.ndarray, featurePointsLocal: np.ndarray,
@see updateInfo()
'''
self.updateInfo(globalPose, featurePointsLocal, radarPolarImg)
self.updateInfo(globalPose, featurePointsLocal, radarPolarImg, velocity)

def updateInfo(self, globalPose: np.ndarray,
featurePointsLocal: np.ndarray,
radarPolarImg: np.ndarray) -> None:
radarPolarImg: np.ndarray,
velocity: np.ndarray) -> None:
'''
@brief Update internal information: pose, feature points and point cloud information
@param[in] globalPose (3 x 1) Pose information [x, y, th] in global coordinates,
@@ -59,6 +61,10 @@ def updateInfo(self, globalPose: np.ndarray,
# TODO: Not sure if needed/useful
self.pointCloud = getPointCloudPolarInd(radarPolarImg)

self.velocity = velocity
self.featurePointsLocalUndistorted = MotionDistortionSolver.undistort(velocity, featurePointsLocal)
self.prunedUndistortedLocals = self.featurePointsLocalUndistorted

def copyFromOtherKeyframe(self, keyframe) -> None:
self.updateInfo(keyframe.pose, keyframe.featurePointsLocal,
keyframe.radarPolarImg, keyframe.velocity)
@@ -87,6 +93,9 @@ def convertFeaturesLocalToGlobal(
featurePointsGlobal = (R @ (featurePointsGlobal.T + t)).T

return featurePointsGlobal

def getPrunedFeaturesGlobalPosition(self):
return self.convertFeaturesLocalToGlobal(self.prunedUndistortedLocals)

def pruneFeaturePoints(self, corrStatus: np.ndarray) -> None:
'''
@@ -95,6 +104,7 @@ def pruneFeaturePoints(self, corrStatus: np.ndarray) -> None:
@note In place changing of `self.prunedFeaturePoints` function, which aims to track and prune away the feature points that are part of this keyframe
'''
self.prunedFeaturePoints = self.prunedFeaturePoints[corrStatus]
self.prunedUndistortedLocals = self.prunedUndistortedLocals[corrStatus]

# def isVisible(self, keyframe):
# '''
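To make the new Keyframe bookkeeping concrete, here is a small standalone sketch of what updateInfo() and pruneFeaturePoints() now maintain; the values are illustrative and the feature points are assumed to be homogeneous N x 3 arrays:

import numpy as np
from motionDistortion import MotionDistortionSolver

featurePointsLocal = np.array([[120.0,  85.0, 1.0],   # homogeneous local features (N x 3)
                               [300.0, 410.0, 1.0]])
velocity = np.array([1.5, 0.0, 0.02])                 # [vx, vy, vth] estimate for this keyframe

# updateInfo() caches a motion-compensated copy of the local features...
featurePointsLocalUndistorted = MotionDistortionSolver.undistort(velocity, featurePointsLocal)
prunedUndistortedLocals = featurePointsLocalUndistorted

# ...and pruneFeaturePoints() keeps that copy aligned with the tracked subset.
corrStatus = np.array([True, False])                  # e.g. second feature lost by the tracker
prunedUndistortedLocals = prunedUndistortedLocals[corrStatus]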
60 changes: 47 additions & 13 deletions RawROAMSystem.py
@@ -9,7 +9,8 @@
from trajectoryPlotting import Trajectory, getGroundTruthTrajectory, plotGtAndEstTrajectory
from utils import convertRandHtoDeltas, f_arr, getRotationMatrix, plt_savefig_by_axis, radarImgPathToTimestamp
from Tracker import Tracker

from motionDistortion import MotionDistortionSolver
from utils import *

class RawROAMSystem():

@@ -124,6 +125,15 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
self.gtTraj = gtTraj
self.estTraj = estTraj

# Initialize Motion Distortion Solver
# Covariance matrix, point errors
cov_p = np.diag([4, 4]) # sigma = 2 pixels
# Covariance matrix, velocity errors
cov_v = np.diag([1, 1, (5 * np.pi / 180) ** 2]) # 1 pixel/s, 1 pixel/s, 5 degrees/s
MDS = MotionDistortionSolver(cov_p, cov_v)
# Prior frame's pose
prev_pose = convertPoseToTransform(initPose)

# Actually run the algorithm
# Get initial polar and Cartesian image
prevImgPolar = getPolarImageFromImgPaths(imgPathArr, startSeqInd)
@@ -134,7 +144,8 @@
blobCoord, _ = appendNewFeatures(prevImgCart, blobCoord)

# Initialize first keyframe
old_kf = Keyframe(initPose, blobCoord, prevImgPolar)
zero_velocity = np.zeros((3,))
old_kf = Keyframe(initPose, blobCoord, prevImgPolar, zero_velocity) # pointer to previous kf
self.map.addKeyframe(old_kf)

possible_kf = Keyframe(initPose, blobCoord, prevImgPolar, zero_velocity)
@@ -149,21 +160,31 @@
good_old, good_new, rotAngleRad, corrStatus = tracker.track(
prevImgCart, currImgCart, prevImgPolar, currImgPolar,
blobCoord, seqInd)


# Keyframe updating
old_kf.pruneFeaturePoints(corrStatus)

print("Detected", np.rad2deg(rotAngleRad), "[deg] rotation")
estR = getRotationMatrix(-rotAngleRad)

R, h = tracker.getTransform(good_old, good_new)


R, h = tracker.getTransform(good_old, good_new, pixel = False)
# R = estR

# Solve for Motion Compensated Transform
p_w = old_kf.getPrunedFeaturesGlobalPosition()
# Initial Transform guess
T_wj = np.block([[R, h],
[np.zeros((2,)), 1]])

# Update trajectory
self.updateTrajectory(R, h, seqInd)
MDS.update_problem(prev_pose, p_w, good_new, T_wj)
undistort_solution = MDS.optimize_library()
pose_vector = undistort_solution[3:]

# Keyframe updating
old_kf.pruneFeaturePoints(corrStatus)
# Update trajectory
#self.updateTrajectory(R, h, seqInd)
self.updateTrajectoryAbsolute(pose_vector, seqInd)

latestPose = self.estTraj.poses[-1]
latestPose = pose_vector #self.estTraj.poses[-1]
possible_kf.updateInfo(latestPose, good_new, currImgPolar, undistort_solution[:3]) # first 3 entries: velocity estimate

# Add a keyframe if it fulfills criteria
@@ -179,9 +200,15 @@

print("\nAdding keyframe...\n")

old_kf.copyFromOtherKeyframe(possible_kf)
self.map.addKeyframe(old_kf)
#old_kf.copyFromOtherKeyframe(possible_kf)
self.map.addKeyframe(possible_kf)

# TODO: Aliasing above? old_kf is never assigned the object possible_kf,
# map ends up with a list of N pointers to the same keyframe
# Proposed fix: old_kf = possible_kf # switch ptr to new object
# Initialize new poss_kf for new ptr
old_kf = possible_kf
possible_kf = Keyframe(latestPose, good_new, currImgPolar, undistort_solution[:3])
# TODO: do bundle adjustment here
self.map.bundleAdjustment()

@@ -192,6 +219,7 @@
# Update incremental variables
blobCoord = good_new.copy()
prevImgCart = currImgCart
prev_pose = convertPoseToTransform(latestPose)

# TODO: Move into trajectory class?
def updateTrajectory(self, R, h, seqInd):
@@ -202,6 +230,12 @@ def updateTrajectory(self, R, h, seqInd):
self.estTraj.appendRelativeDeltas(timestamp, est_deltas)
# self.estTraj.appendRelativeTransform(timestamp, R, h)

def updateTrajectoryAbsolute(self, pose_vector, seqInd):
imgPathArr = self.imgPathArr

timestamp = radarImgPathToTimestamp(imgPathArr[seqInd])
self.estTraj.appendAbsoluteTransform(timestamp, pose_vector)

def plot(self,
prevImg,
currImg,
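Condensed, the motion-compensation step added to run() amounts to the sketch below; motion_compensated_pose is a hypothetical helper name, and its arguments are assumed to be exactly the objects run() already maintains (the solver, tracker, previous keyframe, matched points, and the previous pose as a 3 x 3 transform):

import numpy as np

def motion_compensated_pose(MDS, tracker, old_kf, good_old, good_new, prev_pose_mat):
    # Initial pose guess from the SVD-based point alignment (pixel=False, i.e. metres).
    R, h = tracker.getTransform(good_old, good_new, pixel=False)
    T_wj = np.block([[R, h],
                     [np.zeros((1, 2)), 1]])
    # Undistorted keyframe features expressed in the world frame.
    p_w = old_kf.getPrunedFeaturesGlobalPosition()
    MDS.update_problem(prev_pose_mat, p_w, good_new, T_wj)
    solution = MDS.optimize_library()
    # The solution packs [velocity, pose]; the last three entries are [x, y, th].
    return solution[3:]

The caller then passes the returned pose to updateTrajectoryAbsolute() and refreshes prev_pose with convertPoseToTransform(), as in the loop above.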
6 changes: 4 additions & 2 deletions Tracker.py
@@ -95,7 +95,8 @@ def track(
return good_old, good_new, angleRotRad, corrStatus

def getTransform(self, srcCoord: np.ndarray,
targetCoord: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
targetCoord: np.ndarray,
pixel: bool) -> Tuple[np.ndarray, np.ndarray]:
'''
@brief Obtain transform from coordinate correspondences
@@ -109,7 +110,8 @@ def getTransform(self, srcCoord: np.ndarray,
# Obtain transforms
# R, h = calculateTransformDxDth(srcCoord, targetCoord)
R, h = calculateTransformSVD(srcCoord, targetCoord)
h *= RANGE_RESOLUTION_CART_M
if not pixel:
h *= RANGE_RESOLUTION_CART_M

return R, h

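A short usage sketch of the new pixel flag, assuming a Tracker instance and matched coordinates as returned by track():

# pixel=True keeps the translation in Cartesian image pixels; pixel=False scales
# it by RANGE_RESOLUTION_CART_M into metres, matching the previous behaviour.
R, h_px = tracker.getTransform(good_old, good_new, pixel=True)
R, h_m = tracker.getTransform(good_old, good_new, pixel=False)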
95 changes: 66 additions & 29 deletions motionDistortion.py
@@ -30,41 +30,76 @@
Debug
'''

RADAR_SCAN_FREQUENCY = 4 # 4 Hz data

class MotionDistortionSolver():
def __init__(self, T_wj0, p_w, p_jt, v_j0, T_wj, sigma_p, sigma_v):
def __init__(self, T_wj0, p_w, p_jt, T_wj, sigma_p, sigma_v,
frequency = RADAR_SCAN_FREQUENCY):
# e_p Parameters
self.T_wj0 = T_wj0 # Prior transform, T_w,j0
self.T_wj0_inv = np.linalg.inv(T_wj0)
self.p_w = homogenize(p_w) # Estimated point world positions, N x 3
self.p_jt = homogenize(p_jt) # Observed points at time t, N x 3

self.T_wj_initial = T_wj

# Radar Data Params
self.total_scan_time = 1 / frequency

# e_v Parameters
self.v_j_initial = v_j0 # Initial velocity guess (prior velocity/ velocity from SVD solution)
self.T_wj_initial = T_wj # Initial Transform guess (T from SVD solution)
self.v_j_initial = self.infer_velocity(T_wj)
# Initial velocity guess (prior velocity/ velocity from SVD solution)

# Optimization parameters
self.sigma_p = sigma_p # Covariance matrix, point error
self.sigma_v = sigma_v # Covariance matrix, velocity error
n_v = self.sigma_v.shape[0]
n_p = self.sigma_p.shape[0]
self.sigma_total = np.block([[sigma_v, np.zeros((n_v, n_p))],
[np.zeros((n_p, n_v)), sigma_p]])
self.info_sqrt = sp.linalg.sqrtm(np.linalg.inv(self.sigma_total)) # 5 x 5
# n_v = self.sigma_v.shape[0]
# n_p = self.sigma_p.shape[0]
# self.sigma_total = np.block([[sigma_v, np.zeros((n_v, n_p))],
# [np.zeros((n_p, n_v)), sigma_p]])
# self.info_sqrt = sp.linalg.sqrtm(np.linalg.inv(self.sigma_total))
sigma_p_vector = np.tile(np.diag(sigma_p), p_jt.shape[0])
sigma_v_vector = np.diag(sigma_v)
sigma_vector = np.concatenate((sigma_p_vector, sigma_v_vector))
self.info_vector = 1 / sigma_vector
self.dT = None
self.T_wj_best = T_wj
self.v_j_best = self.v_j_initial # might not be a good enough guess, too far from optimal
self.dT = MotionDistortionSolver.compute_time_deltas(self.total_scan_time, p_jt)
pass

def __init__(self, sigma_p, sigma_v,
frequency = RADAR_SCAN_FREQUENCY):
# Radar Data Params
self.total_scan_time = 1 / 4 # assuming 4 Hz
self.total_scan_time = 1 / frequency

# Optimization parameters
self.sigma_p = np.diag(sigma_p) # Covariance diagonal, point error
self.sigma_v = np.diag(sigma_v) # Covariance diagonal, velocity error
pass

def update_problem(self, T_wj0, p_w, p_jt, T_wj):
# e_p Parameters
self.T_wj0 = T_wj0 # Prior transform, T_w,j0
self.T_wj0_inv = np.linalg.inv(T_wj0)
self.p_w = homogenize(p_w) # Estimated point world positions, N x 3
self.p_jt = homogenize(p_jt) # Observed points at time t, N x 3

# e_v Parameters
self.v_j_initial = self.infer_velocity(T_wj)
# Initial velocity guess (prior velocity/ velocity from SVD solution)
self.dT = MotionDistortionSolver.compute_time_deltas(self.total_scan_time, p_jt)

# Info matrix scales to number of points in the optimization problem
sigma_p_vector = np.tile(self.sigma_p, p_jt.shape[0])
sigma_v_vector = self.sigma_v
sigma_vector = np.concatenate((sigma_p_vector, sigma_v_vector))
self.info_vector = 1 / sigma_vector

def infer_velocity(self, transform):
dx = transform[0, 2]
dy = transform[1, 2]
dtheta = np.arctan2(transform[1, 0], transform[0, 0])
return np.array([dx, dy, dtheta]) / self.total_scan_time

def compute_time_deltas(self, points):
@staticmethod
def compute_time_deltas(period, points):
'''
Get the time deltas for each point. This depends solely on where the
points are in scan angle. The further away from center, the greater the
@@ -74,21 +109,26 @@ def compute_time_deltas(self, points):
idea to re-run this function once an undistorted frame is obtained for
better estimates.
'''
#points = self.undistort()#self.p_jt # provide in N x 3
x = points[:, 0]
y = points[:, 1]
angles = np.arctan2(y, -x) # scanline starts at positive x axis and moves clockwise, we take midpoint pi/2 as 0
dT = self.total_scan_time * angles / (2 * np.pi)
#dT -= self.total_scan_time / 2 # offset range, as defined in [-scan_time /2 , scan_time/2]
self.dT = dT

def undistort(self, v_j):
# scanline starts at positive x axis and moves clockwise
# We take midpoint pi/2 as 0
angles = np.arctan2(y, -x)
dT = period * angles / (2 * np.pi)
return dT

@staticmethod
def undistort(v_j, points, period = 1 / RADAR_SCAN_FREQUENCY, times = None):
'''
Computes a new set of undistorted observed points, based on the current
best estimate of v_j, T_wj, dT
'''
if times is None:
assert(period > 0)
times = MotionDistortionSolver.compute_time_deltas(period, points)

v_j_column = np.expand_dims(v_j, axis = 1)
displacement = v_j_column * self.dT # 3 x N
displacement = v_j_column * times # 3 x N

theta = displacement[2, :] # (N,)
dx = displacement[0, :] # (N,)
@@ -98,11 +138,10 @@ def compute_time_deltas(self, points):
T_j_jt = np.array([[np.cos(theta), -np.sin(theta), dx],
[np.sin(theta), np.cos(theta), dy],
[np.zeros(shape), np.zeros(shape), np.ones(shape)]])
p_jt_col = np.expand_dims(self.p_jt, axis = 2) # N x 3 x 1
p_jt_col = np.expand_dims(points, axis = 2) # N x 3 x 1
undistorted = T_j_jt.transpose((2, 0, 1)) @ p_jt_col # N x 3 x 1
return undistorted

# TODO: Check if this really should be inverted
def expected_observed_pts(self, T_wj):
'''
Returns the estimated positions of points based on their world location
Expand Down Expand Up @@ -131,8 +170,7 @@ def error(self, v_j, T_wj):
estimated observed positions and the velocity error.
'''
# Compute point error
undistorted = self.undistort(v_j)
#self.compute_time_deltas(undistorted)
undistorted = MotionDistortionSolver.undistort(v_j, self.p_jt, times=self.dT)
expected = self.expected_observed_pts(T_wj)
naive_e_p = expected - np.squeeze(undistorted).T # 3 x N
# Actual loss is the Cauchy robust loss, defined here:
@@ -176,7 +214,7 @@ def jacobian(self, v_j, T_wj):
J_v - gradient of point error and velocity error wrt velocity terms
vx, vy, vtheta
'''
undistorted = self.undistort(v_j)
undistorted = MotionDistortionSolver.undistort(v_j, self.p_jt, times=self.dT)
expected = self.expected_observed_pts(T_wj)
input = expected - np.squeeze(undistorted).T # 3 x N
naive_e_p = input[:2]
@@ -248,7 +286,6 @@ def optimize_library(self):
'''
Optimize using the LM implementation in the scipy library.
'''
self.compute_time_deltas(self.p_jt)
# Initialize v, T
T0 = self.T_wj_initial
T_params = np.array([T0[0, 2], T0[1, 2], np.arctan2(T0[1, 0], T0[0, 0])])
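Because compute_time_deltas() and undistort() are now static, they can be exercised on their own. A toy sketch with made-up homogeneous points (N x 3, matching how error() and jacobian() call them):

import numpy as np
from motionDistortion import MotionDistortionSolver

period = 0.25                                  # one 4 Hz scan
points = np.array([[ 10.0,  0.0, 1.0],
                   [  0.0, 10.0, 1.0],
                   [-10.0,  0.0, 1.0]])
v_j = np.array([2.0, 0.0, 0.0])                # constant-velocity guess [vx, vy, vth]

dT = MotionDistortionSolver.compute_time_deltas(period, points)
# dT scales with each point's scan angle: here it works out to [0.125, 0.0625, 0.0].
undistorted = MotionDistortionSolver.undistort(v_j, points, times=dT)
# undistorted has shape (N, 3, 1): each point mapped through the per-point
# transform built from v_j * dT.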
11 changes: 11 additions & 0 deletions trajectoryPlotting.py
@@ -58,6 +58,17 @@ def appendRelativeTransform(self, time, R, h):
# new_pose = [*xy[0], *xy[1], self.poses[-1,2] + dth]
self.poses = np.vstack((self.poses, new_pose))

def appendAbsoluteTransform(self, time, pose):
'''
@brief Append an absolute pose to the trajectory
@param[in] time timestamp of the pose
@param[in] pose pose vector (3, ) [x, y, th] in world coordinates
'''
# Add to timestamps
self.timestamps = np.append(self.timestamps, time)
self.poses = np.vstack((self.poses, pose))

def getPoseAtTimes(self, times):
'''
@brief Given timestamps, return the pose at that time using cubic interpolation
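Usage mirrors appendRelativeTransform(); a sketch, assuming an existing Trajectory instance estTraj such as the one RawROAMSystem keeps, and a made-up timestamp:

import numpy as np

timestamp = 1547120000.123456                 # made-up radar timestamp
pose_vector = np.array([12.3, -4.5, 0.07])    # [x, y, th] in world coordinates
estTraj.appendAbsoluteTransform(timestamp, pose_vector)
latest = estTraj.poses[-1]                    # the pose that was just appended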
