diff --git a/Mapping.py b/Mapping.py
index 63066c2..18a15cc 100644
--- a/Mapping.py
+++ b/Mapping.py
@@ -62,7 +62,8 @@ def updateInfo(self, globalPose: np.ndarray,
         self.pointCloud = getPointCloudPolarInd(radarPolarImg)
 
         self.velocity = velocity
-        self.featurePointsLocalUndistorted = MotionDistortionSolver.undistort(velocity, featurePointsLocal)
+        print(featurePointsLocal.shape)
+        self.featurePointsLocalUndistorted = MotionDistortionSolver.undistort(velocity, featurePointsLocal)[:, :2]
         self.prunedUndistortedLocals = self.featurePointsLocalUndistorted
 
     def copyFromOtherKeyframe(self, keyframe) -> None:
@@ -95,7 +96,23 @@ def convertFeaturesLocalToGlobal(
         return featurePointsGlobal
 
     def getPrunedFeaturesGlobalPosition(self):
-        return self.convertFeaturesLocalToGlobal(self.prunedUndistortedLocals)
+        x, y, th = self.pose
+
+        # First translate local points to origin at center
+        featurePointsGlobal = self.prunedUndistortedLocals - RADAR_CART_CENTER
+
+        # Then we need to convert to meters
+        #featurePointsGlobal *= RANGE_RESOLUTION_CART_M # px * (m/px) = m
+
+        # Center origin at pose center
+
+        # Rotate and translate to make into global coordinate system
+        R = getRotationMatrix(th)
+        t = np.array([x, y]).reshape(2, 1)
+        featurePointsGlobal = (R @ (featurePointsGlobal.T + t)).T
+
+        return featurePointsGlobal
+        return self.convertFeaturesLocalToGlobal()
 
     def pruneFeaturePoints(self, corrStatus: np.ndarray) -> None:
         '''
@@ -103,8 +120,8 @@ def pruneFeaturePoints(self, corrStatus: np.ndarray) -> None:
         @param[in] corrStatus
         @note In place changing of `self.prunedFeaturePoints` function, which aims to track and prune away the feature points that are part of this keyframe
         '''
-        self.prunedFeaturePoints = self.prunedFeaturePoints[corrStatus]
-        self.prunedUndistortedLocals = self.prunedUndistortedLocals[corrStatus]
+        self.prunedFeaturePoints = self.prunedFeaturePoints[corrStatus.flatten().astype(bool)]
+        self.prunedUndistortedLocals = self.prunedUndistortedLocals[corrStatus.flatten().astype(bool)]
 
     # def isVisible(self, keyframe):
     #     '''
diff --git a/RawROAMSystem.py b/RawROAMSystem.py
index 0e5a02d..d227550 100644
--- a/RawROAMSystem.py
+++ b/RawROAMSystem.py
@@ -142,13 +142,14 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
         # Get initial features from Cartesian image
         blobCoord = np.empty((0, 2))
         blobCoord, _ = appendNewFeatures(prevImgCart, blobCoord)
+        print(f"blobCoord shape:{blobCoord.shape}")
 
         # Initialize first keyframe
         zero_velocity = np.zeros((3,))
-        old_kf = Keyframe(initPose, blobCoord, prevImgPolar, ) # pointer to previous kf
+        old_kf = Keyframe(initPose, blobCoord, prevImgPolar, zero_velocity) # pointer to previous kf
         self.map.addKeyframe(old_kf)
 
-        possible_kf = Keyframe(initPose, blobCoord, prevImgPolar)
+        possible_kf = Keyframe(initPose, blobCoord, prevImgPolar, zero_velocity)
 
         for seqInd in range(startSeqInd + 1, endSeqInd + 1):
             # Obtain polar and Cart image
@@ -162,16 +163,18 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
                                                             blobCoord, seqInd)
 
             # Keyframe updating
+            print(f"Pruning index shape {corrStatus.shape}")
             old_kf.pruneFeaturePoints(corrStatus)
 
             print("Detected", np.rad2deg(rotAngleRad), "[deg] rotation")
             estR = getRotationMatrix(-rotAngleRad)
-            R, h = tracker.getTransform(good_old, good_new, pixel = False)
+            R, h = tracker.getTransform(good_old, good_new, pixel = True)
             # R = estR
 
             # Solve for Motion Compensated Transform
-            p_w = old_kf.getPrunedFeaturesGlobalPosition()
+            p_w = old_kf.getPrunedFeaturesGlobalPosition()
+            # Initial Transform guess
             T_wj = np.block([[R, h], [np.zeros((2,)), 1]])
@@ -179,13 +182,14 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
             MDS.update_problem(prev_pose, p_w, good_new, T_wj)
             undistort_solution = MDS.optimize_library()
             pose_vector = undistort_solution[3:]
+            velocity = undistort_solution[:3]
 
             # Update trajectory
             #self.updateTrajectory(R, h, seqInd)
             self.updateTrajectoryAbsolute(pose_vector, seqInd)
 
             latestPose = pose_vector #self.estTraj.poses[-1]
-            possible_kf.updateInfo(latestPose, good_new, currImgPolar)
+            possible_kf.updateInfo(latestPose, good_new, currImgPolar, velocity)
 
             # Add a keyframe if it fulfills criteria
             # 1) large enough translation from previous keyframe
@@ -195,7 +199,8 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
             # NOTE: Feature check and appending will only be checked in the next iteration,
             # so we can prematuraly do it here and add a keyframe first
             nFeatures = good_new.shape[0]
-            if (nFeatures <= N_FEATURES_BEFORE_RETRACK) or \
+            retrack = (nFeatures <= N_FEATURES_BEFORE_RETRACK)
+            if retrack or \
                 self.map.isGoodKeyframe(possible_kf):
 
                 print("\nAdding keyframe...\n")
@@ -208,16 +213,23 @@ def run(self, startSeqInd: int = 0, endSeqInd: int = -1) -> None:
                 # Proposed fix: old_kf = possible_kf # switch ptr to new object
                 # Initialize new poss_kf for new ptr
                 old_kf = possible_kf
-                possible_kf = Keyframe(latestPose, good_new, currImgPolar)
+                # TODO: Never replenished blobCoord. Proposed fix: Done here.
+                if retrack:
+                    print(f"Restocking features for new keyframe")
+                    good_new, _ = appendNewFeatures(currImgCart, good_new)
+                    old_kf.updateInfo(latestPose, good_new, currImgPolar, velocity)
+                    print(f"{old_kf.featurePointsLocal.shape[0]} features now")
+                possible_kf = Keyframe(latestPose, blobCoord, currImgPolar, velocity)
 
                 # TODO: do bundle adjustment here
                 self.map.bundleAdjustment()
 
             # Plotting and prints and stuff
-            self.plot(prevImgCart, currImgCart, good_old, good_new, R, h,
-                      seqInd)
+            # self.plot(prevImgCart, currImgCart, good_old, good_new, R, h,
+            #           seqInd)
 
             # Update incremental variables
             blobCoord = good_new.copy()
+            print(f"Blob coord is now size: {blobCoord.shape}")
             prevImgCart = currImgCart
             prev_pose = convertPoseToTransform(latestPose)
diff --git a/Tracker.py b/Tracker.py
index 73450b0..79bf79e 100644
--- a/Tracker.py
+++ b/Tracker.py
@@ -82,12 +82,23 @@ def track(
         print(
             f"{seqInd} | Num good features: {nGoodFeatures} of {nFeatures} ({(nGoodFeatures / nFeatures) * 100:.2f}%) | Time: {toc(start):.2f}s"
         )
+
+        # plt.subplot(1, 2, 1)
+        # plt.scatter(good_old[:,0], good_old[:,1])
+        # plt.subplot(1, 2, 2)
+        # plt.scatter(good_new[:,0], good_new[:,1])
+        # plt.show()
 
         # Outlier rejection
         doOutlierRejection = self.paramFlags.get("rejectOutliers", True)
         if doOutlierRejection:
             good_old, good_new, pruning_mask = rejectOutliers(good_old, good_new)
-
+
+            # plt.subplot(1, 2, 1)
+            # plt.scatter(good_old[:,0], good_old[:,1])
+            # plt.subplot(1, 2, 2)
+            # plt.scatter(good_new[:,0], good_new[:,1])
+            # plt.show()
             # Ensure correct correspondence status
             rng = np.arange(nFeatures)
             corrStatus[rng[corrStatus.flatten().astype(bool)]] &= pruning_mask[:, np.newaxis]
diff --git a/data/full_seq_1/INFO.md b/data/full_seq_1/INFO.md
index f85f4b2..f73d0d9 100644
--- a/data/full_seq_1/INFO.md
+++ b/data/full_seq_1/INFO.md
@@ -11,7 +11,7 @@ Distance: 9.02 km
 Frames: 8867
 FPS: 4.00 Hz
 Resolution: 4.32 cm
-Range: 165 m 
+Range: 165 m
 
 ## Dataset Structure
 
@@ -23,7 +23,6 @@ Range: 165 m
     |-- (bunch of radar images labelled as .png)
     |-- info.txt (this file)
     |-- radar.timestamps (in form: 
 |-- gt
-    |-- radar_odometry.csv (processed ground truth odometry data from GPS)
-
+    |-- radar_odometry.csv (processed ground truth odometry data from GPS)
 ```
diff --git a/data/full_seq_1/gt/trajectory.png b/data/full_seq_1/gt/trajectory.png
deleted file mode 100644
index d1880a9..0000000
Binary files a/data/full_seq_1/gt/trajectory.png and /dev/null differ
diff --git a/getFeatures.py b/getFeatures.py
index 2564f16..9fe1e34 100644
--- a/getFeatures.py
+++ b/getFeatures.py
@@ -54,7 +54,7 @@ def getBlobsFromCart(cartImage: np.ndarray,
 
 # Thresholds for feature loss
 PERCENT_FEATURE_LOSS_THRESHOLD = 0.75
-N_FEATURES_BEFORE_RETRACK = -1  # TODO: Make it dynamic (find the overall loss)
+N_FEATURES_BEFORE_RETRACK = 60  # TODO: Make it dynamic (find the overall loss)
 
 
 # TODO: Make dynamic?
@@ -103,7 +103,6 @@ def appendNewFeatures(srcImg, oldFeaturesCoord):
     @param[in] srcImg Source image to obtain features on
     @param[in] oldFeaturesCoord (K x 2) array of [x, y] coordinate of features
     '''
-
     newFeatureCoord, newFeatureRadii = getFeatures(srcImg)
 
     print("Added", newFeatureCoord.shape[0], "new features!")
diff --git a/getTransformKLT.py b/getTransformKLT.py
index fb7babd..ce6b7e7 100644
--- a/getTransformKLT.py
+++ b/getTransformKLT.py
@@ -15,7 +15,7 @@ from utils import *
 
 PLOT_BAD_FEATURES = False
-N_FEATURES_BEFORE_RETRACK = 80
+N_FEATURES_BEFORE_RETRACK = 60
 
 
 def visualize_transform(prevImg: np.ndarray,
                         currImg: np.ndarray,
@@ -349,6 +349,7 @@ def getTrackedPointsKLT(
     if nFeatures < N_FEATURES_BEFORE_RETRACK:
         featurePtSrc, N_FEATURES_BEFORE_RETRACK = \
             appendNewFeatures(srcImg, featurePtSrc)
+        print("WARNING: getTransformKLT added new features!")
 
     # Perform KLT to get corresponding points
     # Stupid conversions to appropriate types
diff --git a/img/roam_mapping/tiny_traj/0001.jpg b/img/roam_mapping/tiny_traj/0001.jpg
index 9f8ee7e..5ac3f75 100644
Binary files a/img/roam_mapping/tiny_traj/0001.jpg and b/img/roam_mapping/tiny_traj/0001.jpg differ
diff --git a/motionDistortion.py b/motionDistortion.py
index 0515c84..398b3cc 100644
--- a/motionDistortion.py
+++ b/motionDistortion.py
@@ -31,7 +31,7 @@
 '''
 
 RADAR_SCAN_FREQUENCY = 4 # 4 hz data
-
+VERBOSE = False
 
 class MotionDistortionSolver():
     def __init__(self, T_wj0, p_w, p_jt, T_wj, sigma_p, sigma_v, frequency = RADAR_SCAN_FREQUENCY):
@@ -80,6 +80,8 @@ def update_problem(self, T_wj0, p_w, p_jt, T_wj):
         self.T_wj0_inv = np.linalg.inv(T_wj0)
         self.p_w = homogenize(p_w) # Estimated point world positions, N x 3
         self.p_jt = homogenize(p_jt) # Observed points at time t, N x 3
+        assert(p_w.shape == p_jt.shape)
+        self.T_wj_initial = T_wj
 
         # e_v Parameters
         self.v_j_initial = self.infer_velocity(T_wj)
@@ -123,6 +125,10 @@ def undistort(v_j, points, period = 1 / RADAR_SCAN_FREQUENCY, times = None):
         Computes a new set of undistorted observed points, based on the current
         best estimate of v_T, T_wj, dT
         '''
+        # Turn points in homogeneous form if not already
+        points = homogenize(points)
+
+        # Get the time deltas for motion distortion
         if times is None:
             assert(period > 0)
             times = MotionDistortionSolver.compute_time_deltas(period, points)
@@ -140,7 +146,7 @@ def undistort(v_j, points, period = 1 / RADAR_SCAN_FREQUENCY, times = None):
                           [np.zeros(shape), np.zeros(shape), np.ones(shape)]])
         p_jt_col = np.expand_dims(points, axis = 2) # N x 3 x 1
         undistorted = T_j_jt.transpose((2, 0, 1)) @ p_jt_col # N x 3 x 1
-        return undistorted
+        return np.squeeze(undistorted) # N x 3
 
     def expected_observed_pts(self, T_wj):
         '''
@@ -172,7 +178,7 @@ def error(self, v_j, T_wj):
         # Compute point error
         undistorted = MotionDistortionSolver.undistort(v_j, self.p_jt, times=self.dT)
         expected = self.expected_observed_pts(T_wj)
-        naive_e_p = expected - np.squeeze(undistorted).T # 3 x N
+        naive_e_p = expected - undistorted.T # 3 x N
         # Actual loss is the Cauchy robust loss, defined here:
         e_p_i = np.log(np.square(naive_e_p[:2, :]) / 2 + 1) # 2 x N
         e_p = e_p_i.flatten(order='F')
@@ -185,13 +191,12 @@ def error(self, v_j, T_wj):
         dy = T_j_j1[1, 2]
         dtheta = np.arctan2(T_j_j1[1, 0], T_j_j1[0, 0])
         v_j_prior = np.array([dx, dy, dtheta]) / self.total_scan_time
-        #print(f"Prior velocity: {v_j_prior}")
         v_diff = (v_j - v_j_prior)
         v_diff[2] = normalize_angles(v_diff[2])
         e_v = v_diff * e_p_i.shape[1] # (3,)
         # ideally should warp2pi here on theta error
         e = np.hstack((e_p, e_v))
-        #print(e)
+
         return e
 
     def jacobian_vector(self, params):
@@ -216,7 +221,7 @@ def jacobian(self, v_j, T_wj):
         '''
         undistorted = MotionDistortionSolver.undistort(v_j, self.p_jt, times=self.dT)
         expected = self.expected_observed_pts(T_wj)
-        input = expected - np.squeeze(undistorted).T # 3 x N
+        input = expected - undistorted.T # 3 x N
         naive_e_p = input[:2]
         cauchy_derivative = naive_e_p / (np.square(naive_e_p) / 2 + 1) # 3 x N
 
@@ -290,8 +295,9 @@ def optimize_library(self):
         T0 = self.T_wj_initial
         T_params = np.array([T0[0, 2], T0[1, 2], np.arctan2(T0[1, 0], T0[0, 0])])
         initial_guess = np.hstack((self.v_j_initial, T_params))
-        print(f"Initial v guess: {self.v_j_initial}")
-        print(f"Initial T guess: {T_params}")
+        if VERBOSE:
+            print(f"Initial v guess: {self.v_j_initial}")
+            print(f"Initial T guess: {T_params}")
         result = sp.optimize.least_squares(self.error_vector, initial_guess, jac = '2-point', method = 'lm')
         # result = sp.optimize.least_squares(self.error_vector, initial_guess, jac = self.jacobian_vector, method = 'lm')
 
@@ -305,9 +311,10 @@ def optimize_library(self):
                        2 : "ftol termination condition is satisfied",
                        3 : "xtol termination condition is satisfied",
                        4 : "Both ftol and xtol termination conditions are satisfied"}
-        print(f"Final v: {best_params[:3]}")
-        print(f"Final t: {best_params[3:]}")
-        print(f"Used {num_evals} evaluations")
-        print(f"Residuals were {result.fun}")
-        print(status_dict[status])
+        if VERBOSE:
+            print(f"Final v: {best_params[:3]}")
+            print(f"Final t: {best_params[3:]}")
+            print(f"Used {num_evals} evaluations")
+            print(f"Residuals were {result.fun}")
+            print(status_dict[status])
         return best_params