Skip to content

Commit

Permalink
Implemented max acc relocalization filtering (working on iOS)
Browse files Browse the repository at this point in the history
  • Loading branch information
matlabbe committed Sep 15, 2024
1 parent 9af45c8 commit 0f1336d
Show file tree
Hide file tree
Showing 13 changed files with 294 additions and 255 deletions.
21 changes: 5 additions & 16 deletions app/android/jni/CameraARCore.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -385,12 +385,6 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
/*near=*/0.1f, /*far=*/100.f,
glm::value_ptr(projectionMatrix_));

// adjust origin
if(!getOriginOffset().isNull())
{
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
}

ArTrackingState camera_tracking_state;
ArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

Expand All @@ -402,11 +396,12 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
ArCamera_getPose(arSession_, ar_camera, arPose_);
ArPose_getPoseRaw(arSession_, arPose_, pose_raw);
Transform poseArCore = Transform(pose_raw[4], pose_raw[5], pose_raw[6], pose_raw[0], pose_raw[1], pose_raw[2], pose_raw[3]);
poseArCore = rtabmap::rtabmap_world_T_opengl_world * poseArCore * rtabmap::opengl_world_T_rtabmap_world;
pose = rtabmap::rtabmap_world_T_opengl_world * poseArCore * rtabmap::opengl_world_T_rtabmap_world;

if(poseArCore.isNull())
if(pose.isNull())
{
LOGE("CameraARCore: Pose is null");
return data;
}

// Get calibration parameters
Expand Down Expand Up @@ -530,7 +525,7 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
#endif
if(pointCloudData && points>0)
{
scan = scanFromPointCloudData(pointCloudData, points, poseArCore, model, rgb, &kpts, &kpts3);
scan = scanFromPointCloudData(pointCloudData, points, pose, model, rgb, &kpts, &kpts3);
}
}
else
Expand All @@ -541,15 +536,9 @@ SensorData CameraARCore::updateDataOnRender(Transform & pose)
data = SensorData(scan, rgb, depthFromMotion_?getOcclusionImage():cv::Mat(), model, 0, stamp);
data.setFeatures(kpts, kpts3, cv::Mat());

if(!poseArCore.isNull())
if(!pose.isNull())
{
pose = poseArCore;
this->poseReceived(pose, stamp);
// adjust origin
if(!getOriginOffset().isNull())
{
pose = getOriginOffset() * pose;
}
}
}
}
Expand Down
11 changes: 0 additions & 11 deletions app/android/jni/CameraAREngine.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -236,12 +236,6 @@ SensorData CameraAREngine::updateDataOnRender(Transform & pose)
/*near=*/0.1f, /*far=*/100.f,
glm::value_ptr(projectionMatrix_));

// adjust origin
if(!getOriginOffset().isNull())
{
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * getOriginOffset() *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
}

HwArTrackingState camera_tracking_state;
HwArCamera_getTrackingState(arSession_, ar_camera, &camera_tracking_state);

Expand Down Expand Up @@ -334,11 +328,6 @@ SensorData CameraAREngine::updateDataOnRender(Transform & pose)
{
pose = rtabmap::rtabmap_world_T_opengl_world * pose * rtabmap::opengl_world_T_rtabmap_world;
this->poseReceived(pose, stamp);
// adjust origin
if(!getOriginOffset().isNull())
{
pose = getOriginOffset() * pose;
}
}
}
}
Expand Down
167 changes: 128 additions & 39 deletions app/android/jni/CameraMobile.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -53,15 +53,17 @@ const rtabmap::Transform CameraMobile::opticalRotationInv = Transform(
0.0f, 0.0f, -1.0f, 0.0f,
1.0f, 0.0f, 0.0f, 0.0f);

CameraMobile::CameraMobile(bool smoothing) :
CameraMobile::CameraMobile(bool smoothing, float upstreamRelocalizationAccThr) :
Camera(10),
deviceTColorCamera_(Transform::getIdentity()),
textureId_(0),
uvs_initialized_(false),
stampEpochOffset_(0.0),
smoothing_(smoothing),
colorCameraToDisplayRotation_(ROTATION_0),
originUpdate_(false),
originUpdate_(true),
upstreamRelocalizationAccThr_(upstreamRelocalizationAccThr),
previousAnchorStamp_(0.0),
dataGoodTracking_(true)
{
}
Expand All @@ -83,10 +85,13 @@ void CameraMobile::close()
lastKnownGPS_ = GPS();
lastEnvSensors_.clear();
originOffset_ = Transform();
originUpdate_ = false;
originUpdate_ = true;
dataPose_ = Transform();
data_ = SensorData();
dataGoodTracking_ = true;
previousAnchorPose_.setNull();
previousAnchorLinearVelocity_.clear();
previousAnchorStamp_ = 0.0;

if(textureId_ != 0)
{
Expand All @@ -99,16 +104,10 @@ void CameraMobile::close()

void CameraMobile::resetOrigin()
{
firstFrame_ = true;
lastKnownGPS_ = GPS();
lastEnvSensors_.clear();
dataPose_ = Transform();
data_ = SensorData();
dataGoodTracking_ = true;
originUpdate_ = true;
}

bool CameraMobile::getPose(double stamp, Transform & pose, cv::Mat & covariance, double maxWaitTime)
bool CameraMobile::getPose(double epochStamp, Transform & pose, cv::Mat & covariance, double maxWaitTime)
{
pose.setNull();

Expand All @@ -119,27 +118,27 @@ bool CameraMobile::getPose(double stamp, Transform & pose, cv::Mat & covariance,
{
poseMutex_.lock();
int waitTry = 0;
while(maxWaitTimeMs>0 && poseBuffer_.rbegin()->first < stamp && waitTry < maxWaitTimeMs)
while(maxWaitTimeMs>0 && poseBuffer_.rbegin()->first < epochStamp && waitTry < maxWaitTimeMs)
{
poseMutex_.unlock();
++waitTry;
uSleep(1);
poseMutex_.lock();
}
if(poseBuffer_.rbegin()->first < stamp)
if(poseBuffer_.rbegin()->first < epochStamp)
{
if(maxWaitTimeMs > 0)
{
UWARN("Could not find poses to interpolate at time %f after waiting %d ms (latest is %f)...", stamp, maxWaitTimeMs, poseBuffer_.rbegin()->first);
UWARN("Could not find poses to interpolate at time %f after waiting %d ms (latest is %f)...", epochStamp, maxWaitTimeMs, poseBuffer_.rbegin()->first);
}
else
{
UWARN("Could not find poses to interpolate at time %f (latest is %f)...", stamp, poseBuffer_.rbegin()->first);
UWARN("Could not find poses to interpolate at time %f (latest is %f)...", epochStamp, poseBuffer_.rbegin()->first);
}
}
else
{
std::map<double, Transform>::const_iterator iterB = poseBuffer_.lower_bound(stamp);
std::map<double, Transform>::const_iterator iterB = poseBuffer_.lower_bound(epochStamp);
std::map<double, Transform>::const_iterator iterA = iterB;
if(iterA != poseBuffer_.begin())
{
Expand All @@ -149,17 +148,17 @@ bool CameraMobile::getPose(double stamp, Transform & pose, cv::Mat & covariance,
{
iterB = --iterB;
}
if(iterA == iterB && stamp == iterA->first)
if(iterA == iterB && epochStamp == iterA->first)
{
pose = iterA->second;
}
else if(stamp >= iterA->first && stamp <= iterB->first)
else if(epochStamp >= iterA->first && epochStamp <= iterB->first)
{
pose = iterA->second.interpolate((stamp-iterA->first) / (iterB->first-iterA->first), iterB->second);
pose = iterA->second.interpolate((epochStamp-iterA->first) / (iterB->first-iterA->first), iterB->second);
}
else // stamp < iterA->first
{
UWARN("Could not find pose data to interpolate at time %f (earliest is %f). Are sensors synchronized?", stamp, iterA->first);
UWARN("Could not find pose data to interpolate at time %f (earliest is %f). Are sensors synchronized?", epochStamp, iterA->first);
}
}
poseMutex_.unlock();
Expand All @@ -169,11 +168,23 @@ bool CameraMobile::getPose(double stamp, Transform & pose, cv::Mat & covariance,

void CameraMobile::poseReceived(const Transform & pose, double deviceStamp)
{
	// Pose received is the pose of the device in rtabmap coordinate
if(!pose.isNull())
{
UScopeMutex lock(dataMutex_);

Transform p = pose;
if(originUpdate_)
{
firstFrame_ = true;
lastKnownGPS_ = GPS();
lastEnvSensors_.clear();
dataPose_ = Transform();
data_ = SensorData();
dataGoodTracking_ = true;
previousAnchorPose_.setNull();
previousAnchorLinearVelocity_.clear();
previousAnchorStamp_ = 0.0;
originOffset_ = p.translation().inverse();
originUpdate_ = false;
}
Expand All @@ -187,7 +198,77 @@ void CameraMobile::poseReceived(const Transform & pose, double deviceStamp)

if(!originOffset_.isNull())
{
p = originOffset_*p;
// Filter re-localizations from poses received
rtabmap::Transform rawPose = originOffset_ * pose.translation(); // remove rotation to keep position in fixed frame
// Remove upstream localization corrections by integrating pose from previous frame anchor
bool showLog = false;
if(upstreamRelocalizationAccThr_>0.0f && !previousAnchorPose_.isNull())
{
float dt = epochStamp - previousAnchorStamp_;
std::vector<float> currentLinearVelocity(3);
float dx = rawPose.x()-previousAnchorPose_.x();
float dy = rawPose.y()-previousAnchorPose_.y();
float dz = rawPose.z()-previousAnchorPose_.z();
currentLinearVelocity[0] = dx / dt;
currentLinearVelocity[1] = dy / dt;
currentLinearVelocity[2] = dz / dt;
if(!previousAnchorLinearVelocity_.empty() && uNorm(dx, dy, dz)>0.02)
{
float ax = (currentLinearVelocity[0] - previousAnchorLinearVelocity_[0]) / dt;
float ay = (currentLinearVelocity[1] - previousAnchorLinearVelocity_[1]) / dt;
float az = (currentLinearVelocity[2] - previousAnchorLinearVelocity_[2]) / dt;
float acceleration = sqrt(ax*ax + ay*ay + az*az);
if(acceleration>=upstreamRelocalizationAccThr_)
{
// Only correct the translation to not lose rotation aligned
// with gravity.

// Use constant motion model to update current pose.
rtabmap::Transform offset(previousAnchorLinearVelocity_[0] * dt,
previousAnchorLinearVelocity_[1] * dt,
previousAnchorLinearVelocity_[2] * dt,
0, 0, 0, 1);
rtabmap::Transform newRawPose = offset * previousAnchorPose_;
currentLinearVelocity = previousAnchorLinearVelocity_;
originOffset_.x() += newRawPose.x() - rawPose.x();
originOffset_.y() += newRawPose.y() - rawPose.y();
originOffset_.z() += newRawPose.z() - rawPose.z();
UERROR("Upstream re-localization has been suppressed because of "
"high acceleration detected (%f m/s^2) causing a jump!",
acceleration);
dataGoodTracking_ = false;
post(new CameraInfoEvent(0, "UpstreamRelocationFiltered", uFormat("%.1f m/s^2", acceleration).c_str()));
showLog = true;
}
}
previousAnchorLinearVelocity_ = currentLinearVelocity;
}

p = originOffset_*pose;
previousAnchorPose_ = p;
previousAnchorStamp_ = epochStamp;

if(upstreamRelocalizationAccThr_>0.0f) {
relocalizationDebugBuffer_.insert(std::make_pair(epochStamp, std::make_pair(pose, p)));
if(relocalizationDebugBuffer_.size() > 60)
{
relocalizationDebugBuffer_.erase(relocalizationDebugBuffer_.begin());
}
if(showLog) {
std::stringstream stream;
for(auto iter=relocalizationDebugBuffer_.begin(); iter!=relocalizationDebugBuffer_.end(); ++iter)
{
stream << iter->first - relocalizationDebugBuffer_.begin()->first
<< " " << iter->second.first.x()
<< " " << iter->second.first.y()
<< " " << iter->second.first.z()
<< " " << iter->second.second.x()
<< " " << iter->second.second.y()
<< " " << iter->second.second.z() << std::endl;
}
UERROR("timestamp original_xyz corrected_xyz:\n%s", stream.str().c_str());
}
}
}

{
Expand Down Expand Up @@ -219,28 +300,22 @@ void CameraMobile::addEnvSensor(int type, float value)
lastEnvSensors_.insert(std::make_pair((EnvSensor::Type)type, EnvSensor((EnvSensor::Type)type, value)));
}

void CameraMobile::update(const SensorData & data, const Transform & pose, const glm::mat4 & viewMatrix, const glm::mat4 & projectionMatrix, const float * texCoord, bool trackingIsGood)
void CameraMobile::update(const SensorData & data, const Transform & pose, const glm::mat4 & viewMatrix, const glm::mat4 & projectionMatrix, const float * texCoord)
{
UScopeMutex lock(dataMutex_);

LOGD("CameraMobile::update pose=%s stamp=%f", pose.prettyPrint().c_str(), data.stamp());

bool notify = !data_.isValid();

LOGD("CameraMobile::update pose=%s stamp=%f", pose.prettyPrint().c_str(), data.stamp());
poseReceived(pose, data.stamp());

data_ = data;
dataPose_ = pose;
if(dataGoodTracking_)
dataGoodTracking_ = trackingIsGood;

viewMatrix_ = viewMatrix;
projectionMatrix_ = projectionMatrix;

// adjust origin
if(!originOffset_.isNull())
{
dataPose_ = originOffset_ * dataPose_;
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * originOffset_ *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
}


if(textureId_ == 0)
{
glGenTextures(1, &textureId_);
Expand Down Expand Up @@ -320,6 +395,14 @@ void CameraMobile::postUpdate()
{
if(data_.isValid())
{
// adjust origin
if(!originOffset_.isNull())
{
dataPose_ = originOffset_ * dataPose_;
viewMatrix_ = glm::inverse(rtabmap::glmFromTransform(rtabmap::opengl_world_T_rtabmap_world * originOffset_ *rtabmap::rtabmap_world_T_opengl_world)*glm::inverse(viewMatrix_));
occlusionModel_.setLocalTransform(originOffset_ * occlusionModel_.localTransform());
}

if(lastKnownGPS_.stamp() > 0.0 && data_.stamp()-lastKnownGPS_.stamp()<1.0)
{
data_.setGPS(lastKnownGPS_);
Expand Down Expand Up @@ -435,11 +518,20 @@ void CameraMobile::postUpdate()
SensorData CameraMobile::captureImage(SensorCaptureInfo * info)
{
SensorData data;
bool firstFrame = true;
bool dataGoodTracking = true;
rtabmap::Transform dataPose;
if(dataReady_.acquire(1, 15000))
{
UScopeMutex lock(dataMutex_);
data = data_;
dataPose = dataPose_;
firstFrame = firstFrame_;
dataGoodTracking = dataGoodTracking_;
firstFrame_ = false;
dataGoodTracking_ = true;
data_ = SensorData();
dataPose_.setNull();
}
if(data.isValid())
{
Expand All @@ -449,14 +541,14 @@ SensorData CameraMobile::captureImage(SensorCaptureInfo * info)
if(info)
{
// linear cov = 0.0001
info->odomCovariance = cv::Mat::eye(6,6,CV_64FC1) * (firstFrame_?9999.0:0.00001);
if(!firstFrame_)
info->odomCovariance = cv::Mat::eye(6,6,CV_64FC1) * (firstFrame?9999.0:0.00001);
if(!firstFrame)
{
// angular cov = 0.000001
// roll/pitch should be fairly accurate with VIO input
info->odomCovariance.at<double>(3,3) *= 0.01; // roll
info->odomCovariance.at<double>(4,4) *= 0.01; // pitch
if(!dataGoodTracking_)
if(!dataGoodTracking)
{
UERROR("not good tracking!");
// add slightly more error on translation
Expand All @@ -467,11 +559,8 @@ SensorData CameraMobile::captureImage(SensorCaptureInfo * info)
info->odomCovariance.at<double>(5,5) *= 10; // yaw
}
}
info->odomPose = dataPose_;
info->odomPose = dataPose;
}

dataGoodTracking_ = true;
firstFrame_ = false;
}
else
{
Expand Down
Loading

0 comments on commit 0f1336d

Please sign in to comment.