BitmapToolkit Scol plugin
sSlam_sd.cpp
/*
-----------------------------------------------------------------------------
This source file is part of OpenSpace3D
For the latest info, see http://www.openspace3d.com

Copyright (c) 2012 I-maginer

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt

-----------------------------------------------------------------------------
*/

//#include "sService.h"

#include "ArManager.h"
#include "ArCameraParam.h"
#include "sSlam_sd.h"
#include "SD_SLAM/extra/utils.h"
#include <vector>
#include <opencv2/core/eigen.hpp>

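// ExpSO3 maps an axis-angle vector w = (x, y, z) to a rotation matrix via
// Rodrigues' formula: exp([w]x) = I + sin(d)/d * W + (1 - cos(d))/d^2 * W^2,
// where W = [w]x is the skew-symmetric matrix of w and d = |w|. For very
// small angles (d < eps) the second-order expansion I + W + 0.5*W^2 is used
// instead, to avoid dividing by a near-zero d.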
const float eps = 1e-4f;

cv::Mat ExpSO3(const float &x, const float &y, const float &z)
{
  cv::Mat I = cv::Mat::eye(3, 3, CV_32F);
  const float d2 = x * x + y * y + z * z;
  const float d = sqrt(d2);
  cv::Mat W;
  W = (cv::Mat_<float>(3, 3) << 0, -z, y, z, 0, -x, -y, x, 0);

  if (d < eps)
    return (I + W + 0.5f * W * W);
  else
    return (I + W * sin(d) / d + W * W * (1.0f - cos(d)) / d2);
}

cv::Mat ExpSO3(const cv::Mat &v)
{
  return ExpSO3(v.at<float>(0), v.at<float>(1), v.at<float>(2));
}

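// Projects a camera-space point into pixel coordinates with the pinhole
// model: u = fx * X/Z + cx, v = fy * Y/Z + cy, where mk is the 3x3 camera
// matrix (fx = mk(0,0), fy = mk(1,1), cx = mk(0,2), cy = mk(1,2)).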
cv::Point2f Camera2Pixel(cv::Mat poseCamera, cv::Mat mk)
{
  return cv::Point2f(
    poseCamera.at<float>(0, 0) / poseCamera.at<float>(2, 0) * mk.at<float>(0, 0) + mk.at<float>(0, 2),
    poseCamera.at<float>(1, 0) / poseCamera.at<float>(2, 0) * mk.at<float>(1, 1) + mk.at<float>(1, 2)
  );
}

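// Fits a dominant plane to the current SLAM map with a small RANSAC loop:
// map points observed more than 5 times are collected, 3 of them are drawn
// at random to hypothesize a plane ax + by + cz + d = 0 (coefficients taken
// from the SVD null space of the 3x4 system), every point is scored by its
// distance to that plane, and the hypothesis whose sorted distance at index
// max(0.2*N, 20) (roughly the 20th percentile) is lowest wins. Points closer
// than 1.4x that distance are kept as inliers and the plane is refined in
// Recompute(). Returns mTpw (plane-to-world transform), left unchanged when
// fewer than 50 usable points exist.
//
// Illustrative call (this mirrors how Detect() uses it further down; the
// variable names are only for the example):
//   Plane plane;
//   cv::Mat Tpw = plane.DetectPlane(cameraPose, slam->GetMap()->GetAllMapPoints(), 50);
//   if (!Tpw.empty()) { /* plane found; plane.mOrigin holds its centroid */ }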
cv::Mat Plane::DetectPlane(const cv::Mat Tcw, const std::vector<SD_SLAM::MapPoint*> &vMPs, const int iterations)
{
  // Retrieve 3D points
  std::vector<cv::Mat> vPoints;
  vPoints.reserve(vMPs.size());
  std::vector<SD_SLAM::MapPoint*> vPointMP;
  vPointMP.reserve(vMPs.size());

  for (size_t i = 0; i < vMPs.size(); i++)
  {
    SD_SLAM::MapPoint* pMP = vMPs[i];
    if (pMP && !pMP->isBad())
    {
      if (pMP->Observations() > 5)
      {
        Eigen::Vector3d Pos = pMP->GetWorldPos();
        cv::Mat pos(cv::Point3f(Pos(0), Pos(1), Pos(2)));
        vPoints.push_back(pos);
        vPointMP.push_back(pMP);
      }
    }
  }

  const int N = vPoints.size();

  if (N < 50)
    return mTpw;

  // Indices for minimum set selection
  std::vector<size_t> vAllIndices;
  vAllIndices.reserve(N);
  std::vector<size_t> vAvailableIndices;

  for (int i = 0; i < N; i++)
  {
    vAllIndices.push_back(i);
  }

  float bestDist = 1e10;
  std::vector<float> bestvDist;

  // RANSAC
  for (int n = 0; n < iterations; n++)
  {
    vAvailableIndices = vAllIndices;

    cv::Mat A(3, 4, CV_32F);
    A.col(3) = cv::Mat::ones(3, 1, CV_32F);

    // Get min set of points
    for (short i = 0; i < 3; ++i)
    {
      int randi = SD_SLAM::Random(0, vAvailableIndices.size() - 1);

      int idx = vAvailableIndices[randi];

      A.row(i).colRange(0, 3) = vPoints[idx].t();

      vAvailableIndices[randi] = vAvailableIndices.back();
      vAvailableIndices.pop_back();
    }

    cv::Mat u, w, vt;
    cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

    const float a = vt.at<float>(3, 0);
    const float b = vt.at<float>(3, 1);
    const float c = vt.at<float>(3, 2);
    const float d = vt.at<float>(3, 3);

    std::vector<float> vDistances(N, 0);

    const float f = 1.0f / sqrt(a * a + b * b + c * c + d * d);

    for (int i = 0; i < N; i++)
    {
      vDistances[i] = fabs(vPoints[i].at<float>(0) * a + vPoints[i].at<float>(1) * b + vPoints[i].at<float>(2) * c + d) * f;
    }

    std::vector<float> vSorted = vDistances;
    std::sort(vSorted.begin(), vSorted.end());

    int nth = std::max((int)(0.2 * N), 20);
    const float medianDist = vSorted[nth];

    if (medianDist < bestDist)
    {
      bestDist = medianDist;
      bestvDist = vDistances;
    }
  }

  // Compute threshold inlier/outlier
  const float th = 1.4 * bestDist;
  std::vector<bool> vbInliers(N, false);
  int nInliers = 0;
  for (int i = 0; i < N; i++)
  {
    if (bestvDist[i] < th)
    {
      nInliers++;
      vbInliers[i] = true;
    }
  }

  std::vector<SD_SLAM::MapPoint*> vInlierMPs(nInliers, NULL);
  int nin = 0;
  for (int i = 0; i < N; i++)
  {
    if (vbInliers[i])
    {
      vInlierMPs[nin] = vPointMP[i];
      nin++;
    }
  }

  mMvMPs = vInlierMPs;
  mMTcw = Tcw.clone();
  mRang = -3.14f / 2 + ((float)rand() / RAND_MAX) * 3.14f;
  Recompute();
  return mTpw;
}

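// Refines the plane using every inlier map point: the points are stacked
// into an Nx4 system [X Y Z 1] and the plane coefficients are read from the
// last row of V^T after SVD. The normal is flipped, if needed, so that it
// points towards the camera centre, and mTpw is built as a 4x4 transform
// whose rotation aligns the world +Y axis with the plane normal (through
// ExpSO3) and whose translation is the centroid of the inliers.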
void Plane::Recompute()
{
  const int N = mMvMPs.size();

  // Recompute plane with all points
  cv::Mat A = cv::Mat(N, 4, CV_32F);
  A.col(3) = cv::Mat::ones(N, 1, CV_32F);

  mOrigin = cv::Mat::zeros(3, 1, CV_32F);

  int nPoints = 0;
  for (int i = 0; i < N; i++)
  {
    SD_SLAM::MapPoint* pMP = mMvMPs[i];
    if (!pMP->isBad())
    {
      Eigen::Vector3d Pos = pMP->GetWorldPos();
      cv::Mat Xw(cv::Point3f(Pos(0), Pos(1), Pos(2)));
      mOrigin += Xw;
      A.row(nPoints).colRange(0, 3) = Xw.t();
      nPoints++;
    }
  }
  A.resize(nPoints);

  cv::Mat u, w, vt;
  cv::SVDecomp(A, w, u, vt, cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

  float a = vt.at<float>(3, 0);
  float b = vt.at<float>(3, 1);
  float c = vt.at<float>(3, 2);

  mOrigin = mOrigin * (1.0f / nPoints);
  const float f = 1.0f / sqrt(a * a + b * b + c * c);

  // Compute XC just the first time
  if (mXC.empty())
  {
    cv::Mat Oc = -mMTcw.colRange(0, 3).rowRange(0, 3).t() * mMTcw.rowRange(0, 3).col(3);
    mXC = Oc - mOrigin;
  }

  if ((mXC.at<float>(0) * a + mXC.at<float>(1) * b + mXC.at<float>(2) * c) < 0)
  {
    a = -a;
    b = -b;
    c = -c;
  }

  const float nx = a * f;
  const float ny = b * f;
  const float nz = c * f;

  mNormal = (cv::Mat_<float>(3, 1) << nx, ny, nz);

  cv::Mat up;
  up = (cv::Mat_<float>(3, 1) << 0.0f, 1.0f, 0.0f);

  cv::Mat v = up.cross(mNormal);
  const float sa = cv::norm(v);
  const float ca = up.dot(mNormal);
  const float ang = atan2(sa, ca);
  mTpw = cv::Mat::eye(4, 4, CV_32F);

  //mTpw.rowRange(0, 3).colRange(0, 3) = ExpSO3(v * ang / sa) * ExpSO3(up * mRang);
  ExpSO3(v * ang / sa).copyTo(mTpw.rowRange(0, 3).colRange(0, 3));
  mOrigin.copyTo(mTpw.col(3).rowRange(0, 3));
}

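// Constructor: sets default camera intrinsics for a 640x480 image, disables
// pattern-based initialization, configures ORB extraction, and starts the
// worker thread that runs GoThread(). The defaults are replaced as soon as
// BuildCameraParam() sees the real capture parameters.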
Sslam::Sslam()
{
  mIsDirty = false;
  needUpdate = false;
  mScale = 1.0f;
  mSlam = 0;
  mFound = false;
  mInitialized = false;
  mPlane2World = cv::Mat::eye(4, 4, CV_32F);

  SD_SLAM::Config &params = SD_SLAM::Config::GetInstance();
  params.SetCameraIntrinsics(640, 480, 640, 640, 320, 240);
  params.SetUsePattern(false);
  params.SetOrbParameters(600);

  // Run thread
  StartThreading(boost::bind(&Sslam::GoThread, this));
}

Sslam::~Sslam()
{
  if (mSlam)
    mSlam->Shutdown();

  SAFE_DELETE(mSlam);
}

void Sslam::SetDirty()
{
  mIsDirty = true;
}

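// Derives the SD_SLAM camera configuration from the capture parameters. The
// working image is downscaled so that its largest side is at most 640
// pixels, the focal length is approximated by that largest side, and the
// principal point is placed at the image centre. The number of pyramid
// levels then grows while the level size stays even and larger than 5000
// pixels. For example, a 1280x720 capture would give mScale = 0.5, a
// 640x360 working image with f = 640, and 4 pyramid levels.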
void Sslam::BuildCameraParam(const aruco::CameraParameters &camparam)
{
  mScale = 1.0f;
  int maxsize = std::max(camparam.CamSize.width, camparam.CamSize.height);
  if (maxsize > 640)
    mScale = 640.0f / (float)maxsize;

  // construct camParam
  SD_SLAM::Config &params = SD_SLAM::Config::GetInstance();
  double f = std::max((int)(static_cast<float>(camparam.CamSize.width) * mScale), (int)(static_cast<float>(camparam.CamSize.height) * mScale));
  params.SetCameraIntrinsics((int)(static_cast<float>(camparam.CamSize.width) * mScale), (int)(static_cast<float>(camparam.CamSize.height) * mScale), f, f, (int)(static_cast<float>(camparam.CamSize.width) * mScale * 0.5f), (int)(static_cast<float>(camparam.CamSize.height) * mScale * 0.5f));

  int wlvl = SD_SLAM::Config::Width();
  int hlvl = SD_SLAM::Config::Height();
  int pyrLevels = 1;
  while (wlvl % 2 == 0 && hlvl % 2 == 0 && wlvl * hlvl > 5000 && pyrLevels < 12)
  {
    wlvl /= 2;
    hlvl /= 2;
    pyrLevels++;
  }

  params.SetPyrLevels(pyrLevels);
}

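// (Re)creates the SD_SLAM system under the update mutex, shutting down and
// deleting any previous instance first, and clears the tracking flags.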
void Sslam::InitDetector()
{
  boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);

  if (mSlam)
    mSlam->Shutdown();

  SAFE_DELETE(mSlam);

  try
  {
    // build a SLAM system
    mSlam = new SD_SLAM::System(SD_SLAM::System::MONOCULAR, true);
    mSlam->DeactivateLocalizationMode();
    //mSlam->ActivateLocalizationMode();
  }
  catch (std::exception &)
  {
    SAFE_DELETE(mSlam);
  }
  mFound = false;
  mInitialized = false;
  ld.unlock();
}

Vector3 Sslam::GetCameraPosition()
{
  return mLastCamPos;
}

BtQuaternion Sslam::GetCameraOrientation()
{
  return mLastCamQuat;
}

bool Sslam::IsFound()
{
  return mFound;
}

void Sslam::Reset()
{
  InitDetector();
}

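// Debug overlay: projects map points observed at least 3 times into the
// image as small circles, draws the detected plane's axes while tracking is
// good, and prints the current tracking state on the image.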
void Sslam::DrawLandmarks(cv::Mat image)
{
  if (mSlam && !image.empty() && !mCameraWorld.empty())
  {
    //boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);
    const std::vector<SD_SLAM::MapPoint*> mapPoints = mSlam->GetMap()->GetAllMapPoints();
    cv::Scalar color(0, 255, 0);

    for (unsigned int i = 0; i < mapPoints.size(); i++)
    {
      SD_SLAM::MapPoint* pMP = mapPoints[i];
      if (pMP && (pMP->Observations() >= 3))
      {
        Eigen::Vector3d Pos = pMP->GetWorldPos();
        cv::Mat pm4 = mCameraWorld * cv::Mat(cv::Vec4f(Pos(0), Pos(1), Pos(2), 1.0));
        cv::circle(image, Camera2Pixel(pm4, mLastData.camParam.CameraMatrix), 2, color);
      }
    }

    if (!mCentroid.empty() && (mSlam->GetTrackingState() == 2))
    {
      std::vector<cv::Mat> axisPoints(4);
      axisPoints[0] = (cv::Mat_<float>(4, 1) << 0.0, 0, 0, 1);
      axisPoints[1] = (cv::Mat_<float>(4, 1) << 0.3, 0, 0, 1);
      axisPoints[2] = (cv::Mat_<float>(4, 1) << 0.0, 0.3, 0, 1);
      axisPoints[3] = (cv::Mat_<float>(4, 1) << 0, 0, 0.3, 1);
      std::vector<cv::Point2f> drawPoints(4);
      for (int i = 0; i < 4; i++)
      {
        axisPoints[i] = mPlane2Camera * axisPoints[i];
        drawPoints[i] = Camera2Pixel(axisPoints[i], mLastData.camParam.CameraMatrix);
      }

      cv::line(image, drawPoints[0], drawPoints[1], cv::Scalar(250, 0, 0), 5);
      cv::line(image, drawPoints[0], drawPoints[2], cv::Scalar(0, 250, 0), 5);
      cv::line(image, drawPoints[0], drawPoints[3], cv::Scalar(0, 0, 250), 5);

      /*
      std::vector<cv::Point3f> drawRectPoints(8);
      drawRectPoints[0] = cv::Point3f(-0.15, 0, -0.15);
      drawRectPoints[1] = cv::Point3f(0.15, 0, -0.15);
      drawRectPoints[2] = cv::Point3f(-0.15, 0, 0.15);
      drawRectPoints[3] = cv::Point3f(-0.15, 0.3, -0.15);
      drawRectPoints[4] = cv::Point3f(-0.15, 0.3, 0.15);
      drawRectPoints[5] = cv::Point3f(0.15, 0.3, 0.15);
      drawRectPoints[6] = cv::Point3f(0.15, 0, 0.15);
      drawRectPoints[7] = cv::Point3f(0.15, 0.3, -0.15);

      cv::Mat Rcp, Tcp;
      std::vector<cv::Point2f> projectedPoints;
      cv::Rodrigues(mPlane2Camera.rowRange(0, 3).colRange(0, 3), Rcp);
      Tcp = mPlane2Camera.col(3).rowRange(0, 3);
      cv::projectPoints(drawRectPoints, Rcp, Tcp, tracker->GetCalibrationMatrix(), tracker->GetDistortionMatrix(), projectedPoints);

      cv::line(image, projectedPoints[0] / mScale, projectedPoints[1] / mScale, cv::Scalar(250, 0, 0), 5);
      cv::line(image, projectedPoints[0] / mScale, projectedPoints[2] / mScale, cv::Scalar(0, 250, 0), 5);
      cv::line(image, projectedPoints[0] / mScale, projectedPoints[3] / mScale, cv::Scalar(0, 0, 250), 5);
      cv::line(image, projectedPoints[1] / mScale, projectedPoints[7] / mScale, cv::Scalar(10, 0, 50), 2);
      cv::line(image, projectedPoints[3] / mScale, projectedPoints[7] / mScale, cv::Scalar(20, 0, 50), 2);
      cv::line(image, projectedPoints[3] / mScale, projectedPoints[4] / mScale, cv::Scalar(30, 0, 50), 2);
      cv::line(image, projectedPoints[2] / mScale, projectedPoints[4] / mScale, cv::Scalar(40, 0, 50), 2);
      cv::line(image, projectedPoints[1] / mScale, projectedPoints[6] / mScale, cv::Scalar(50, 0, 50), 2);
      cv::line(image, projectedPoints[2] / mScale, projectedPoints[6] / mScale, cv::Scalar(60, 0, 50), 2);
      cv::line(image, projectedPoints[4] / mScale, projectedPoints[5] / mScale, cv::Scalar(70, 0, 50), 2);
      cv::line(image, projectedPoints[5] / mScale, projectedPoints[6] / mScale, cv::Scalar(80, 0, 50), 2);
      cv::line(image, projectedPoints[5] / mScale, projectedPoints[7] / mScale, cv::Scalar(90, 0, 50), 2);

      cv::circle(image, projectedPoints[0] / mScale, 4, cv::Scalar(0, 0, 250), 1, 8);
      */
    }

    switch (mSlam->GetTrackingState())
    {
      case -1: cv::putText(image, "SYSTEM NOT READY", cv::Point(100, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); break;
      case 0:  cv::putText(image, "NO IMAGES YET", cv::Point(100, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); break;
      case 1:  cv::putText(image, "SLAM NOT INITIALIZED", cv::Point(100, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); break;
      case 2:  cv::putText(image, "SLAM ON", cv::Point(100, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 255, 0), 1); break;
      case 3:  cv::putText(image, "SLAM LOST", cv::Point(100, 50), cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 255), 1); break;
      default: break;
    }
    //ld.unlock();
  }
}

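// Feeds the latest grayscale frame to SD_SLAM and updates the exported
// camera pose. Once tracking is stable (state 2) and the map holds more than
// 200 points, a reference plane is detected and mPlane2World is fixed; after
// that the camera pose is expressed relative to that plane. The 4x4
// camera-to-plane matrix is unpacked into a column-major modelview array
// (with its first rotation column negated) from which mLastCamQuat and
// mLastCamPos are extracted. If tracking stays lost for more than 4 seconds
// the whole system is reset.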
void Sslam::Detect()
{
  int prevState = mSlam->GetTrackingState();
  cv::Mat pose;
  {
    boost::unique_lock<boost::recursive_mutex> ld(mUpdateMutex);

    Eigen::Matrix4d epose = mSlam->TrackMonocular(mLastData.gray);
    cv::Mat cpose;
    eigen2cv(epose, cpose);
    cpose.convertTo(pose, CV_32F);

    if (mSlam->MapChanged() || (mSlam->GetTrackingState() < 2))
    {
      mInitialized = false;
      mFound = false;
    }

    //mFound = (mSlam->GetTrackingState() == 2) ? true : false;
    ld.unlock();
  }
  pose.copyTo(mCameraWorld);

  if (pose.empty() || (mSlam->GetTrackingState() != 2))
  {
    mFound = false;

    if ((prevState == 2) && (mSlam->GetTrackingState() == 3))
      mTimestamp = std::chrono::steady_clock::now();

    double passed = std::chrono::duration_cast<std::chrono::duration<double>>(std::chrono::steady_clock::now() - mTimestamp).count();
    if ((mSlam->GetTrackingState() == 3) && (passed > 4))
      Reset();
  }
  else
  {
    if (!mInitialized)
    {
      Plane mplane;
      const std::vector<SD_SLAM::MapPoint*> vpMPs = mSlam->GetMap()->GetAllMapPoints();
      //const std::vector<SD_SLAM::MapPoint*> vpTMPs = mSlam->GetTrackedMapPoints();
      if (vpMPs.size() > 200)
      {
        mPlane2World = mplane.DetectPlane(pose, vpMPs, 50);

        if (!mPlane2World.empty())
        {
          mCentroid = mplane.mOrigin;
          mInitialized = true;
          mPlane2Camera = pose * mPlane2World;
        }
      }
    }
    else
    {
      mPlane2Camera = pose * mPlane2World;
    }

    if (!mInitialized || pose.empty())
    {
      mFound = false;
    }
    else
    {
      mFound = true;
      cv::Mat camtoplane = mPlane2Camera.inv();

      double modelview_matrix[16];
      modelview_matrix[0] = -camtoplane.at<float>(0, 0);
      modelview_matrix[1] = -camtoplane.at<float>(1, 0);
      modelview_matrix[2] = -camtoplane.at<float>(2, 0);
      modelview_matrix[3] = 0.0;

      modelview_matrix[4] = camtoplane.at<float>(0, 1);
      modelview_matrix[5] = camtoplane.at<float>(1, 1);
      modelview_matrix[6] = camtoplane.at<float>(2, 1);
      modelview_matrix[7] = 0.0;

      modelview_matrix[8] = camtoplane.at<float>(0, 2);
      modelview_matrix[9] = camtoplane.at<float>(1, 2);
      modelview_matrix[10] = camtoplane.at<float>(2, 2);
      modelview_matrix[11] = 0.0;

      modelview_matrix[12] = camtoplane.at<float>(0, 3);
      modelview_matrix[13] = camtoplane.at<float>(1, 3);
      modelview_matrix[14] = camtoplane.at<float>(2, 3);
      modelview_matrix[15] = 1.0;

      mLastCamQuat = BtQuaternion(0.0f, 0.0f, 1.0f, 0.0f) * BtQuaternion::FromRotationMatrix(modelview_matrix, mLastData.reversedBitmap);
      mLastCamPos = Vector3(static_cast<float>(mLastData.reversedBitmap ? -modelview_matrix[12] : modelview_matrix[12]), static_cast<float>(modelview_matrix[13]), static_cast<float>(modelview_matrix[14]));
    }
  }
}

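// Worker loop started from the constructor: whenever new capture data is
// flagged (needUpdate), it pulls the last frame from ArManager, rebuilds the
// detector if the camera size or configuration changed, downscales the
// grayscale image when needed and runs Detect(); otherwise it sleeps for a
// millisecond to avoid burning CPU.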
void Sslam::GoThread()
{
  while (IsRunning())
  {
    if (needUpdate)
    {
      ArManager* manager = ArManager::GetInstance();
      manager->GetLastData(mLastData);
      needUpdate = false;

      //cv::blur(mLastData.gray, mLastData.gray, cv::Size(2, 2));

      // Detect all markers seen on gray

      // camera size changed
      if (!mSlam || (((int)((float)mLastData.camParam.CamSize.width * mScale)) != SD_SLAM::Config::Width() || ((int)((float)mLastData.camParam.CamSize.height * mScale)) != SD_SLAM::Config::Height()))
        mIsDirty = true;

      // init detector
      if (mIsDirty)
      {
        mIsDirty = false;
        BuildCameraParam(mLastData.camParam);
        InitDetector();
      }

      if (mScale != 1.0f)
        cv::resize(mLastData.gray, mLastData.gray, cv::Size((int)((float)mLastData.gray.cols * mScale), (int)((float)mLastData.gray.rows * mScale)), 0, 0, cv::INTER_LINEAR);

      //cv::equalizeHist(mLastData.gray, mLastData.gray);

      // thread to detect do it after mutex lock
      if (!mLastData.image.empty() && mSlam)
      {
        Detect();
      }
    }
    else
    {
      // prevent CPU burn
      boost::this_thread::sleep_for(boost::chrono::milliseconds(1));
    }
  }
}