/**************************************************************************
 * Copyright (c) 2019-2020 Chimney Xu. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**************************************************************************/
/* *************************************************************************
* File Name : demo.cpp
* Author : smallchimney
* Author Email : [email protected]
* Created Time : 2019-12-05 16:55:11
* Last Modified : smallchimney
* Modified Time : 2020-05-29 14:29:18
************************************************************************* */
// TDBoW and template typedefs
#include <TDBoW/PCBridge.h>
// PCL
#include <pcl/io/pcd_io.h>
#pragma GCC diagnostic ignored "-Wpedantic" // the PCL keypoint headers trigger pedantic warnings
#include <pcl/keypoints/harris_3d.h>
#ifdef FOUND_OPENMP
#include <pcl/features/fpfh_omp.h>
#include <omp.h>    // for omp_get_max_threads() below
#else
#include <pcl/features/fpfh.h>
#endif
#include <chrono>
using std::cout; using std::endl;
typedef TDBoW::FPFH33Database Database;
typedef TDBoW::FPFH33Vocabulary Vocabulary;
typedef Vocabulary::Descriptor Descriptor;
typedef Vocabulary::ConstDataSet ConstDataSet;
typedef Vocabulary::DescriptorsSet DescriptorsSet;
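// TDBoW instantiates its vocabulary/database templates per descriptor type;
// the FPFH33 aliases above are the 33-bin FPFH specializations pulled in
// through PCBridge.h.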
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
std::vector<std::string> loadFeatures(DescriptorsSet&);
void testVocabCreation(const ConstDataSet&, const DescriptorsSet&, const std::vector<std::string>&);
void testDatabase(const DescriptorsSet&);
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
void wait() {
cout << endl << "Press enter to continue" << endl;
getchar();
}
// ----------------------------------------------------------------------------
int main() {
    // Load the files and compute the FPFH-33 descriptors of each one
    // DescriptorArray and Descriptor are the recommended types for queries
DescriptorsSet features;
auto files = loadFeatures(features);
    // In this simple case we do not have much data, so the same data is
    // used both to create the vocabulary and to query it.
    // make_const() consumes the original data internally (via `make_shared`),
    // so we work on a copy
auto copy = features;
    // The ConstDataSet type is only used for vocabulary creation.
auto dataset = Vocabulary::util::make_const(copy);
// Vocabulary testing
testVocabCreation(dataset, features, files);
wait();
    // Database testing
testDatabase(features);
return 0;
}
// ----------------------------------------------------------------------------
std::vector<std::string> loadFeatures(DescriptorsSet& _Features) {
using namespace boost::filesystem;
const auto resourceDir = path(PKG_DIR)/"demo/pc/pointclouds";
// Automatically find all `.pcd` files
std::vector<path> files;
for(const auto& file : recursive_directory_iterator(resourceDir)) {
if(!is_regular_file(file))continue;
const auto& filePath = file.path();
auto extension = filePath.extension().native();
std::transform(extension.begin(), extension.end(), extension.begin(), ::tolower);
if(extension != ".pcd")continue;
files.emplace_back(filePath);
}
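    // Note: recursive_directory_iterator visits entries in an unspecified
    // order; sort `files` here if a reproducible ordering is required.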
    // Reserve space for the features
_Features.clear();
_Features.shrink_to_fit();
_Features.resize(files.size());
    // Iterate over each file
typedef pcl::PointCloud<pcl::PointXYZ> PointCloudXYZ;
typedef pcl::PointCloud<pcl::PointXYZI> PointCloudXYZI;
typedef pcl::PointCloud<pcl::PointNormal> PointCloudNormal;
pcl::HarrisKeypoint3D<pcl::PointXYZ, pcl::PointXYZI, pcl::Normal> harris;
#ifdef FOUND_OPENMP
    pcl::FPFHEstimationOMP<pcl::PointXYZ, pcl::PointNormal, pcl::FPFHSignature33> fpfh;
    // omp_get_num_threads() returns 1 outside of a parallel region, so use
    // omp_get_max_threads() to size the thread pool
    fpfh.setNumberOfThreads(static_cast<unsigned>(omp_get_max_threads()));
#else
    pcl::FPFHEstimation<pcl::PointXYZ, pcl::PointNormal, pcl::FPFHSignature33> fpfh;
#endif
    std::vector<std::string> names;
names.reserve(files.size());
for(size_t i = 0; i < files.size(); i++) {
// Load pointcloud
auto input = boost::make_shared<PointCloudXYZ>();
if(pcl::io::loadPCDFile(files[i].native(), *input) == -1)continue;
auto output = boost::make_shared<PointCloudXYZI>();
// Calculate keypoints using 3D harris
harris.setInputCloud(input);
harris.setNonMaxSupression(true);
harris.setRadius(0.5f);
harris.setThreshold(0.01f);
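        // (The detection radius and response threshold are expressed in the
        // cloud's units and are scene dependent; they may need tuning for
        // data at a different scale.)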
harris.compute(*output);
auto indices = harris.getKeypointsIndices();
        // Compute normals for every point
auto normals = boost::make_shared<PointCloudNormal>();
auto kdTree = boost::make_shared<pcl::search::KdTree<pcl::PointXYZ>>();
pcl::NormalEstimation<pcl::PointXYZ, pcl::PointNormal> ne;
ne.setInputCloud(input);
ne.setSearchMethod(kdTree);
ne.setKSearch(10);
ne.compute(*normals);
        // Compute descriptors for every point
auto descriptors = boost::make_shared<pcl::PointCloud<pcl::FPFHSignature33>>();
fpfh.setInputCloud(input);
fpfh.setInputNormals(normals);
fpfh.setSearchMethod(kdTree);
fpfh.setRadiusSearch(0.5f);
fpfh.compute(*descriptors);
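        // Note: descriptors are estimated for the whole cloud and only the
        // keypoint entries are collected below; passing the Harris indices
        // via fpfh.setIndices() would restrict the (expensive) estimation to
        // the keypoints instead.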
        // Collect the descriptors computed at the keypoints.
        // Index by names.size() rather than i: files that failed to load are
        // skipped, which keeps the feature slots aligned with the names
        auto& feature = _Features[names.size()];
        feature.resize(indices->indices.size());
        for(size_t j = 0; j < indices->indices.size(); j++) {
            const auto& index = static_cast<size_t>(indices->indices[j]);
            const auto& descriptor = descriptors->at(index);
            // Map the raw 33-float histogram as an Eigen view; the assignment
            // copies it into the Descriptor
            feature[j] = Descriptor::Map(
                    descriptor.histogram, 1, pcl::FPFHSignature33::descriptorSize());
        }
names.emplace_back(files[i].filename().native());
    }
    // Drop the unused slots of files that failed to load, so the returned
    // names stay aligned with `_Features`
    _Features.resize(names.size());
    return names;
}
// ----------------------------------------------------------------------------
void testVocabCreation(const ConstDataSet& _DataSet,
const DescriptorsSet& _Features, const std::vector<std::string>& _Names) {
    // create() is called below without explicit parameters, so the
    // vocabulary's default branching factor and depth levels are used
using namespace TDBoW;
Vocabulary voc;
assert(_Names.size() == _Features.size());
size_t PRINT_LEN = 0;
for(const auto& name : _Names) {
PRINT_LEN = std::max(PRINT_LEN, name.length());
}
PRINT_LEN *= 2;
size_t count = 0;
for(const auto& image : _DataSet) {
count += image.size();
}
cout << "Features size: " << count << endl;
cout << "Creating a small vocabulary..." << endl;
using namespace std::chrono;
auto start = system_clock::now();
voc.create(_DataSet);
auto end = system_clock::now();
std::cout << "Spent time: " << duration_cast<milliseconds>(end - start).count() << " ms." << endl;
cout << "... done!" << endl;
cout << "Vocabulary stop words by percent: 10% (" << voc.stopPercent(10) << " words)" << endl;
cout << "Vocabulary information: " << endl
<< voc << endl << endl;
    // let's do something with this vocabulary
    cout << "Matching clouds against themselves (0 low, 1 high): " << endl;
std::vector<BowVector> vec(_Features.size());
for(size_t i = 0; i < _Features.size(); i++) {
voc.transform(_Features[i], vec[i]);
// cout << "Bow vector " << i << ": " << vec[i] << endl;
}
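    // score() compares two bag-of-words vectors; with the default L1 scoring
    // of DBoW2-style vocabularies the result lies in [0, 1] (assuming TDBoW
    // keeps that default).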
cout << "======================================================" << endl;
for(size_t i = 0; i < _Features.size(); i++) {
for(size_t j = 0; j < _Features.size(); j++) {
cout << _Names[i] << " vs " << _Names[j];
size_t space = _Names[i].length() + _Names[j].length();
while(space++ <= PRINT_LEN) {
cout << ' ';
}
cout << ": " << voc.score(vec[i], vec[j]) << endl;
}
cout << "======================================================" << endl;
}
// save the vocabulary to disk
cout << endl << "Saving vocabulary..." << endl;
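    // The ".bin.lz4" extension presumably selects TDBoW's LZ4-compressed
    // binary format; the commented line below would save human-readable
    // YAML instead.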
voc.save("small_voc.bin.lz4");
// voc.save("small_voc.yml", false); // save in YAML format
cout << "Done" << endl;
}
// ----------------------------------------------------------------------------
void testDatabase(const DescriptorsSet& _Features) {
cout << "Creating a small database..." << endl;
// load the vocabulary from disk
    Database db("small_voc.bin.lz4", false); // false = do not use the direct index
    // The direct index is useful if we want to retrieve the features that
    // belong to some vocabulary node.
    // add the point clouds to the database
for(const auto& feature : _Features) {
db.add(feature);
}
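    // (In DBoW2-style databases add() returns the new entry's id; since the
    // clouds are added in order here, the ids in the query results map
    // straight back to indices of `_Features`.)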
cout << "... done!" << endl;
cout << "Database information: " << endl << db << endl;
// and query the database
cout << "Querying the database: " << endl;
cout << "==============================" << endl;
for(size_t i = 0; i < _Features.size(); i++) {
        // Skip the intermediate bow vector and do not limit the number of results
auto ret = db.query(_Features[i], nullptr, 0);
        // ret[0] is always the same cloud in this case, because we added it
        // to the database. ret[1] is the second best match.
        cout << "Searching for Cloud " << i << ". " << ret << endl;
cout << "==============================" << endl;
}
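    // A bounded query that also keeps the intermediate bag-of-words vector
    // would look like this (same query() overload as above, assuming TDBoW
    // keeps the DBoW2-style signature):
    //   TDBoW::BowVector bow;
    //   auto top4 = db.query(_Features[0], &bow, 4);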
}
// ----------------------------------------------------------------------------