This post documents the setup of a Caffe development environment.
When using Caffe directly, you first convert your data into the expected database format, then write a network definition file, and training can start. But for prediction, and for the format conversion itself, we need to use Caffe as an SDK.
Here I focus on configuring an IDE. I chose Nsight because it comes bundled with CUDA, so it is already installed once CUDA is set up.
The example code is caffe/examples/mnist/convert_mnist_data.cpp:
// This script converts the MNIST dataset to a lmdb (default) or
// leveldb (--backend=leveldb) format used by caffe to load data.
// Usage:
// convert_mnist_data [FLAGS] input_image_file input_label_file
// output_db_file
// The MNIST dataset could be downloaded at
// http://yann.lecun.com/exdb/mnist/

#include <gflags/gflags.h>
#include <glog/logging.h>
#include <google/protobuf/text_format.h>
#include <leveldb/db.h>
#include <leveldb/write_batch.h>
#include <lmdb.h>
#include <stdint.h>
#include <sys/stat.h>

#include <fstream>  // NOLINT(readability/streams)
#include <string>

#include <caffe/proto/caffe.pb.h>

using namespace caffe;  // NOLINT(build/namespaces)
using std::string;

DEFINE_string(backend, "lmdb", "The backend for storing the result");

// MNIST idx files store 32-bit integers big-endian, so they must be
// byte-swapped before use on little-endian machines.
uint32_t swap_endian(uint32_t val) {
    val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
    return (val << 16) | (val >> 16);
}

void convert_dataset(const char* image_filename, const char* label_filename,
const char* db_path, const string& db_backend) {
// Open files
std::ifstream image_file(image_filename, std::ios::in | std::ios::binary);
std::ifstream label_file(label_filename, std::ios::in | std::ios::binary);
CHECK(image_file) << "Unable to open file " << image_filename;
CHECK(label_file) << "Unable to open file " << label_filename;
// Read the magic and the meta data
uint32_t magic;
uint32_t num_items;
uint32_t num_labels;
uint32_t rows;
  uint32_t cols;

  image_file.read(reinterpret_cast<char*>(&magic), 4);
  magic = swap_endian(magic);
  CHECK_EQ(magic, 2051) << "Incorrect image file magic.";
  label_file.read(reinterpret_cast<char*>(&magic), 4);
  magic = swap_endian(magic);
  CHECK_EQ(magic, 2049) << "Incorrect label file magic.";
  image_file.read(reinterpret_cast<char*>(&num_items), 4);
  num_items = swap_endian(num_items);
  label_file.read(reinterpret_cast<char*>(&num_labels), 4);
  num_labels = swap_endian(num_labels);
  CHECK_EQ(num_items, num_labels);
  image_file.read(reinterpret_cast<char*>(&rows), 4);
  rows = swap_endian(rows);
  image_file.read(reinterpret_cast<char*>(&cols), 4);
  cols = swap_endian(cols);

  // lmdb
MDB_env *mdb_env;
MDB_dbi mdb_dbi;
MDB_val mdb_key, mdb_data;
MDB_txn *mdb_txn;
// leveldb
leveldb::DB* db;
leveldb::Options options;
options.error_if_exists = true;
options.create_if_missing = true;
  options.write_buffer_size = 268435456;
  leveldb::WriteBatch* batch = NULL;

  // Open db
if (db_backend == "leveldb") { // leveldb
LOG(INFO) << "Opening leveldb " << db_path;
leveldb::Status status = leveldb::DB::Open(
options, db_path, &db);
CHECK(status.ok()) << "Failed to open leveldb " << db_path
<< ". Is it already existing?";
batch = new leveldb::WriteBatch();
} else if (db_backend == "lmdb") { // lmdb
LOG(INFO) << "Opening lmdb " << db_path;
    CHECK_EQ(mkdir(db_path, 0744), 0)
        << "mkdir " << db_path << " failed";
    CHECK_EQ(mdb_env_create(&mdb_env), MDB_SUCCESS) << "mdb_env_create failed";
    CHECK_EQ(mdb_env_set_mapsize(mdb_env, 1099511627776), MDB_SUCCESS)  // 1TB
        << "mdb_env_set_mapsize failed";
    CHECK_EQ(mdb_env_open(mdb_env, db_path, 0, 0664), MDB_SUCCESS)
        << "mdb_env_open failed";
    CHECK_EQ(mdb_txn_begin(mdb_env, NULL, 0, &mdb_txn), MDB_SUCCESS)
        << "mdb_txn_begin failed";
    CHECK_EQ(mdb_open(mdb_txn, NULL, 0, &mdb_dbi), MDB_SUCCESS)
<< "mdb_open failed. Does the lmdb already exist? ";
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
  }

  // Storing to db
  char label;
  char* pixels = new char[rows * cols];
  int count = 0;
  const int kMaxKeyLength = 10;
  char key_cstr[kMaxKeyLength];
  string value;

  Datum datum;
  datum.set_channels(1);
datum.set_height(rows);
datum.set_width(cols);
LOG(INFO) << "A total of " << num_items << " items.";
LOG(INFO) << "Rows: " << rows << " Cols: " << cols;
  for (int item_id = 0; item_id < num_items; ++item_id) {
    image_file.read(pixels, rows * cols);
    label_file.read(&label, 1);
datum.set_data(pixels, rows*cols);
datum.set_label(label);
snprintf(key_cstr, kMaxKeyLength, "%08d", item_id);
datum.SerializeToString(&value);
    string keystr(key_cstr);

    // Put in db
if (db_backend == "leveldb") { // leveldb
batch->Put(keystr, value);
} else if (db_backend == "lmdb") { // lmdb
mdb_data.mv_size = value.size();
      mdb_data.mv_data = reinterpret_cast<void*>(&value[0]);
      mdb_key.mv_size = keystr.size();
      mdb_key.mv_data = reinterpret_cast<void*>(&keystr[0]);
      CHECK_EQ(mdb_put(mdb_txn, mdb_dbi, &mdb_key, &mdb_data, 0), MDB_SUCCESS)
<< "mdb_put failed";
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
    }

    if (++count % 1000 == 0) {
// Commit txn
if (db_backend == "leveldb") { // leveldb
db->Write(leveldb::WriteOptions(), batch);
delete batch;
batch = new leveldb::WriteBatch();
} else if (db_backend == "lmdb") { // lmdb
CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS)
<< "mdb_txn_commit failed";
        CHECK_EQ(mdb_txn_begin(mdb_env, NULL, 0, &mdb_txn), MDB_SUCCESS)
<< "mdb_txn_begin failed";
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}
}
}
// write the last batch
  if (count % 1000 != 0) {
if (db_backend == "leveldb") { // leveldb
db->Write(leveldb::WriteOptions(), batch);
delete batch;
delete db;
} else if (db_backend == "lmdb") { // lmdb
CHECK_EQ(mdb_txn_commit(mdb_txn), MDB_SUCCESS) << "mdb_txn_commit failed";
mdb_close(mdb_env, mdb_dbi);
mdb_env_close(mdb_env);
} else {
LOG(FATAL) << "Unknown db backend " << db_backend;
}
LOG(ERROR) << "Processed " << count << " files.";
}
  delete[] pixels;  // pixels was allocated with new[], so release it with delete[]
}

int main(int argc, char** argv) {
#ifndef GFLAGS_GFLAGS_H_
namespace gflags = google;
#endif

  gflags::SetUsageMessage("This script converts the MNIST dataset to\n"
"the lmdb/leveldb format used by Caffe to load data.\n"
"Usage:\n"
" convert_mnist_data [FLAGS] input_image_file input_label_file "
"output_db_file\n"
"The MNIST dataset could be downloaded at\n"
" http://yann.lecun.com/exdb/mnist/\n"
"You should gunzip them after downloading,"
"or directly use data/mnist/get_mnist.sh\n");
  gflags::ParseCommandLineFlags(&argc, &argv, true);

  const string& db_backend = FLAGS_backend;

  if (argc != 4) {
    gflags::ShowUsageWithFlagsRestrict(argv[0],
"examples/mnist/convert_mnist_data");
} else {
    google::InitGoogleLogging(argv[0]);
    convert_dataset(argv[1], argv[2], argv[3], db_backend);
}
  return 0;
}
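Once the database has been written, the same SDK-style usage works in the other direction: you can open the lmdb and parse the stored values back into caffe::Datum objects. Below is a minimal sketch of such a reader, assuming the database was produced by the converter above; the file name read_mnist_lmdb.cpp, the default path mnist_train_lmdb, and the limit of five records are just illustrative choices.

// read_mnist_lmdb.cpp -- minimal sketch: read a few records back from the lmdb
// written by the converter above and decode them as caffe::Datum messages.
#include <glog/logging.h>
#include <lmdb.h>

#include <cstdio>

#include <caffe/proto/caffe.pb.h>

int main(int argc, char** argv) {
  const char* db_path = argc > 1 ? argv[1] : "mnist_train_lmdb";  // assumed path

  MDB_env* env;
  MDB_txn* txn;
  MDB_dbi dbi;
  MDB_cursor* cursor;
  CHECK_EQ(mdb_env_create(&env), MDB_SUCCESS);
  CHECK_EQ(mdb_env_open(env, db_path, MDB_RDONLY, 0664), MDB_SUCCESS);
  CHECK_EQ(mdb_txn_begin(env, NULL, MDB_RDONLY, &txn), MDB_SUCCESS);
  CHECK_EQ(mdb_open(txn, NULL, 0, &dbi), MDB_SUCCESS);
  CHECK_EQ(mdb_cursor_open(txn, dbi, &cursor), MDB_SUCCESS);

  MDB_val key, data;
  int shown = 0;
  // Walk the database in key order and decode each value as a caffe::Datum.
  while (shown < 5 &&
         mdb_cursor_get(cursor, &key, &data,
                        shown == 0 ? MDB_FIRST : MDB_NEXT) == MDB_SUCCESS) {
    caffe::Datum datum;
    CHECK(datum.ParseFromArray(data.mv_data, static_cast<int>(data.mv_size)));
    printf("key=%.*s label=%d size=%dx%dx%d\n",
           static_cast<int>(key.mv_size), static_cast<const char*>(key.mv_data),
           datum.label(), datum.channels(), datum.height(), datum.width());
    ++shown;
  }

  mdb_cursor_close(cursor);
  mdb_txn_abort(txn);
  mdb_close(env, dbi);
  mdb_env_close(env);
  return 0;
}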
When building Caffe, also run make install. This generates an install directory containing three subdirectories: include, lib, and tools.
Now let's configure Nsight.
Properties -> Settings -> C++ Includes -> add the following paths
(replace the Caffe root directory with your own):
/home/zhxfl/cuda-workspace/caffe/build/install/include
/home/zhxfl/cuda-workspace/caffe/build/src/
/usr/local/cuda/include
Properties -> Build -> Settings -> GCC C++ Compiler -> Miscellaneous -> enable the -fPIC option.
Properties -> Build -> Settings -> GCC C++ Linker -> Libraries -> add the following shared libraries:
caffe
proto
caffe_cu
leveldb
snappy
protobuf
gflags
glog
lmdb
Properties -> Build -> Settings -> GCC C++ Linker -> Library search path (-L) -> add the following library directories:
/usr/local/lib
/home/zhxfl/cuda-workspace/caffe/build/install/lib
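Before moving on to real code, it is worth building and running something trivial to confirm the project is wired up. The following is a minimal sketch (the file name smoke_test.cpp, the 28x28 dimensions, and the label value are placeholders): it only touches the Caffe proto headers, glog, and protobuf, so if it compiles, links, and prints a byte count, the include paths and library settings above are correct.

// smoke_test.cpp -- minimal sketch to confirm the Nsight project compiles and
// links against Caffe and its dependencies (glog, protobuf).
#include <glog/logging.h>

#include <iostream>
#include <string>

#include <caffe/proto/caffe.pb.h>

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);

  caffe::Datum datum;  // generated protobuf class from install/include
  datum.set_channels(1);
  datum.set_height(28);
  datum.set_width(28);
  datum.set_label(7);

  std::string buffer;
  CHECK(datum.SerializeToString(&buffer)) << "protobuf serialization failed";
  std::cout << "Serialized Datum: " << buffer.size() << " bytes" << std::endl;
  return 0;
}

If the linker complains about missing symbols from caffe or proto, double-check that make install was actually run and that the library search path points at build/install/lib.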