caffe使用稱爲blob的四維數組用於存儲和交換數據。Blob提供了統一的存儲器接口,持有一批圖像或其他數據、權值、權值更新值。Blob在內存中表示四維數組,維度從低到高爲[width_,height_,channels_,num_],其中,width_和height_表示圖像的寬和高,channels_表示顏色通道RGB,num_表示第幾幀,用於存儲數據或權值(data)和權值增量(diff),在進行網絡計算時,每層的輸入和輸出都需要通過Blob對象緩衝。Blob是caffe的基本存儲單元,下面對其進行簡單介紹,主要參考趙永科所著的《深度學習.21天實戰caffe》中對該Blob的詳細介紹。
數據結構描述
打開src/caffe/proto/caffe.proto,首先映入眼簾的便是與Blob相關的描述,可見該數據結構的重要性,是其他大部分數據結構的依賴項。
// Describes the shape of a Blob: one dimension size per axis.
message BlobShape {
repeated int64 dim = 1 [packed = true];
// One int64 value per axis, giving the size of that dimension.
// "packed" selects the compact wire encoding (values stored
// contiguously without per-element field tags).
}
// On-disk (serialized) representation of a Blob.
message BlobProto {
// Optional shape descriptor; preferred over the legacy 4-D fields below.
optional BlobShape shape = 7;
// Element values (data) and gradients (diff) as packed floats.
// The element count is determined by "shape" or, in legacy files,
// by (num, channels, height, width).
repeated float data = 5 [packed = true];
repeated float diff = 6 [packed = true];
repeated double double_data = 8 [packed = true];// double-precision counterpart of data
repeated double double_diff = 9 [packed = true];// double-precision counterpart of diff
// 4D dimensions -- deprecated. Use "shape" instead.
// Legacy shape fields kept for backward compatibility with old snapshots.
optional int32 num = 1 [default = 0];
optional int32 channels = 2 [default = 0];
optional int32 height = 3 [default = 0];
optional int32 width = 4 [default = 0];
}
Blob是怎樣練成的
Blob是一個模板類,聲明在include/caffe/blob.hpp中,封裝了SyncedMemory。
#ifndef CAFFE_BLOB_HPP_
#define CAFFE_BLOB_HPP_
#include <algorithm>
#include <string>
#include <vector>
#include "caffe/common.hpp"
//由protoc生成的頭文件
#include "caffe/proto/caffe.pb.h"
//CPU/GPU共享內存類,用於數據同步
#include "caffe/syncedmem.hpp"
// Maximum number of axes a Blob may have.
const int kMaxBlobAxes = 32;
namespace caffe {
template <typename Dtype>
class Blob {//類聲明
public:
//默認構造函數
Blob()
: data_(), diff_(), count_(0), capacity_(0) {}
explicit Blob(const int num, const int channels, const int height,
//顯式構造函數,避免隱式數據類型轉換
const int width);
explicit Blob(const vector<int>& shape);
void Reshape(const int num, const int channels, const int height,
const int width);
//變形函數,根據輸入參數重新設置當前blob形狀,必要時重新分配內存
void Reshape(const vector<int>& shape);
void Reshape(const BlobShape& shape);
void ReshapeLike(const Blob& other);
//得到Blob形狀字符串,用於打印log,見caffe運行log,
//類似於"Top shape: 100 1 28 28 (78400)"
inline string shape_string() const {
ostringstream stream;
for (int i = 0; i < shape_.size(); ++i) {
stream << shape_[i] << " ";
}
stream << "(" << count_ << ")";
return stream.str();
}
//返回Blob形狀
inline const vector<int>& shape() const { return shape_; }
//返回某一維度的尺寸
inline int shape(int index) const {
return shape_[CanonicalAxisIndex(index)];
}
//返回維度數目
inline int num_axes() const { return shape_.size(); }
//返回Blob中元素總數
inline int count() const { return count_; }
//返回Blob中某幾維子集的元素總數
inline int count(int start_axis, int end_axis) const {
CHECK_LE(start_axis, end_axis);//保證start_axis<=end_axis
CHECK_GE(start_axis, 0);//保證start_axis>= 0
CHECK_GE(end_axis, 0);//保證end_axis>= 0
CHECK_LE(start_axis, num_axes());//保證start_axis<=總的維度數目
CHECK_LE(end_axis, num_axes());//保證end_axis<=總的維度數目
int count = 1;
for (int i = start_axis; i < end_axis; ++i) {
count *= shape(i);
}
return count;
}
//計算從某一維度開始的元素總數
inline int count(int start_axis) const {
return count(start_axis, num_axes());
}
//轉換座標軸索引[-N,N]爲普通索引[0,N]
inline int CanonicalAxisIndex(int axis_index) const {
CHECK_GE(axis_index, -num_axes())//保證axis_index>=-num_axes()
<< "axis " << axis_index << " out of range for " << num_axes()
<< "-D Blob with shape " << shape_string();
CHECK_LT(axis_index, num_axes())//保證axis_index<num_axes()
<< "axis " << axis_index << " out of range for " << num_axes()
<< "-D Blob with shape " << shape_string();
if (axis_index < 0) {
return axis_index + num_axes();
}
return axis_index;
}
//獲取形狀某一維的尺寸
inline int num() const { return LegacyShape(0); }
inline int channels() const { return LegacyShape(1); }
inline int height() const { return LegacyShape(2);
inline int width() const { return LegacyShape(3); }
inline int LegacyShape(int index) const {
CHECK_LE(num_axes(), 4)
<< "Cannot use legacy accessors on Blobs with > 4 axes.";
CHECK_LT(index, 4);
CHECK_GE(index, -4);
if (index >= num_axes() || index < -num_axes()) {
return 1;
}
return shape(index);
}
//下面這幾個函數都是計算偏移量的
inline int offset(const int n, const int c = 0, const int h = 0,
const int w = 0) const {
CHECK_GE(n, 0);
CHECK_LE(n, num());
CHECK_GE(channels(), 0);
CHECK_LE(c, channels());
CHECK_GE(height(), 0);
CHECK_LE(h, height());
CHECK_GE(width(), 0);
CHECK_LE(w, width());
return ((n * channels() + c) * height() + h) * width() + w;
}
inline int offset(const vector<int>& indices) const {
CHECK_LE(indices.size(), num_axes());
int offset = 0;
for (int i = 0; i < num_axes(); ++i) {
offset *= shape(i);
if (indices.size() > i) {
CHECK_GE(indices[i], 0);
CHECK_LT(indices[i], shape(i));
offset += indices[i];
}
}
return offset;
}
//按值拷貝Blob到當前Blob
void CopyFrom(const Blob<Dtype>& source, bool copy_diff = false,
bool reshape = false);
inline Dtype data_at(const int n, const int c, const int h,
const int w) const {
return cpu_data()[offset(n, c, h, w)];
}
inline Dtype diff_at(const int n, const int c, const int h,
const int w) const {
return cpu_diff()[offset(n, c, h, w)];
}
inline Dtype data_at(const vector<int>& index) const {
return cpu_data()[offset(index)];
}
inline Dtype diff_at(const vector<int>& index) const {
return cpu_diff()[offset(index)];
}
inline const shared_ptr<SyncedMemory>& data() const {
CHECK(data_);
return data_;
}
inline const shared_ptr<SyncedMemory>& diff() const {
CHECK(diff_);
return diff_;
}
//只讀訪問cpu data
const Dtype* cpu_data() const;
//設置cpu data
void set_cpu_data(Dtype* data);
//只讀訪問gpu data
const int* gpu_shape() const;
const Dtype* gpu_data() const;
//設置gpu data
void set_gpu_data(Dtype* data);
//只讀訪問cpu diff
const Dtype* cpu_diff() const;
//只讀訪問gpu diff
const Dtype* gpu_diff() const;
//讀寫訪問cpu data、gpu data、cpu diff、gpu diff
Dtype* mutable_cpu_data();
Dtype* mutable_gpu_data();
Dtype* mutable_cpu_diff();
Dtype* mutable_gpu_diff();
void Update();//Blob更新運算,簡單理解爲data與diff的merge過程
//反序列化函數,從BlobProto中回覆一個Blob對象
void FromProto(const BlobProto& proto, bool reshape = true);
//序列化函數,將內存中的Blob對象保存到BlobProto中
void ToProto(BlobProto* proto, bool write_diff = false) const;
//計算data的L1範數(qiuhe )
Dtype asum_data() const;
//計算diff的L1範數
Dtype asum_diff() const;
//計算data的L2範數
Dtype sumsq_data() const;
//計算diff的L2範數
Dtype sumsq_diff() const;
void scale_data(Dtype scale_factor);
void scale_diff(Dtype scale_factor);
void ShareData(const Blob& other);
void ShareDiff(const Blob& other);
bool ShapeEquals(const BlobProto& other);
protected:
shared_ptr<SyncedMemory> data_;//存放指向data的指針
shared_ptr<SyncedMemory> diff_;//存放指向diff的指針
shared_ptr<SyncedMemory> shape_data_;
vector<int> shape_;//形狀信息
int count_;//存放有效元素數目信息
int capacity_;//存放Blob容器的容量信息
DISABLE_COPY_AND_ASSIGN(Blob);
}; // class Blob
} // namespace caffe
#endif // CAFFE_BLOB_HPP_