Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions include/caffe/loss_layers.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,14 @@ class AccuracyLayer : public Layer<Dtype> {
}
}


int label_axis_, outer_num_, inner_num_;
int top_k_;

/// Whether to ignore instances with a certain label.
bool has_ignore_label_;
/// The label indicating that an instance should be ignored.
int ignore_label_;
};

/**
Expand Down
71 changes: 47 additions & 24 deletions src/caffe/layers/accuracy_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,28 @@ template <typename Dtype>
void AccuracyLayer<Dtype>::LayerSetUp(
const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
top_k_ = this->layer_param_.accuracy_param().top_k();

has_ignore_label_ =
this->layer_param_.accuracy_param().has_ignore_label();
if (has_ignore_label_) {
ignore_label_ = this->layer_param_.accuracy_param().ignore_label();
}
}

template <typename Dtype>
void AccuracyLayer<Dtype>::Reshape(
  const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  // top_k cannot exceed the number of predictions per label.
  CHECK_LE(top_k_, bottom[0]->count() / bottom[1]->count())
      << "top_k must be less than or equal to the number of classes.";
  // Resolve the (possibly negative) label axis against the prediction blob,
  // then split its count into outer * labels * inner.
  label_axis_ =
      bottom[0]->CanonicalAxisIndex(this->layer_param_.accuracy_param().axis());
  outer_num_ = bottom[0]->count(0, label_axis_);
  inner_num_ = bottom[0]->count(label_axis_ + 1);
  // One label is expected per (outer, inner) location of the predictions.
  CHECK_EQ(outer_num_ * inner_num_, bottom[1]->count())
      << "Number of labels must match number of predictions; "
      << "e.g., if label axis == 1 and prediction shape is (N, C, H, W), "
      << "label count (number of labels) must be N*H*W, "
      << "with integer values in {0, 1, ..., C-1}.";
  vector<int> top_shape(0);  // Accuracy is a scalar; 0 axes.
  top[0]->Reshape(top_shape);
}
Expand All @@ -35,32 +45,45 @@ void AccuracyLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
Dtype accuracy = 0;
const Dtype* bottom_data = bottom[0]->cpu_data();
const Dtype* bottom_label = bottom[1]->cpu_data();
int num = bottom[0]->count(0, bottom[1]->num_axes());
int dim = bottom[0]->count() / num;
const Dtype* label = bottom[1]->cpu_data();
const int dim = bottom[0]->count() / outer_num_;
const int num_labels = bottom[0]->shape(label_axis_);
vector<Dtype> maxval(top_k_+1);
vector<int> max_id(top_k_+1);
for (int i = 0; i < num; ++i) {
// Top-k accuracy
std::vector<std::pair<Dtype, int> > bottom_data_vector;
for (int j = 0; j < dim; ++j) {
bottom_data_vector.push_back(
std::make_pair(bottom_data[i * dim + j], j));
}
std::partial_sort(
bottom_data_vector.begin(), bottom_data_vector.begin() + top_k_,
bottom_data_vector.end(), std::greater<std::pair<Dtype, int> >());
// check if true label is in top k predictions
for (int k = 0; k < top_k_; k++) {
if (bottom_data_vector[k].second == static_cast<int>(bottom_label[i])) {
++accuracy;
break;

unsigned int count = 0;
for (int i = 0; i < outer_num_; ++i) {
for (int j = 0; j < inner_num_; ++j) {
const int label_value = static_cast<int>(label[i * inner_num_ + j]);
if (has_ignore_label_ && label_value == ignore_label_) {
continue;
}
DCHECK_GE(label_value, 0);
DCHECK_LT(label_value, bottom[0]->channels());


// Top-k accuracy
std::vector<std::pair<Dtype, int> > bottom_data_vector;
for (int k = 0; k < num_labels; ++k) {
bottom_data_vector.push_back(std::make_pair(
bottom_data[i * dim + k * inner_num_ + j], k));
}
std::partial_sort(
bottom_data_vector.begin(), bottom_data_vector.begin() + top_k_,
bottom_data_vector.end(), std::greater<std::pair<Dtype, int> >());
// check if true label is in top k predictions
for (int k = 0; k < top_k_; k++) {
if (bottom_data_vector[k].second == label_value) {
++accuracy;
break;
}
}
count++;
}
}

// LOG(INFO) << "Accuracy: " << accuracy;
top[0]->mutable_cpu_data()[0] = accuracy / num;
top[0]->mutable_cpu_data()[0] = accuracy / count;
// Accuracy layer should not be used as a loss function.
}

Expand Down
13 changes: 13 additions & 0 deletions src/caffe/proto/caffe.proto
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,8 @@ message LayerParameter {
optional WindowDataParameter window_data_param = 129;
}



// Message that stores parameters used to apply transformation
// to the data layer's data
message TransformationParameter {
message AccuracyParameter {
  // When computing accuracy, count as correct by comparing the true label to
  // the top k scoring classes.  By default, only compare to the top scoring
  // class (i.e. argmax).
  optional uint32 top_k = 1 [default = 1];

  // The "label" axis of the prediction blob, whose argmax corresponds to the
  // predicted label -- may be negative to index from the end (e.g., -1 for the
  // last axis).  For example, if axis == 1 and the predictions are
  // (N x C x H x W), the label blob is expected to contain N*H*W ground truth
  // labels with integer values in {0, 1, ..., C-1}.
  optional int32 axis = 2 [default = 1];

  // If specified, ignore instances with the given label.
  optional int32 ignore_label = 3;
}

// Message that stores parameters used by ArgMaxLayer
Expand Down