ghmr_loss_layer.hpp (forked from xialuxi/GHMLoss-caffe)
#ifndef CAFFE_GHMR_LOSS_LAYERS_HPP_
#define CAFFE_GHMR_LOSS_LAYERS_HPP_
#include <vector>
#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/layers/loss_layer.hpp"
namespace caffe {
template <typename Dtype>
class GhmrLossLayer : public LossLayer<Dtype> {
 public:
  explicit GhmrLossLayer(const LayerParameter& param)
      : LossLayer<Dtype>(param) {}
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  virtual inline const char* type() const { return "GhmrLoss"; }
  virtual inline bool AllowForceBackward(const int bottom_index) const {
    return true;
  }

 protected:
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  // virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
  //     const vector<Blob<Dtype>*>& top);
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);
  // virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
  //     const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom);

  int m_;                  // number of unit-region bins used to estimate gradient density
  float* r_num;            // running per-bin sample counts, size m_
  Dtype alpha;             // momentum for the moving average of the bin counts
  Dtype mu;                // ASL1 smoothing parameter: ASL1(d) = sqrt(d^2 + mu^2) - mu
  Blob<Dtype> diff_asl;    // element-wise ASL1 gradient d / sqrt(d^2 + mu^2)
  Blob<Dtype> beta;        // per-example weight: beta = N / GD(g)
  Blob<Dtype> distance;    // element-wise difference d = prediction - target
  Blob<Dtype> loss_value;  // per-example weighted loss
};
} // namespace caffe
#endif // CAFFE_GHMR_LOSS_LAYERS_HPP_
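
For context: GHM-R ("Gradient Harmonized Single-Stage Detector", Li et al., AAAI 2019) replaces smooth L1 with the Authentic Smooth L1 loss ASL1(d) = sqrt(d^2 + mu^2) - mu, whose gradient norm g = |d| / sqrt(d^2 + mu^2) always lies in [0, 1). Examples are binned by g, the gradient density GD(g) is estimated from the bin counts, and each example is reweighted by beta = N / GD(g) so that the large mass of easy examples (and the few outliers) stop dominating the gradient. The sketch below is a minimal standalone illustration of that forward pass, not this layer's actual implementation; it assumes m_ is the bin count, r_num holds per-bin counts smoothed with momentum alpha, and GD(g) is approximated as count / bin width with bin width 1/m.

#include <algorithm>
#include <cmath>
#include <vector>

// Hypothetical sketch of a GHM-R forward pass over flat arrays.
float GhmrForwardSketch(const std::vector<float>& pred,
                        const std::vector<float>& target,
                        std::vector<float>* r_num,  // size m; running bin counts
                        int m, float mu, float alpha) {
  const int n = static_cast<int>(pred.size());
  std::vector<int> bin_count(m, 0);
  std::vector<int> bin_idx(n);
  for (int i = 0; i < n; ++i) {
    float d = pred[i] - target[i];
    // ASL1 gradient norm, guaranteed to lie in [0, 1).
    float g = std::fabs(d) / std::sqrt(d * d + mu * mu);
    int b = std::min(static_cast<int>(g * m), m - 1);
    bin_idx[i] = b;
    ++bin_count[b];
  }
  // Exponential moving average of the bin counts (assumption: this is the
  // role of r_num and alpha in the header above).
  for (int b = 0; b < m; ++b)
    (*r_num)[b] = alpha * (*r_num)[b] + (1.f - alpha) * bin_count[b];
  float loss = 0.f;
  for (int i = 0; i < n; ++i) {
    float d = pred[i] - target[i];
    float asl1 = std::sqrt(d * d + mu * mu) - mu;  // ASL1(d)
    float gd = (*r_num)[bin_idx[i]] * m;           // GD(g) ~ count / bin width
    float beta = gd > 0.f ? static_cast<float>(n) / gd : 0.f;  // beta = N / GD(g)
    loss += beta * asl1;
  }
  return loss / n;  // average weighted loss over the mini-batch
}

With alpha = 0 this reduces to plain per-batch density weighting; an alpha close to 1 keeps the density estimate stable across mini-batches, which is the usual reason for the moving average.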