
Commit

update
luoyetx committed Dec 11, 2016
1 parent c6f8290 commit 30a7caa
Showing 7 changed files with 5 additions and 103 deletions.
1 change: 0 additions & 1 deletion 3rdparty/include/.gitignore
@@ -1,2 +1 @@
-glog/
 google/
6 changes: 0 additions & 6 deletions copydeps.bat
@@ -1,9 +1,3 @@
-mkdir 3rdparty\include\glog
-copy 3rdparty\src\glog\src\windows\glog 3rdparty\include\glog
-copy 3rdparty\src\glog\Debug\libglog.lib 3rdparty\lib\libglogd.lib
-copy 3rdparty\src\glog\Release\libglog.lib 3rdparty\lib\libglog.lib
-copy 3rdparty\src\glog\Release\libglog.dll 3rdparty\bin\libglog.dll
-
 call 3rdparty\src\protobuf\cmake\build\extract_includes.bat
 move include\google 3rdparty\include\
 copy 3rdparty\src\protobuf\cmake\build\Debug\libprotobuf.lib 3rdparty\lib\libprotobufd.lib
5 changes: 0 additions & 5 deletions include/caffe/blob.hpp
@@ -218,14 +218,9 @@ class Blob {
 
   const Dtype* cpu_data() const;
   void set_cpu_data(Dtype* data);
-  const int* gpu_shape() const;
-  const Dtype* gpu_data() const;
   const Dtype* cpu_diff() const;
-  const Dtype* gpu_diff() const;
   Dtype* mutable_cpu_data();
-  Dtype* mutable_gpu_data();
   Dtype* mutable_cpu_diff();
-  Dtype* mutable_gpu_diff();
   void Update();
   void FromProto(const BlobProto& proto, bool reshape = true);
   void ToProto(BlobProto* proto, bool write_diff = false) const;
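With gpu_shape() and the four GPU accessors gone, Blob is a CPU-only container. A minimal sketch of the surviving accessor API (not part of this commit; the shape and values are illustrative):

    // Hedged sketch: touches only the accessors that remain after this commit.
    #include <vector>
    #include "caffe/blob.hpp"

    void blob_cpu_example() {
      caffe::Blob<float> blob(std::vector<int>{2, 3});  // Blob(const vector<int>&)
      float* data = blob.mutable_cpu_data();            // writable host pointer
      data[0] = 1.0f;
      const float* ro = blob.cpu_data();                // read-only view, same buffer
      // ro[0] == 1.0f; every pointer above refers to host memory.
      (void)ro;
    }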
2 changes: 0 additions & 2 deletions include/caffe/common.hpp
@@ -15,8 +15,6 @@
 
 #include "caffe/util/logging.hpp"
 
-#define NO_GPU LOG(FATAL) << "Cannot use GPU in CPU-only Caffe: check mode."
-
 // Convert macro to string
 #define STRINGIFY(m) #m
 #define AS_STRING(m) STRINGIFY(m)
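NO_GPU existed only as the expansion target of the GPU branches deleted elsewhere in this commit, so it can go. The retained STRINGIFY/AS_STRING pair is the usual two-level stringification idiom; a self-contained illustration (the VERSION macro is hypothetical, used only for this demo):

    #include <cstdio>

    #define STRINGIFY(m) #m
    #define AS_STRING(m) STRINGIFY(m)
    #define VERSION 42  // hypothetical macro, for demonstration only

    int main() {
      std::printf("%s\n", STRINGIFY(VERSION));  // prints "VERSION": # suppresses expansion
      std::printf("%s\n", AS_STRING(VERSION));  // prints "42": the argument expands first
      return 0;
    }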
19 changes: 5 additions & 14 deletions include/caffe/syncedmem.hpp
@@ -32,35 +32,26 @@ inline void CaffeFreeHost(void* ptr, bool use_cuda) {
 class SyncedMemory {
  public:
   SyncedMemory()
-      : cpu_ptr_(NULL), gpu_ptr_(NULL), size_(0), head_(UNINITIALIZED),
-        own_cpu_data_(false), cpu_malloc_use_cuda_(false), own_gpu_data_(false),
-        gpu_device_(-1) {}
+      : cpu_ptr_(NULL), size_(0), head_(UNINITIALIZED),
+        own_cpu_data_(false) {}
   explicit SyncedMemory(size_t size)
-      : cpu_ptr_(NULL), gpu_ptr_(NULL), size_(size), head_(UNINITIALIZED),
-        own_cpu_data_(false), cpu_malloc_use_cuda_(false), own_gpu_data_(false),
-        gpu_device_(-1) {}
+      : cpu_ptr_(NULL), size_(size), head_(UNINITIALIZED),
+        own_cpu_data_(false) {}
   ~SyncedMemory();
   const void* cpu_data();
   void set_cpu_data(void* data);
-  const void* gpu_data();
-  void set_gpu_data(void* data);
   void* mutable_cpu_data();
-  void* mutable_gpu_data();
-  enum SyncedHead { UNINITIALIZED, HEAD_AT_CPU, HEAD_AT_GPU, SYNCED };
+  enum SyncedHead { UNINITIALIZED, HEAD_AT_CPU };
   SyncedHead head() { return head_; }
   size_t size() { return size_; }
 
  private:
   void to_cpu();
-  void to_gpu();
   void* cpu_ptr_;
-  void* gpu_ptr_;
   size_t size_;
   SyncedHead head_;
   bool own_cpu_data_;
   bool cpu_malloc_use_cuda_;
-  bool own_gpu_data_;
-  int gpu_device_;
 
   DISABLE_COPY_AND_ASSIGN(SyncedMemory);
 };  // class SyncedMemory
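SyncedMemory is now a two-state machine: UNINITIALIZED until the first CPU access, HEAD_AT_CPU ever after. A hedged usage sketch (the size is illustrative):

    #include "caffe/syncedmem.hpp"

    void syncedmem_example() {
      caffe::SyncedMemory mem(16);       // head() == UNINITIALIZED; nothing allocated yet
      void* p = mem.mutable_cpu_data();  // first touch allocates host memory
      // head() == caffe::SyncedMemory::HEAD_AT_CPU from here on; with
      // HEAD_AT_GPU and SYNCED gone, there is no further state to reach.
      (void)p;
    }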
53 changes: 0 additions & 53 deletions src/caffe/blob.cpp
@@ -74,12 +74,6 @@ Blob<Dtype>::Blob(const vector<int>& shape)
   Reshape(shape);
 }
 
-template <typename Dtype>
-const int* Blob<Dtype>::gpu_shape() const {
-  CHECK(shape_data_);
-  return (const int*)shape_data_->gpu_data();
-}
-
 template <typename Dtype>
 const Dtype* Blob<Dtype>::cpu_data() const {
   CHECK(data_);
@@ -92,48 +86,24 @@ void Blob<Dtype>::set_cpu_data(Dtype* data) {
   data_->set_cpu_data(data);
 }
 
-template <typename Dtype>
-const Dtype* Blob<Dtype>::gpu_data() const {
-  CHECK(data_);
-  return (const Dtype*)data_->gpu_data();
-}
-
 template <typename Dtype>
 const Dtype* Blob<Dtype>::cpu_diff() const {
   CHECK(diff_);
   return (const Dtype*)diff_->cpu_data();
 }
 
-template <typename Dtype>
-const Dtype* Blob<Dtype>::gpu_diff() const {
-  CHECK(diff_);
-  return (const Dtype*)diff_->gpu_data();
-}
-
 template <typename Dtype>
 Dtype* Blob<Dtype>::mutable_cpu_data() {
   CHECK(data_);
   return static_cast<Dtype*>(data_->mutable_cpu_data());
 }
 
-template <typename Dtype>
-Dtype* Blob<Dtype>::mutable_gpu_data() {
-  CHECK(data_);
-  return static_cast<Dtype*>(data_->mutable_gpu_data());
-}
-
 template <typename Dtype>
 Dtype* Blob<Dtype>::mutable_cpu_diff() {
   CHECK(diff_);
   return static_cast<Dtype*>(diff_->mutable_cpu_data());
 }
 
-template <typename Dtype>
-Dtype* Blob<Dtype>::mutable_gpu_diff() {
-  CHECK(diff_);
-  return static_cast<Dtype*>(diff_->mutable_gpu_data());
-}
-
 template <typename Dtype>
 void Blob<Dtype>::ShareData(const Blob& other) {
   CHECK_EQ(count_, other.count());
@@ -162,10 +132,6 @@ void Blob<Dtype>::Update() {
         static_cast<const Dtype*>(diff_->cpu_data()),
         static_cast<Dtype*>(data_->mutable_cpu_data()));
     break;
-  case SyncedMemory::HEAD_AT_GPU:
-  case SyncedMemory::SYNCED:
-    NO_GPU;
-    break;
   default:
     LOG(FATAL) << "Syncedmem not initialized.";
   }
@@ -187,9 +153,6 @@ Dtype Blob<Dtype>::asum_data() const {
   switch (data_->head()) {
   case SyncedMemory::HEAD_AT_CPU:
     return caffe_cpu_asum(count_, cpu_data());
-  case SyncedMemory::HEAD_AT_GPU:
-  case SyncedMemory::SYNCED:
-    NO_GPU;
   case SyncedMemory::UNINITIALIZED:
     return 0;
   default:
Expand All @@ -214,9 +177,6 @@ Dtype Blob<Dtype>::asum_diff() const {
switch (diff_->head()) {
case SyncedMemory::HEAD_AT_CPU:
return caffe_cpu_asum(count_, cpu_diff());
case SyncedMemory::HEAD_AT_GPU:
case SyncedMemory::SYNCED:
NO_GPU;
case SyncedMemory::UNINITIALIZED:
return 0;
default:
@@ -245,10 +205,6 @@ Dtype Blob<Dtype>::sumsq_data() const {
     data = cpu_data();
     sumsq = caffe_cpu_dot(count_, data, data);
     break;
-  case SyncedMemory::HEAD_AT_GPU:
-  case SyncedMemory::SYNCED:
-    NO_GPU;
-    break;
   case SyncedMemory::UNINITIALIZED:
     return 0;
   default:
@@ -277,9 +233,6 @@ Dtype Blob<Dtype>::sumsq_diff() const {
     diff = cpu_diff();
     sumsq = caffe_cpu_dot(count_, diff, diff);
     break;
-  case SyncedMemory::HEAD_AT_GPU:
-  case SyncedMemory::SYNCED:
-    NO_GPU;
   case SyncedMemory::UNINITIALIZED:
     return 0;
   default:
@@ -305,9 +258,6 @@ void Blob<Dtype>::scale_data(Dtype scale_factor) {
     data = mutable_cpu_data();
     caffe_scal(count_, scale_factor, data);
     return;
-  case SyncedMemory::HEAD_AT_GPU:
-  case SyncedMemory::SYNCED:
-    NO_GPU;
   case SyncedMemory::UNINITIALIZED:
     return;
   default:
Expand All @@ -332,9 +282,6 @@ void Blob<Dtype>::scale_diff(Dtype scale_factor) {
diff = mutable_cpu_diff();
caffe_scal(count_, scale_factor, diff);
return;
case SyncedMemory::HEAD_AT_GPU:
case SyncedMemory::SYNCED:
NO_GPU;
case SyncedMemory::UNINITIALIZED:
return;
default:
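Every switch above keeps one live branch: HEAD_AT_CPU does the work, UNINITIALIZED returns early, anything else is fatal. A hedged sketch of the CPU-only Update() path (upstream Caffe implements Update() as data := data - diff via caffe_axpy; the values here are illustrative):

    #include <vector>
    #include "caffe/blob.hpp"

    void blob_update_example() {
      caffe::Blob<float> b(std::vector<int>{2});
      b.mutable_cpu_data()[0] = 3.0f;  b.mutable_cpu_data()[1] = 5.0f;
      b.mutable_cpu_diff()[0] = 1.0f;  b.mutable_cpu_diff()[1] = 2.0f;
      b.Update();  // takes the HEAD_AT_CPU branch: data -= diff, on the host
      // b.cpu_data() now holds {2.0f, 3.0f}.
    }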
22 changes: 0 additions & 22 deletions src/caffe/syncedmem.cpp
@@ -18,19 +18,11 @@ inline void SyncedMemory::to_cpu() {
     head_ = HEAD_AT_CPU;
     own_cpu_data_ = true;
     break;
-  case HEAD_AT_GPU:
-    NO_GPU;
-    break;
   case HEAD_AT_CPU:
-  case SYNCED:
     break;
   }
 }
 
-inline void SyncedMemory::to_gpu() {
-  NO_GPU;
-}
-
 const void* SyncedMemory::cpu_data() {
   to_cpu();
   return (const void*)cpu_ptr_;
@@ -46,25 +38,11 @@ void SyncedMemory::set_cpu_data(void* data) {
   own_cpu_data_ = false;
 }
 
-const void* SyncedMemory::gpu_data() {
-  NO_GPU;
-  return NULL;
-}
-
-void SyncedMemory::set_gpu_data(void* data) {
-  NO_GPU;
-}
-
 void* SyncedMemory::mutable_cpu_data() {
   to_cpu();
   head_ = HEAD_AT_CPU;
   return cpu_ptr_;
 }
 
-void* SyncedMemory::mutable_gpu_data() {
-  NO_GPU;
-  return NULL;
-}
-
 }  // namespace caffe
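The surviving set_cpu_data() adopts a caller-owned buffer; the context line own_cpu_data_ = false above records that SyncedMemory must not free it. A hedged sketch (the buffer name and size are illustrative, and the no-free guarantee follows upstream Caffe's destructor, which this diff does not show):

    #include "caffe/syncedmem.hpp"

    void adopt_external_buffer() {
      static float buf[4];                   // caller-owned host storage
      caffe::SyncedMemory mem(sizeof(buf));
      mem.set_cpu_data(buf);                 // no copy; own_cpu_data_ becomes false
      // mem.cpu_data() now returns buf; the destructor will not free it.
    }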
