tonghe90 / textspotter


Is CPU mode supported?

eugene123tw opened this issue · comments

I kept getting errors while building the project with CPU_ONLY.
I also found that the forward and backward passes of at_layer.cpp are not implemented.
Have you considered adding a CPU implementation?

@eugene123tw The core code supports the CPU version. You can add our custom layers (att_lstm, reshape_layer, point_bilinear_layer, transpose_layer) and the other Python layers to the newest version of Caffe.

I did not understand the above comment by @tonghe90. @eugene123tw or @tonghe90, could you please elaborate on how to run the project with CPU support only?

Hi @prajwalkr, you can change this line to caffe.set_mode_cpu() and uncomment the next line. I didn't test this, but it should work.

Hi @tonghe90, thank you for your reply. I have tried that, but it does not work. It gives an error saying "Not implemented".

@prajwalkr Which layer?

@tonghe90 reverse_axis_layer.cpp:37] Not Implemented Yet

Hi @prajwalkr, if you want to use the CPU version, you can change reverse_axis_layer like this:

@tonghe90 I tried changing the code as you suggested and ran make all after that, but I keep getting "undefined reference" errors. Below, I have pasted a copy of my modified reverse_axis_layer.cpp:

#include "caffe/layers/reverse_axis_layer.hpp"


namespace caffe {

// Copy each element of from_data into to_data with its index along `axis`
// reversed; counts[axis] is the stride of that axis and axis_count its size.
template <typename Dtype>
void reverse_cpu(const int count, const Dtype* from_data, Dtype* to_data, 
	const int* counts, const int axis_count, const int axis) {
	for(int index=0; index<count; index++) {
		int ind=(index/counts[axis])%axis_count;
		int to_index=counts[axis]*(axis_count-2*ind-1)+index;
		*(to_data+to_index)=*(from_data+index);
	}
}

template <typename Dtype>
void ReverseAxisLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
        const vector<Blob<Dtype>*>& top) {
	CHECK_NE(bottom[0], top[0])<<this->type()<<" does not support in-place computation.";
	reverse_param_=this->layer_param_.reverse_axis_param();
}

template <typename Dtype>
void ReverseAxisLayer<Dtype>::Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
	vector<int> shape=bottom[0]->shape();
	axis_=reverse_param_.axis();
	CHECK_GT(shape.size(), 0)<<this->type()<<" does not support 0 axes blob.";
	CHECK_GE(axis_, 0)<<"axis must be greater than or equal to 0.";
	CHECK_LT(axis_, shape.size())<<"axis must be less than bottom's dimension.";
	top[0]->ReshapeLike(*bottom[0]);
	const int dim=shape.size();
	shape.clear();
	shape.push_back(dim);
	bottom_counts_.Reshape(shape);
	// Cache per-axis strides: bottom_counts_[i] = bottom[0]->count(i + 1),
	// so the innermost axis ends up with stride 1.
	int* p=bottom_counts_.mutable_cpu_data();
	for (int i=1; i<dim; i++) {
		*p=bottom[0]->count(i);
		p++;
	}
	*p=1;
}

template <typename Dtype>
void ReverseAxisLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom, 
		const vector<Blob<Dtype>*>& top) {
	reverse_cpu<Dtype>(bottom[0]->count(), bottom[0]->cpu_data(), 
		top[0]->mutable_cpu_data(), bottom_counts_.cpu_data(), 
		bottom[0]->shape(axis_), axis_);
}


template <typename Dtype>
void ReverseAxisLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    	const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {
	if (!propagate_down[0]) {
		return;
	}
	reverse_cpu<Dtype>(bottom[0]->count(), top[0]->cpu_diff(), 
		bottom[0]->mutable_cpu_diff(), bottom_counts_.cpu_data(), 
		bottom[0]->shape(axis_), axis_);
}

INSTANTIATE_CLASS(ReverseAxisLayer);
REGISTER_LAYER_CLASS(ReverseAxis);

}  // namespace caffe
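
One possible cause of the "undefined reference" errors, assuming the layer header still declares Forward_gpu and Backward_gpu: reverse_axis_layer.cu is not compiled in a CPU_ONLY build, so those declarations never get definitions. Caffe's usual workaround is to stub them out with the STUB_GPU macro under CPU_ONLY. A minimal sketch, placed just before INSTANTIATE_CLASS, could look like this:

#ifdef CPU_ONLY
// Stub out the declared-but-undefined GPU forward/backward so a CPU_ONLY
// build links; the generated stubs abort if they are ever called.
STUB_GPU(ReverseAxisLayer);
#endif

STUB_GPU is defined in caffe/util/device_alternate.hpp, which the standard layer headers normally pull in, so no extra include should be needed.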

@prajwalkr I have added the CPU version of the reverse layer.