namespace caffe {

template <typename Dtype>
class Net {
 protected:
  /// @brief The network name
  string name_;
  /// @brief The phase: TRAIN or TEST
  Phase phase_;
  /// @brief Individual layers in the net
  vector<shared_ptr<Layer<Dtype> > > layers_;
  vector<string> layer_names_;
  map<string, int> layer_names_index_;
  vector<bool> layer_need_backward_;
  /// @brief the blobs storing intermediate results between the layers.
  vector<shared_ptr<Blob<Dtype> > > blobs_;
  vector<string> blob_names_;
  map<string, int> blob_names_index_;
  vector<bool> blob_need_backward_;
  /// bottom_vecs stores the vectors containing the input for each layer.
  /// They don't actually host the blobs (blobs_ does), so we simply store
  /// pointers.
  vector<vector<Blob<Dtype>*> > bottom_vecs_;
  vector<vector<int> > bottom_id_vecs_;
  vector<vector<bool> > bottom_need_backward_;
  /// top_vecs stores the vectors containing the output for each layer
  vector<vector<Blob<Dtype>*> > top_vecs_;
  vector<vector<int> > top_id_vecs_;
  /// Vector of weight in the loss (or objective) function of each net blob,
  /// indexed by blob_id.
  vector<Dtype> blob_loss_weights_;
  vector<vector<int> > param_id_vecs_;
  vector<int> param_owners_;
  vector<string> param_display_names_;
  vector<pair<int, int> > param_layer_indices_;
  map<string, int> param_names_index_;
  /// blob indices for the input and the output of the net
  vector<int> net_input_blob_indices_;
  vector<int> net_output_blob_indices_;
  vector<Blob<Dtype>*> net_input_blobs_;
  vector<Blob<Dtype>*> net_output_blobs_;
  /// The parameters in the network.
  vector<shared_ptr<Blob<Dtype> > > params_;
  vector<Blob<Dtype>*> learnable_params_;
  /**
   * The mapping from params_ -> learnable_params_: we have
   * learnable_param_ids_.size() == params_.size(),
   * and learnable_params_[learnable_param_ids_[i]] == params_[i].get()
   * if and only if params_[i] is an "owner"; otherwise, params_[i] is a sharer
   * and learnable_params_[learnable_param_ids_[i]] gives its owner.
   */
  vector<int> learnable_param_ids_;
  /// the learning rate multipliers for learnable_params_
  vector<float> params_lr_;
  vector<bool> has_params_lr_;
  /// the weight decay multipliers for learnable_params_
  vector<float> params_weight_decay_;
  vector<bool> has_params_decay_;
  /// The bytes of memory used by this net
  size_t memory_used_;
  /// Whether to compute and display debug info for the net.
  bool debug_info_;
  // Callbacks
  vector<Callback*> before_forward_;
  vector<Callback*> after_forward_;
  vector<Callback*> before_backward_;
  vector<Callback*> after_backward_;
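These members are pure bookkeeping, and their relationships are easiest to see through Net's public accessors. Below is a minimal sketch, assuming a working Caffe build; the model path "deploy.prototxt" is a placeholder for any deploy-style network definition. It walks the per-layer bottom/top pointer vectors (which alias entries of blobs_) and contrasts params_ with the de-duplicated learnable_params_.

// Minimal sketch: inspect a Net's bookkeeping via its public accessors.
// Assumes a working Caffe build; "deploy.prototxt" is a placeholder path.
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>
#include "caffe/caffe.hpp"

int main() {
  caffe::Net<float> net("deploy.prototxt", caffe::TEST);

  // layers_ and layer_names_ are parallel vectors: one entry per layer.
  const std::vector<std::string>& layer_names = net.layer_names();

  // blobs_ owns the intermediate blobs; bottom_vecs_/top_vecs_ only hold
  // raw pointers into it, one vector of pointers per layer.
  for (std::size_t i = 0; i < layer_names.size(); ++i) {
    std::cout << layer_names[i]
              << "  bottoms=" << net.bottom_vecs()[i].size()
              << "  tops=" << net.top_vecs()[i].size() << std::endl;
  }

  // params_ lists every parameter blob in layer order, while
  // learnable_params_ de-duplicates shared (weight-tied) parameters;
  // param_owners_[i] == -1 marks params_[i] as an "owner".
  std::cout << "params: " << net.params().size()
            << "  learnable (de-duplicated): "
            << net.learnable_params().size() << std::endl;
  return 0;
}

Note that bottom_vecs()[i] and top_vecs()[i] expose only raw Blob pointers; the blobs themselves stay alive in blobs_, which is why the class keeps shared_ptrs there and plain pointers everywhere else.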