tesseract  5.0.0
plumbing.cpp
Go to the documentation of this file.
// File:        plumbing.cpp
// Description: Base class for networks that organize other networks
//              eg series or parallel.
// Author:      Ray Smith
//
// (C) Copyright 2014, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
18 
19 #include "plumbing.h"
20 
21 namespace tesseract {
22 
23 // ni_ and no_ will be set by AddToStack.
24 Plumbing::Plumbing(const std::string &name) : Network(NT_PARALLEL, name, 0, 0) {}
25 
26 // Suspends/Enables training by setting the training_ flag. Serialize and
27 // DeSerialize only operate on the run-time data if state is false.
30  for (auto &i : stack_) {
31  i->SetEnableTraining(state);
32  }
33 }
34 
35 // Sets flags that control the action of the network. See NetworkFlags enum
36 // for bit values.
37 void Plumbing::SetNetworkFlags(uint32_t flags) {
39  for (auto &i : stack_) {
40  i->SetNetworkFlags(flags);
41  }
42 }
43 
44 // Sets up the network for training. Initializes weights using weights of
45 // scale `range` picked according to the random number generator `randomizer`.
46 // Note that randomizer is a borrowed pointer that should outlive the network
47 // and should not be deleted by any of the networks.
48 // Returns the number of weights initialized.
49 int Plumbing::InitWeights(float range, TRand *randomizer) {
50  num_weights_ = 0;
51  for (auto &i : stack_) {
52  num_weights_ += i->InitWeights(range, randomizer);
53  }
54  return num_weights_;
55 }
56 
57 // Recursively searches the network for softmaxes with old_no outputs,
58 // and remaps their outputs according to code_map. See network.h for details.
59 int Plumbing::RemapOutputs(int old_no, const std::vector<int> &code_map) {
60  num_weights_ = 0;
61  for (auto &i : stack_) {
62  num_weights_ += i->RemapOutputs(old_no, code_map);
63  }
64  return num_weights_;
65 }
66 
67 // Converts a float network to an int network.
69  for (auto &i : stack_) {
70  i->ConvertToInt();
71  }
72 }
73 
74 // Provides a pointer to a TRand for any networks that care to use it.
75 // Note that randomizer is a borrowed pointer that should outlive the network
76 // and should not be deleted by any of the networks.
77 void Plumbing::SetRandomizer(TRand *randomizer) {
78  for (auto &i : stack_) {
79  i->SetRandomizer(randomizer);
80  }
81 }
82 
83 // Adds the given network to the stack.
84 void Plumbing::AddToStack(Network *network) {
85  if (stack_.empty()) {
86  ni_ = network->NumInputs();
87  no_ = network->NumOutputs();
88  } else if (type_ == NT_SERIES) {
89  // ni is input of first, no output of last, others match output to input.
90  ASSERT_HOST(no_ == network->NumInputs());
91  no_ = network->NumOutputs();
92  } else {
93  // All parallel types. Output is sum of outputs, inputs all match.
94  ASSERT_HOST(ni_ == network->NumInputs());
95  no_ += network->NumOutputs();
96  }
97  stack_.push_back(network);
98 }
99 
100 // Sets needs_to_backprop_ to needs_backprop and calls on sub-network
101 // according to needs_backprop || any weights in this network.
102 bool Plumbing::SetupNeedsBackprop(bool needs_backprop) {
103  if (IsTraining()) {
104  needs_to_backprop_ = needs_backprop;
105  bool retval = needs_backprop;
106  for (auto &i : stack_) {
107  if (i->SetupNeedsBackprop(needs_backprop)) {
108  retval = true;
109  }
110  }
111  return retval;
112  }
113  // Frozen networks don't do backprop.
114  needs_to_backprop_ = false;
115  return false;
116 }
117 
118 // Returns an integer reduction factor that the network applies to the
119 // time sequence. Assumes that any 2-d is already eliminated. Used for
120 // scaling bounding boxes of truth data.
121 // WARNING: if GlobalMinimax is used to vary the scale, this will return
122 // the last used scale factor. Call it before any forward, and it will return
123 // the minimum scale factor of the paths through the GlobalMinimax.
125  return stack_[0]->XScaleFactor();
126 }
127 
128 // Provides the (minimum) x scale factor to the network (of interest only to
129 // input units) so they can determine how to scale bounding boxes.
130 void Plumbing::CacheXScaleFactor(int factor) {
131  for (auto &i : stack_) {
132  i->CacheXScaleFactor(factor);
133  }
134 }
135 
136 // Provides debug output on the weights.
138  for (auto &i : stack_) {
139  i->DebugWeights();
140  }
141 }
142 
143 // Returns a set of strings representing the layer-ids of all layers below.
144 void Plumbing::EnumerateLayers(const std::string *prefix, std::vector<std::string> &layers) const {
145  for (size_t i = 0; i < stack_.size(); ++i) {
146  std::string layer_name;
147  if (prefix) {
148  layer_name = *prefix;
149  }
150  layer_name += ":" + std::to_string(i);
151  if (stack_[i]->IsPlumbingType()) {
152  auto *plumbing = static_cast<Plumbing *>(stack_[i]);
153  plumbing->EnumerateLayers(&layer_name, layers);
154  } else {
155  layers.push_back(layer_name);
156  }
157  }
158 }
159 
160 // Returns a pointer to the network layer corresponding to the given id.
161 Network *Plumbing::GetLayer(const char *id) const {
162  char *next_id;
163  int index = strtol(id, &next_id, 10);
164  if (index < 0 || static_cast<unsigned>(index) >= stack_.size()) {
165  return nullptr;
166  }
167  if (stack_[index]->IsPlumbingType()) {
168  auto *plumbing = static_cast<Plumbing *>(stack_[index]);
169  ASSERT_HOST(*next_id == ':');
170  return plumbing->GetLayer(next_id + 1);
171  }
172  return stack_[index];
173 }
174 
175 // Returns a pointer to the learning rate for the given layer id.
176 float *Plumbing::LayerLearningRatePtr(const char *id) {
177  char *next_id;
178  int index = strtol(id, &next_id, 10);
179  if (index < 0 || static_cast<unsigned>(index) >= stack_.size()) {
180  return nullptr;
181  }
182  if (stack_[index]->IsPlumbingType()) {
183  auto *plumbing = static_cast<Plumbing *>(stack_[index]);
184  ASSERT_HOST(*next_id == ':');
185  return plumbing->LayerLearningRatePtr(next_id + 1);
186  }
187  if (static_cast<unsigned>(index) >= learning_rates_.size()) {
188  return nullptr;
189  }
190  return &learning_rates_[index];
191 }
192 
193 // Writes to the given file. Returns false in case of error.
194 bool Plumbing::Serialize(TFile *fp) const {
195  if (!Network::Serialize(fp)) {
196  return false;
197  }
198  uint32_t size = stack_.size();
199  // Can't use PointerVector::Serialize here as we need a special DeSerialize.
200  if (!fp->Serialize(&size)) {
201  return false;
202  }
203  for (uint32_t i = 0; i < size; ++i) {
204  if (!stack_[i]->Serialize(fp)) {
205  return false;
206  }
207  }
209  return false;
210  }
211  return true;
212 }
213 
214 // Reads from the given file. Returns false in case of error.
216  for (auto data : stack_) {
217  delete data;
218  }
219  stack_.clear();
220  no_ = 0; // We will be modifying this as we AddToStack.
221  uint32_t size;
222  if (!fp->DeSerialize(&size)) {
223  return false;
224  }
225  for (uint32_t i = 0; i < size; ++i) {
226  Network *network = CreateFromFile(fp);
227  if (network == nullptr) {
228  return false;
229  }
230  AddToStack(network);
231  }
233  return false;
234  }
235  return true;
236 }
237 
238 // Updates the weights using the given learning rate, momentum and adam_beta.
239 // num_samples is used in the adam computation iff use_adam_ is true.
240 void Plumbing::Update(float learning_rate, float momentum, float adam_beta, int num_samples) {
241  for (size_t i = 0; i < stack_.size(); ++i) {
243  if (i < learning_rates_.size()) {
244  learning_rate = learning_rates_[i];
245  } else {
246  learning_rates_.push_back(learning_rate);
247  }
248  }
249  if (stack_[i]->IsTraining()) {
250  stack_[i]->Update(learning_rate, momentum, adam_beta, num_samples);
251  }
252  }
253 }
254 
255 // Sums the products of weight updates in *this and other, splitting into
256 // positive (same direction) in *same and negative (different direction) in
257 // *changed.
258 void Plumbing::CountAlternators(const Network &other, TFloat *same, TFloat *changed) const {
259  ASSERT_HOST(other.type() == type_);
260  const auto *plumbing = static_cast<const Plumbing *>(&other);
261  ASSERT_HOST(plumbing->stack_.size() == stack_.size());
262  for (size_t i = 0; i < stack_.size(); ++i) {
263  stack_[i]->CountAlternators(*plumbing->stack_[i], same, changed);
264  }
265 }
266 
267 } // namespace tesseract.
#define ASSERT_HOST(x)
Definition: errcode.h:59
TrainingState
Definition: network.h:90
@ NT_PARALLEL
Definition: network.h:47
@ NT_SERIES
Definition: network.h:52
double TFloat
Definition: tesstypes.h:39
@ NF_LAYER_SPECIFIC_LR
Definition: network.h:85
bool DeSerialize(std::string &data)
Definition: serialis.cpp:94
bool Serialize(const std::string &data)
Definition: serialis.cpp:107
int32_t network_flags_
Definition: network.h:303
NetworkType type_
Definition: network.h:300
int NumOutputs() const
Definition: network.h:125
bool needs_to_backprop_
Definition: network.h:302
virtual void SetEnableTraining(TrainingState state)
Definition: network.cpp:113
static Network * CreateFromFile(TFile *fp)
Definition: network.cpp:217
bool IsTraining() const
Definition: network.h:113
virtual bool Serialize(TFile *fp) const
Definition: network.cpp:158
int NumInputs() const
Definition: network.h:122
int32_t num_weights_
Definition: network.h:306
virtual void SetNetworkFlags(uint32_t flags)
Definition: network.cpp:131
NetworkType type() const
Definition: network.h:110
void SetEnableTraining(TrainingState state) override
Definition: plumbing.cpp:28
bool DeSerialize(TFile *fp) override
Definition: plumbing.cpp:215
void CacheXScaleFactor(int factor) override
Definition: plumbing.cpp:130
int XScaleFactor() const override
Definition: plumbing.cpp:124
void ConvertToInt() override
Definition: plumbing.cpp:68
TESS_API void EnumerateLayers(const std::string *prefix, std::vector< std::string > &layers) const
Definition: plumbing.cpp:144
bool SetupNeedsBackprop(bool needs_backprop) override
Definition: plumbing.cpp:102
int InitWeights(float range, TRand *randomizer) override
Definition: plumbing.cpp:49
void SetRandomizer(TRand *randomizer) override
Definition: plumbing.cpp:77
virtual void AddToStack(Network *network)
Definition: plumbing.cpp:84
Plumbing(const std::string &name)
Definition: plumbing.cpp:24
int RemapOutputs(int old_no, const std::vector< int > &code_map) override
Definition: plumbing.cpp:59
TESS_API float * LayerLearningRatePtr(const char *id)
Definition: plumbing.cpp:176
void SetNetworkFlags(uint32_t flags) override
Definition: plumbing.cpp:37
void DebugWeights() override
Definition: plumbing.cpp:137
std::vector< Network * > stack_
Definition: plumbing.h:150
void CountAlternators(const Network &other, TFloat *same, TFloat *changed) const override
Definition: plumbing.cpp:258
TESS_API Network * GetLayer(const char *id) const
Definition: plumbing.cpp:161
bool Serialize(TFile *fp) const override
Definition: plumbing.cpp:194
std::vector< float > learning_rates_
Definition: plumbing.h:153
bool IsPlumbingType() const override
Definition: plumbing.h:48
void Update(float learning_rate, float momentum, float adam_beta, int num_samples) override
Definition: plumbing.cpp:240