
Commit 418dc57

zhoonit authored and jijoongmoon committed
[Tensor] Enable default copy constructor
This patch enables the default copy constructor to fix #281.

- Added sharedConstTensor for safety

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <[email protected]>
1 parent 0e78263 · commit 418dc57

26 files changed: +247, -135 lines
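The common thread across the diff: layer inputs and outputs move from `sharedTensor` to `sharedConstTensor`. Neither alias is defined in this patch; the following is a minimal sketch of what they presumably look like, inferred from how the identifiers are used below (the exact definitions are an assumption, not part of this commit):

```cpp
#include <memory>

namespace nntrainer {
class Tensor; // defined in tensor.h

// Presumed aliases: the const variant lets a layer hand out results
// that callers cannot mutate in place.
using sharedTensor = std::shared_ptr<Tensor>;
using sharedConstTensor = std::shared_ptr<const Tensor>;
} // namespace nntrainer
```

A `std::shared_ptr<Tensor>` converts implicitly to `std::shared_ptr<const Tensor>`, which is why call sites such as `mainNet.forwarding(MAKE_SHARED_TENSOR(q_in))` in the DeepQ diff keep working unchanged.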

Applications/ReinforcementLearning/DeepQ/jni/main.cpp

Lines changed: 10 additions & 9 deletions
@@ -355,7 +355,7 @@ int main(int argc, char **argv) {
        * @brief get action with input State with mainNet
        */
       nntrainer::Tensor in_tensor;
-      nntrainer::sharedTensor test;
+      nntrainer::sharedConstTensor test;
       try {
         in_tensor = nntrainer::Tensor({input});
       } catch (...) {
@@ -372,7 +372,7 @@ int main(int argc, char **argv) {
         targetNet.finalize();
         return 0;
       }
-      float *data = test->getData();
+      const float *data = test->getData();
       unsigned int len = test->getDim().getDataLen();
       std::vector<float> temp(data, data + len);
       action.push_back(argmax(temp));
@@ -474,7 +474,7 @@ int main(int argc, char **argv) {
       /**
        * @brief run forward propagation with mainNet
        */
-      nntrainer::sharedTensor Q;
+      nntrainer::sharedConstTensor Q;
       try {
         Q = mainNet.forwarding(MAKE_SHARED_TENSOR(q_in));
       } catch (...) {
@@ -487,7 +487,7 @@ int main(int argc, char **argv) {
       /**
        * @brief run forward propagation with targetNet
        */
-      nntrainer::sharedTensor NQ;
+      nntrainer::sharedConstTensor NQ;
       try {
         NQ = targetNet.forwarding(MAKE_SHARED_TENSOR(nq_in));
       } catch (...) {
@@ -496,22 +496,23 @@ int main(int argc, char **argv) {
         targetNet.finalize();
         return -1;
       }
-      float *nqa = NQ->getData();
+      const float *nqa = NQ->getData();

       /**
        * @brief Update Q values & udpate mainNetwork
        */
+      nntrainer::Tensor tempQ = *Q;
       for (unsigned int i = 0; i < in_Exp.size(); i++) {
         if (in_Exp[i].done) {
-          Q->setValue(i, 0, 0, (int)in_Exp[i].action[0],
-                      (float)in_Exp[i].reward);
+          tempQ.setValue(i, 0, 0, (int)in_Exp[i].action[0],
+                         (float)in_Exp[i].reward);
         } else {
           float next = (nqa[i * NQ->getWidth()] > nqa[i * NQ->getWidth() + 1])
                          ? nqa[i * NQ->getWidth()]
                          : nqa[i * NQ->getWidth() + 1];
           try {
-            Q->setValue(i, 0, 0, (int)in_Exp[i].action[0],
-                        (float)in_Exp[i].reward + DISCOUNT * next);
+            tempQ.setValue(i, 0, 0, (int)in_Exp[i].action[0],
+                           (float)in_Exp[i].reward + DISCOUNT * next);
           } catch (...) {
             std::cerr << "Error during set value" << std::endl;
             mainNet.finalize();
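The `tempQ` copy above is the behavioral consequence of the type change: `forwarding()` now hands back a pointer-to-const, so the non-const `setValue()` can no longer be called through `Q`. The Q-values are instead edited on a mutable copy. A standalone sketch of the same pattern (hypothetical `Mat` stand-in, not nntrainer code):

```cpp
#include <memory>

// Hypothetical stand-in for Tensor, only to show the const mechanics.
struct Mat {
  float v[4] = {0.0f, 0.0f, 0.0f, 0.0f};
  void set(int i, float x) { v[i] = x; } // non-const member
};

int main() {
  // Like sharedConstTensor Q = net.forwarding(...): a read-only handle.
  std::shared_ptr<const Mat> Q = std::make_shared<Mat>();

  // Q->set(0, 1.0f);  // would not compile: *Q is const
  Mat tempQ = *Q;      // copy out of the const view, as the hunk does
  tempQ.set(0, 1.0f);  // mutate the private copy instead
  return 0;
}
```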

nntrainer/include/activation_layer.h

Lines changed: 4 additions & 4 deletions
@@ -64,14 +64,14 @@ class ActivationLayer : public Layer {
   void save(std::ofstream &file){/* noop */};

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief copy layer

nntrainer/include/bn_layer.h

Lines changed: 4 additions & 4 deletions
@@ -62,14 +62,14 @@ class BatchNormalizationLayer : public Layer {
   BatchNormalizationLayer &operator=(BatchNormalizationLayer &&rhs) = default;

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief copy layer

nntrainer/include/conv2d_layer.h

Lines changed: 5 additions & 5 deletions
@@ -84,14 +84,14 @@ class Conv2DLayer : public Layer {
   void save(std::ofstream &file);

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief copy layer
@@ -140,7 +140,7 @@ class Conv2DLayer : public Layer {
    * @retval #ML_ERROR_NONE Successful.
    * @retval #ML_ERROR_INVALID_PARAMETER invalid parameter.
    */
-  int conv2d(float *in, TensorDim indim, float *kernel, TensorDim kdim,
+  int conv2d(float *in, TensorDim indim, const float *kernel, TensorDim kdim,
              float *out, unsigned int const *stride, float bias);

   /* TO DO : support keras type of padding */

nntrainer/include/fc_layer.h

Lines changed: 4 additions & 4 deletions
@@ -66,14 +66,14 @@ class FullyConnectedLayer : public Layer {
   void save(std::ofstream &file);

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief copy layer

nntrainer/include/flatten_layer.h

Lines changed: 4 additions & 4 deletions
@@ -73,14 +73,14 @@ class FlattenLayer : public Layer {
   void save(std::ofstream &file){};

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief copy layer

nntrainer/include/input_layer.h

Lines changed: 4 additions & 4 deletions
@@ -74,14 +74,14 @@ class InputLayer : public Layer {
   void save(std::ofstream &file){};

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief Initializer of Input Layer

nntrainer/include/layer.h

Lines changed: 3 additions & 2 deletions
@@ -171,15 +171,16 @@ class Layer {
    * @param[in] in List of Input Tensors taken by this layer
    * @retval List of Output Tensors
    */
-  virtual sharedTensor forwarding(sharedTensor in) = 0;
+  virtual sharedConstTensor forwarding(sharedConstTensor in) = 0;

   /**
    * @brief Back Propagation of a layer
    * @param[in] in List of Derivative Tensor from the next layer
    * @param[in] iteration Iteration value for the Optimizer
    * @retval Derivative List of Tensor for the previous layer
    */
-  virtual sharedTensor backwarding(sharedTensor in, int iteration) = 0;
+  virtual sharedConstTensor backwarding(sharedConstTensor in,
+                                        int iteration) = 0;

   /**
    * @brief Initialize the layer
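This is the root of the fan-out: every concrete layer in the headers above and below overrides these two pure virtuals, so their signatures change in lockstep. For an implementer, the contract is now: consume a read-only input, publish a result the caller cannot mutate. A hedged sketch of a conforming override against a stripped-down interface (hypothetical `IdentityLayer`; not part of this patch):

```cpp
#include <memory>

// Stripped-down stand-ins, only to show the shape of the new contract.
class Tensor {};
using sharedConstTensor = std::shared_ptr<const Tensor>;

class Layer {
public:
  virtual ~Layer() = default;
  virtual sharedConstTensor forwarding(sharedConstTensor in) = 0;
  virtual sharedConstTensor backwarding(sharedConstTensor in,
                                        int iteration) = 0;
};

class IdentityLayer : public Layer {
public:
  sharedConstTensor forwarding(sharedConstTensor in) override {
    // Work on a fresh, mutable tensor internally...
    auto out = std::make_shared<Tensor>(*in);
    // ...then publish it read-only: shared_ptr<Tensor> converts
    // implicitly to shared_ptr<const Tensor>.
    return out;
  }
  sharedConstTensor backwarding(sharedConstTensor in,
                                int /*iteration*/) override {
    return in; // identity: pass the derivative straight through
  }
};
```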

nntrainer/include/lazy_tensor.h

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ class LazyTensor {
    * @brief Constructor of Lazy Tensor, Tensor is copied to gaurantee
    * immutability
    */
-  LazyTensor(const Tensor &from) { target = Tensor(from); };
+  LazyTensor(const Tensor &from) { target.copy(from); };

   /**
    * @brief Wrapper method of add_i. see tensor.h for more detail
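The one-line LazyTensor change is the subtle part of the patch. With the copy constructor now defaulted, `Tensor(from)` presumably copies the handle to the underlying buffer rather than the buffer itself, so the constructor switches to `Tensor::copy()` to keep the immutability the comment promises. A standalone sketch of the hazard, assuming a reference-counted buffer (hypothetical `Buf`, not nntrainer's Tensor):

```cpp
#include <memory>
#include <vector>

// Hypothetical type with a ref-counted buffer, showing why a defaulted
// copy constructor can be shallow.
struct Buf {
  std::shared_ptr<std::vector<float>> data =
    std::make_shared<std::vector<float>>(1, 0.0f);

  Buf() = default;
  Buf(const Buf &) = default; // defaulted copy: shares `data`

  // Explicit deep copy, analogous in spirit to Tensor::copy()
  void copy(const Buf &from) {
    data = std::make_shared<std::vector<float>>(*from.data);
  }
};

int main() {
  Buf a;
  Buf shallow(a); // like `target = Tensor(from)`: same buffer as `a`
  Buf deep;
  deep.copy(a);   // like `target.copy(from)`: its own buffer

  (*shallow.data)[0] = 9.0f; // visible through `a` too: immutability broken
  (*deep.data)[0] = 1.0f;    // `a` is unaffected
  return 0;
}
```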

nntrainer/include/loss_layer.h

Lines changed: 5 additions & 5 deletions
@@ -42,22 +42,22 @@ class LossLayer : public Layer {
   ~LossLayer(){};

   /**
-   * @copydoc Layer::forwarding(sharedTensor in)
+   * @copydoc Layer::forwarding(sharedConstTensor in)
    */
-  sharedTensor forwarding(sharedTensor in);
+  sharedConstTensor forwarding(sharedConstTensor in);

   /**
    * @brief Forward Propagation of a layer
    * @param[in] in List of Input Tensors taken by this layer
    * @param[in] label List of Label Tensors for the model
    * @retval List of Input Tensors as it is.
    */
-  sharedTensor forwarding(sharedTensor in, sharedTensor label);
+  sharedConstTensor forwarding(sharedConstTensor in, sharedConstTensor label);

   /**
-   * @copydoc Layer::backwarding(sharedTensor in, int iteration)
+   * @copydoc Layer::backwarding(sharedConstTensor in, int iteration)
    */
-  sharedTensor backwarding(sharedTensor in, int iteration);
+  sharedConstTensor backwarding(sharedConstTensor in, int iteration);

   /**
    * @brief read layer Weight & Bias data from file
