@@ -20,6 +20,7 @@
 #include "nnet/nnet-nnet.h"
 #include "nnet/nnet-component.h"
 #include "nnet/nnet-activation.h"
+#include "nnet/nnet-biasedlinearity.h"
 
 int main(int argc, char *argv[]) {
   try {
@@ -58,13 +59,25 @@ int main(int argc, char *argv[]) { |
     {
       Output ko(model_out_filename, binary_write);
 
+      bool apply_scale = false;
+      BaseFloat scale = 1.0;
+
       for (int32 i=0; i<nnet.LayerCount(); ++i){
         Component *layer = nnet.Layer(i);
         if(layer->GetType()==Component::kDropout){
           Dropout *dp=dynamic_cast<Dropout*>(layer);
-          Scale *sc=new Scale(dp->InputDim(), dp->OutputDim(), NULL);
-          sc->SetScale(1- dp->GetDropRatio());
-          sc->Write(ko.Stream(), binary_write);
+          scale = 1.0 - dp->GetDropRatio();  // retention probability of the dropout layer
+          apply_scale = true;  // fold it into the next layer instead of writing a Scale component
+        } else if (apply_scale && layer->GetType()==Component::kBiasedLinearity){
+          BiasedLinearity *bl=dynamic_cast<BiasedLinearity*>(layer);
+          CuMatrix<BaseFloat> weight(bl->GetLinearityWeight());
+          weight.Scale(scale);  // absorb the dropout scaling into the affine weights
+          bl->SetLinearityWeight(weight, kNoTrans);
+          bl->Write(ko.Stream(), binary_write);
+          apply_scale = false;
+          scale = 1.0;
+        } else if (apply_scale) {
+          KALDI_ERR << "Layer " << i << " following the dropout layer is not supported yet!";
         } else {
           layer->Write(ko.Stream(), binary_write);
         }
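The change above stops emitting a standalone Scale component after each dropout layer and instead folds the factor 1 - drop_ratio into the weight matrix of the following BiasedLinearity layer. The two are equivalent because the scaling is linear: W((1-p)x) = ((1-p)W)x, so multiplying the weights by the retention probability 1 - p preserves the expected pre-activations at test time while producing a network with one fewer component. Below is a minimal standalone sketch of the weight-folding step; `FoldDropoutIntoWeights` and the plain `std::vector` storage are illustrative stand-ins for the Kaldi CuMatrix-based code, not part of this commit.

```cpp
#include <cstdio>
#include <vector>

// Fold the dropout retention probability (1 - drop_ratio) into the weight
// matrix of the affine layer that follows the dropout, so no extra Scale
// component is needed at test time.
void FoldDropoutIntoWeights(std::vector<std::vector<float> > *weights,
                            float drop_ratio) {
  const float scale = 1.0f - drop_ratio;  // expected fraction of active units
  for (size_t r = 0; r < weights->size(); ++r)
    for (size_t c = 0; c < (*weights)[r].size(); ++c)
      (*weights)[r][c] *= scale;
}

int main() {
  // 2x2 toy weight matrix standing in for a BiasedLinearity layer.
  std::vector<std::vector<float> > w(2, std::vector<float>(2, 1.0f));
  FoldDropoutIntoWeights(&w, 0.5f);  // dropout ratio p = 0.5
  std::printf("%.2f\n", w[0][0]);    // prints 0.50 == 1.0 * (1 - 0.5)
  return 0;
}
```

Note that the patch only handles a BiasedLinearity immediately after the dropout layer; the KALDI_ERR branch rejects any other layer type in that position rather than writing an incorrectly scaled model.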