Wednesday, 5 December 2018

testDNN analysis 4

% pretrainDBN: pre-training the Deep Belief Nets (DBN) model by Contrastive Divergence Learning
%
% dbn = pretrainDBN(dbn, V, opts)
%
%
%Output parameters:
% dbn: the trained Deep Belief Nets (DBN) model
%
%
%Input parameters:
% dbn: the initial Deep Belief Nets (DBN) model
% V: visible (input) variables, where # of row is number of data and # of col is # of visible (input) nodes
% opts (optional): options
%
% options (default value):
%  opts.LayerNum: # of training RBMs counted from input layer (all layers)
%  opts.MaxIter: Maximum iteration number (100)
%  opts.InitialMomentum: Initial momentum until InitialMomentumIter (0.5)
%  opts.InitialMomentumIter: Iteration number for initial momentum (5)
%  opts.FinalMomentum: Final momentum after InitialMomentumIter (0.9)
%  opts.WeightCost: Weight cost (0.0002)
%  opts.DropOutRate: List of Dropout rates for each layer (0)
%  opts.StepRatio: Learning step size (0.01)
%  opts.BatchSize: # of mini-batch data (# of all data)
%  opts.Verbose: verbose or not (false)
%  opts.SparseQ: q parameter of sparse learning (0)
%  opts.SparseLambda: lambda parameter (weight) of sparse learning (0)
%
%
%Example:
% datanum = 1024;
% outputnum = 16;
% hiddennum = 8;
% inputnum = 4;
%
% inputdata = rand(datanum, inputnum);
% outputdata = rand(datanum, outputnum);
%
% dbn = randDBN([inputnum, hiddennum, outputnum]);
% dbn = pretrainDBN( dbn, inputdata );
% dbn = SetLinearMapping( dbn, inputdata, outputdata );
% dbn = trainDBN( dbn, inputdata, outputdata );
%
% estimate = v2h( dbn, inputdata );
%
%
%Reference:
%for details of the dropout
% Hinton et al, Improving neural networks by preventing co-adaptation of feature detectors, 2012.
%for details of the sparse learning
% Lee et al, Sparse deep belief net model for visual area V2, NIPS 2008.
%
%
%Version: 20130821


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Deep Neural Network:                                     %
%                                                          %
% Copyright (C) 2013 Masayuki Tanaka. All rights reserved. %
%                    mtanaka@ctrl.titech.ac.jp             %
%                                                          %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function dbn = pretrainDBN(dbn, V, opts)
% Greedy layer-wise pre-training of a Deep Belief Net by Contrastive
% Divergence: each RBM in dbn.rbm is trained in turn on the hidden
% activations produced by the layer below it.
%
% Inputs:
%  dbn  - initial DBN model; dbn.rbm is a cell array of RBM structs
%  V    - visible data matrix (rows = samples, cols = visible nodes)
%  opts - (optional) training options; see the file header for the list
%
% Output:
%  dbn  - DBN model with each RBM pre-trained

LayerNum = numel( dbn.rbm );
DropOutRate = zeros(LayerNum,1);  % default: no dropout in any layer

X = V;  % current representation fed to the layer being trained

% Use exist(...,'var') so that a file or folder named "opts" on the
% MATLAB path cannot be mistaken for the (missing) input argument.
if( exist('opts', 'var') )
 if( isfield(opts,'LayerNum') )
  LayerNum = opts.LayerNum;
 end
 if( isfield(opts,'DropOutRate') )
  DropOutRate = opts.DropOutRate;
  if( numel( DropOutRate ) == 1 )
   % A scalar dropout rate applies uniformly to every layer
   DropOutRate = ones(LayerNum,1) * DropOutRate;
  end
 end
else
 opts = [];  % assigning a field below promotes [] to a struct
end

for i=1:LayerNum
    % Each layer trains with its own dropout rate
    opts.DropOutRate = DropOutRate(i);
    dbn.rbm{i} = pretrainRBM(dbn.rbm{i}, X, opts);
    % Propagate upward: hidden activations of layer i become the
    % visible data for layer i+1.
    X = v2h( dbn.rbm{i}, X );
end