GPUMLib  0.2.2
GPU Machine Learning Library
MultipleBackPropagation.cu
1 /*
2  Noel Lopes is an Assistant Professor at the Polytechnic Institute of Guarda, Portugal
3  Copyright (C) 2009, 2010, 2011, 2012 Noel de Jesus Mendonça Lopes
4 
5  This file is part of GPUMLib.
6 
7  GPUMLib is free software: you can redistribute it and/or modify
8  it under the terms of the GNU General Public License as published by
9  the Free Software Foundation, either version 3 of the License, or
10  (at your option) any later version.
11 
12  This program is distributed in the hope that it will be useful,
13  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15  GNU General Public License for more details.
16 
17  You should have received a copy of the GNU General Public License
18  along with this program. If not, see <http://www.gnu.org/licenses/>.
19 */
20 
21 #include "MultipleBackPropagation.h"
22 
23 namespace GPUMLib {
24 
25 MultipleBackPropagation::MultipleBackPropagation(HostArray<int> & sizeLayers, HostArray<bool> & selectiveNeurons, HostArray<int> & sizeAdditionalSpaceLayers, HostMatrix<cudafloat> & trainInputPatterns, HostMatrix<cudafloat> & trainDesiredOutputPatterns, cudafloat initialLearningRate) {
26  int processingLayers = sizeLayers.Length() - 1;
27 
28  assert(selectiveNeurons.Length() == processingLayers);
29 
30  int outputsSpaceLayer = 0;
31  for(int l = 0; l < processingLayers; l++) {
32  if (selectiveNeurons[l]) outputsSpaceLayer += sizeLayers[l + 1];
33  }
34 
35  assert(outputsSpaceLayer > 0);
36 
37  int additionalLayers = sizeAdditionalSpaceLayers.Length();
38  HostArray<int> sizeSpaceLayers(additionalLayers + 1);
39 
40  for(int l = 0; l < additionalLayers; l++) {
41  assert(sizeAdditionalSpaceLayers[l] > 0);
42  sizeSpaceLayers[l] = sizeAdditionalSpaceLayers[l];
43  }
44  sizeSpaceLayers[additionalLayers] = outputsSpaceLayer;
45 
46  CreateNetwork(sizeLayers, &sizeSpaceLayers, &selectiveNeurons, trainInputPatterns, trainDesiredOutputPatterns, initialLearningRate);
47 
48  layerHasSelectiveNeurons = selectiveNeurons;
49 }
50 
52  assert(layer >= 0 && layer < layerHasSelectiveNeurons.Length());
53  return layerHasSelectiveNeurons[layer];
54 }
55 
57  return spaceLayers.Length();
58 }
59 
61  assert(layer >= 0 && layer < spaceLayers.Length());
62  return spaceLayers[layer].neurons;
63 }
64 
66  assert(layer >= 0 && layer < spaceLayers.Length());
67  return HostArray<cudafloat>(spaceLayers[layer].d_weights);
68 }
69 
71  assert(layer >= 0 && layer < spaceLayers.Length());
72  spaceLayers[layer].d_weights = weights;
73 }
74 
76  return HostArray<cudafloat>(selectiveInputLayerSpaceNetwork->d_weights);
77 }
78 
80  selectiveInputLayerSpaceNetwork->d_weights = weights;
81 }
82 
84  return HostArray<cudafloat>(selectiveInputLayerSpaceNetwork->d_bias);
85 }
86 
88  selectiveInputLayerSpaceNetwork->d_bias = bias;
89 }
90 
91 }
void SetLayerWeightsSpaceNetwork(int layer, HostArray< cudafloat > &weights)
Create an array of any type, on the host, that automatically manages the memory used to hold its elem...
Definition: HostArray.h:40
int GetNumberNeuronsSpaceNetwork(int layer) const
MultipleBackPropagation(HostArray< int > &sizeLayers, HostArray< bool > &selectiveNeurons, HostArray< int > &sizeAdditionalSpaceLayers, HostMatrix< cudafloat > &trainInputPatterns, HostMatrix< cudafloat > &trainDesiredOutputPatterns, cudafloat initialLearningRate=INITIAL_LEARNING_RATE)
void SetSelectiveInputBiasSpaceNetwork(HostArray< cudafloat > &bias)
int Length() const
Definition: BaseArray.h:63
HostArray< cudafloat > GetSelectiveInputWeightsSpaceNetwork()
HostArray< cudafloat > GetLayerWeightsSpaceNetwork(int layer)
void SetSelectiveInputWeightsSpaceNetwork(HostArray< cudafloat > &weights)
float cudafloat
HostArray< cudafloat > GetSelectiveInputBiasSpaceNetwork()