Skip to content

Commit

Permalink
some fixes and optimizations
Browse files Browse the repository at this point in the history
  • Loading branch information
ivan-vasilev committed Apr 17, 2014
1 parent a731467 commit 1614d86
Show file tree
Hide file tree
Showing 17 changed files with 169 additions and 122 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import java.util.Arrays;
import java.util.List;

import com.github.neuralnetworks.util.Environment;
import com.github.neuralnetworks.util.Matrix;
import com.github.neuralnetworks.util.Tensor;
import com.github.neuralnetworks.util.TensorFactory;
Expand All @@ -21,11 +22,7 @@ public class ConnectionFactory implements Serializable {

public ConnectionFactory() {
super();
}

public ConnectionFactory(boolean useSharedMemory) {
super();
if (useSharedMemory) {
if (Environment.getInstance().getUseWeightsSharedMemory()) {
this.connections = new ArrayList<>();
this.sharedWeights = new float[0];
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ public class NNFactory {
 * Weight sharing is controlled via Environment.getInstance().getUseWeightsSharedMemory() (the former useSharedMemory parameter was removed)
* @return neural network
*/
public static NeuralNetworkImpl convNN(int[][] layers, boolean addBias, boolean useSharedMemory) {
public static NeuralNetworkImpl convNN(int[][] layers, boolean addBias) {
if (layers.length <= 1) {
throw new IllegalArgumentException("more than one layer is required");
}
Expand All @@ -56,7 +56,7 @@ public static NeuralNetworkImpl convNN(int[][] layers, boolean addBias, boolean
}

NeuralNetworkImpl result = new NeuralNetworkImpl();
ConnectionFactory cf = new ConnectionFactory(useSharedMemory);
ConnectionFactory cf = new ConnectionFactory();

Layer prev = null;
int prevUnitCount = layers[0][0] * layers[0][1] * layers[0][2];
Expand Down Expand Up @@ -131,9 +131,9 @@ public static NeuralNetworkImpl convNN(int[][] layers, boolean addBias, boolean
 * Weight sharing is controlled via Environment.getInstance().getUseWeightsSharedMemory() (the former useSharedMemory parameter was removed)
* @return
*/
public static NeuralNetworkImpl mlp(int[] layers, boolean addBias, boolean useSharedMemory) {
public static NeuralNetworkImpl mlp(int[] layers, boolean addBias) {
NeuralNetworkImpl result = new NeuralNetworkImpl();
mlp(result, new ConnectionFactory(useSharedMemory), layers, addBias);
mlp(result, new ConnectionFactory(), layers, addBias);
return result;
}

Expand Down Expand Up @@ -301,63 +301,63 @@ public static void lcStochasticPooling(NeuralNetworkImpl nn) {
}
}

public static NeuralNetworkImpl mlpSigmoid(int[] layers, boolean addBias, boolean useSharedMemory) {
NeuralNetworkImpl result = mlp(layers, addBias, useSharedMemory);
public static NeuralNetworkImpl mlpSigmoid(int[] layers, boolean addBias) {
NeuralNetworkImpl result = mlp(layers, addBias);
result.setLayerCalculator(lcSigmoid(result, null));
return result;
}

public static NeuralNetworkImpl mlpSoftRelu(int[] layers, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias, useSharedMemory);
public static NeuralNetworkImpl mlpSoftRelu(int[] layers, boolean addBias, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias);
result.setLayerCalculator(lcSoftRelu(result, outputCC));
return result;
}

public static NeuralNetworkImpl mlpRelu(int[] layers, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias, useSharedMemory);
public static NeuralNetworkImpl mlpRelu(int[] layers, boolean addBias, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias);
result.setLayerCalculator(lcRelu(result, outputCC));
return result;
}

public static NeuralNetworkImpl mlpTanh(int[] layers, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias, useSharedMemory);
public static NeuralNetworkImpl mlpTanh(int[] layers, boolean addBias, ConnectionCalculator outputCC) {
NeuralNetworkImpl result = mlp(layers, addBias);
result.setLayerCalculator(lcTanh(result, outputCC));
return result;
}

public static Autoencoder autoencoder(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory) {
public static Autoencoder autoencoder(int visibleCount, int hiddenCount, boolean addBias) {
Autoencoder result = new Autoencoder();
mlp(result, new ConnectionFactory(useSharedMemory), new int[] {visibleCount, hiddenCount, visibleCount}, addBias);
mlp(result, new ConnectionFactory(), new int[] {visibleCount, hiddenCount, visibleCount}, addBias);
return result;
}

public static Autoencoder autoencoderSigmoid(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias, useSharedMemory);
public static Autoencoder autoencoderSigmoid(int visibleCount, int hiddenCount, boolean addBias) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias);
ae.setLayerCalculator(lcSigmoid(ae, null));
return ae;
}

public static Autoencoder autoencoderSoftReLU(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias, useSharedMemory);
public static Autoencoder autoencoderSoftReLU(int visibleCount, int hiddenCount, boolean addBias, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias);
ae.setLayerCalculator(lcSoftRelu(ae, outputCC));
return ae;
}

public static Autoencoder autoencoderReLU(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias, useSharedMemory);
public static Autoencoder autoencoderReLU(int visibleCount, int hiddenCount, boolean addBias, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias);
ae.setLayerCalculator(lcRelu(ae, outputCC));
return ae;
}

public static Autoencoder autoencoderTanh(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias, useSharedMemory);
public static Autoencoder autoencoderTanh(int visibleCount, int hiddenCount, boolean addBias, ConnectionCalculator outputCC) {
Autoencoder ae = autoencoder(visibleCount, hiddenCount, addBias);
ae.setLayerCalculator(lcTanh(ae, outputCC));
return ae;
}

public static RBM rbm(int visibleCount, int hiddenCount, boolean addBias, boolean useSharedMemory) {
public static RBM rbm(int visibleCount, int hiddenCount, boolean addBias) {
RBM result = new RBM();
ConnectionFactory cf = new ConnectionFactory(useSharedMemory);
ConnectionFactory cf = new ConnectionFactory();
result.addConnections(cf.fullyConnected(new Layer(), new Layer(), visibleCount, hiddenCount));

if (addBias) {
Expand Down Expand Up @@ -406,13 +406,13 @@ public static RBMLayerCalculator rbmTanhTanh(RBM rbm, int batchSize) {
return new RBMLayerCalculator(rbm, batchSize, new AparapiTanh(), new AparapiTanh(), new AparapiTanh());
}

public static DBN dbn(int[] layers, boolean addBias, boolean useSharedMemory) {
public static DBN dbn(int[] layers, boolean addBias) {
if (layers.length <= 1) {
throw new IllegalArgumentException("more than one layer is required");
}

DBN result = new DBN();
ConnectionFactory cf = new ConnectionFactory(useSharedMemory);
ConnectionFactory cf = new ConnectionFactory();
result.addLayer(new Layer());
for (int i = 1; i < layers.length; i++) {
RBM rbm = new RBM();
Expand All @@ -429,36 +429,36 @@ public static DBN dbn(int[] layers, boolean addBias, boolean useSharedMemory) {
return result;
}

public static DBN dbnSigmoid(int[] layers, boolean addBias, boolean useSharedMemory) {
DBN result = dbn(layers, addBias, useSharedMemory);
public static DBN dbnSigmoid(int[] layers, boolean addBias) {
DBN result = dbn(layers, addBias);
result.setLayerCalculator(lcSigmoid(result, null));
return result;
}

public static DBN dbnSoftReLU(int[] layers, boolean addBias, boolean useSharedMemory) {
DBN result = dbn(layers, addBias, useSharedMemory);
public static DBN dbnSoftReLU(int[] layers, boolean addBias) {
DBN result = dbn(layers, addBias);
result.setLayerCalculator(lcSoftRelu(result, null));
return result;
}

public static DBN dbnReLU(int[] layers, boolean addBias, boolean useSharedMemory) {
DBN result = dbn(layers, addBias, useSharedMemory);
public static DBN dbnReLU(int[] layers, boolean addBias) {
DBN result = dbn(layers, addBias);
result.setLayerCalculator(lcRelu(result, null));
return result;
}

public static DBN dbnTanh(int[] layers, boolean addBias, boolean useSharedMemory) {
DBN result = dbn(layers, addBias, useSharedMemory);
public static DBN dbnTanh(int[] layers, boolean addBias) {
DBN result = dbn(layers, addBias);
result.setLayerCalculator(lcTanh(result, null));
return result;
}

public static StackedAutoencoder sae(int[] layers, boolean addBias, boolean useSharedMemory) {
public static StackedAutoencoder sae(int[] layers, boolean addBias) {
if (layers == null || layers.length <= 1) {
throw new IllegalArgumentException("more than one layer is required");
}

ConnectionFactory cf = new ConnectionFactory(useSharedMemory);
ConnectionFactory cf = new ConnectionFactory();
StackedAutoencoder result = new StackedAutoencoder(new Layer());
for (int i = 1; i < layers.length; i++) {
Autoencoder ae = new Autoencoder();
Expand All @@ -472,26 +472,26 @@ public static StackedAutoencoder sae(int[] layers, boolean addBias, boolean useS
return result;
}

public static StackedAutoencoder saeSigmoid(int[] layers, boolean addBias, boolean useSharedMemory) {
StackedAutoencoder sae = sae(layers, addBias, useSharedMemory);
public static StackedAutoencoder saeSigmoid(int[] layers, boolean addBias) {
StackedAutoencoder sae = sae(layers, addBias);
sae.setLayerCalculator(lcSigmoid(sae, null));
return sae;
}

public static StackedAutoencoder saeSoftReLU(int[] layers, int hiddenCount, boolean addBias, boolean useSharedMemory) {
StackedAutoencoder sae = sae(layers, addBias, useSharedMemory);
public static StackedAutoencoder saeSoftReLU(int[] layers, int hiddenCount, boolean addBias) {
StackedAutoencoder sae = sae(layers, addBias);
sae.setLayerCalculator(lcSoftRelu(sae, null));
return sae;
}

public static StackedAutoencoder saeReLU(int[] layers, int hiddenCount, boolean addBias, boolean useSharedMemory) {
StackedAutoencoder sae = sae(layers, addBias, useSharedMemory);
public static StackedAutoencoder saeReLU(int[] layers, int hiddenCount, boolean addBias) {
StackedAutoencoder sae = sae(layers, addBias);
sae.setLayerCalculator(lcRelu(sae, null));
return sae;
}

public static StackedAutoencoder saeTanh(int[] layers, int hiddenCount, boolean addBias, boolean useSharedMemory) {
StackedAutoencoder sae = sae(layers, addBias, useSharedMemory);
public static StackedAutoencoder saeTanh(int[] layers, int hiddenCount, boolean addBias) {
StackedAutoencoder sae = sae(layers, addBias);
sae.setLayerCalculator(lcTanh(sae, null));
return sae;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ public RBMLayerCalculator(RBM rbm, int miniBatchSize, ConnectionCalculator posPh
this.posPhaseCC = posPhaseCC;
this.negPhaseVisibleToHiddenCC = negPhaseVisibleToHiddenCC;
this.negPhaseHiddenToVisibleCC = negPhaseHiddenToVisibleCC;
this.posPhaseVP = TensorFactory.tensorProvider(rbm, miniBatchSize, Environment.getInstance().getUseSharedMemory());
this.posPhaseVP = TensorFactory.tensorProvider(rbm, miniBatchSize, Environment.getInstance().getUseDataSharedMemory());
this.negPhaseVP = TensorFactory.tensorProvider(posPhaseVP, rbm);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ public DeepTrainerTrainingInputProvider(TrainingInputProvider inputProvider, DNN
this.dnn = dnn;
this.currentNN = currentNN;
this.calculatedLayers = new HashSet<>();
this.layerResults = TensorFactory.tensorProvider(batchSize, Environment.getInstance().getUseSharedMemory(), dnn, currentNN);
this.layerResults = TensorFactory.tensorProvider(batchSize, Environment.getInstance().getUseDataSharedMemory(), dnn, currentNN);
this.inputDataBase = new TrainingInputDataImpl(layerResults.get(dnn.getInputLayer()));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ public void test() {
triggerEvent(new TestingStartedEvent(this));

Set<Layer> calculatedLayers = new UniqueList<>();
ValuesProvider results = TensorFactory.tensorProvider(n, getTestBatchSize(), Environment.getInstance().getUseSharedMemory());
ValuesProvider results = TensorFactory.tensorProvider(n, getTestBatchSize(), Environment.getInstance().getUseDataSharedMemory());

OutputError oe = getOutputError();
if (oe != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@ public class BackPropagationTrainer<N extends NeuralNetwork> extends OneStepTrai

public BackPropagationTrainer(Properties properties) {
super(properties);
activations = TensorFactory.tensorProvider(getNeuralNetwork(), getTrainingBatchSize(), Environment.getInstance().getUseSharedMemory());
activations = TensorFactory.tensorProvider(getNeuralNetwork(), getTrainingBatchSize(), Environment.getInstance().getUseDataSharedMemory());
activations.add(getProperties().getParameter(Constants.OUTPUT_ERROR_DERIVATIVE), activations.get(getNeuralNetwork().getOutputLayer()).getDimensions());
backpropagation = TensorFactory.tensorProvider(getNeuralNetwork(), getTrainingBatchSize(), Environment.getInstance().getUseSharedMemory());
backpropagation = TensorFactory.tensorProvider(getNeuralNetwork(), getTrainingBatchSize(), Environment.getInstance().getUseDataSharedMemory());
}

/* (non-Javadoc)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public void handleEvent(TrainingEvent event) {

ValuesProvider vp = mbe.getResults();
if (vp == null) {
vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseSharedMemory());
vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseDataSharedMemory());
}
if (vp.get(outputError) == null) {
vp.add(outputError, vp.get(n.getInputLayer()).getDimensions());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,19 +20,19 @@ public class Environment {
private KernelExecutionStrategy executionStrategy;

/**
* is debug
* Shared memory for calculations
*/
private boolean debug;
private boolean useDataSharedMemory;

/**
* Shared memory
* Shared memory for neural network connection weights
*/
private boolean useSharedMemory;
private boolean useWeightsSharedMemory;

private Environment() {
executionStrategy = new DefaultKernelExecution();
debug = true;
useSharedMemory = true;
useDataSharedMemory = true;
useWeightsSharedMemory = false;
}

public KernelExecutionStrategy getExecutionStrategy() {
Expand Down Expand Up @@ -62,19 +62,19 @@ public static Environment getInstance() {
return singleton;
}

public boolean isDebug() {
return debug;
public boolean getUseDataSharedMemory() {
return useDataSharedMemory;
}

public void setDebug(boolean debug) {
this.debug = debug;
public void setUseDataSharedMemory(boolean useDataSharedMemory) {
this.useDataSharedMemory = useDataSharedMemory;
}

public boolean getUseSharedMemory() {
return useSharedMemory;
public boolean getUseWeightsSharedMemory() {
return useWeightsSharedMemory;
}

public void setUseSharedMemory(boolean useSharedMemory) {
this.useSharedMemory = useSharedMemory;
public void setUseWeightsSharedMemory(boolean useWeightsSharedMemory) {
this.useWeightsSharedMemory = useWeightsSharedMemory;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ public void testAEBackpropagation() {
Environment.getInstance().setExecutionMode(EXECUTION_MODE.CPU);

// autoencoder with 6 input/output and 2 hidden units
Autoencoder ae = NNFactory.autoencoderSigmoid(6, 2, true, true);
Environment.getInstance().setUseWeightsSharedMemory(true);
Autoencoder ae = NNFactory.autoencoderSigmoid(6, 2, true);

// We'll use a simple dataset of symptoms of a flu illness. There are 6
// input features and the first three are symptoms of the illness - for
Expand Down
Loading

0 comments on commit 1614d86

Please sign in to comment.