Skip to content

Commit

Permalink
vol3:java:more refactoring, now ready to address: #31
Browse files Browse the repository at this point in the history
  • Loading branch information
jeffheaton committed Jan 18, 2016
1 parent 2af1712 commit 0e80e8d
Show file tree
Hide file tree
Showing 9 changed files with 34 additions and 273 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,14 @@ public class BasicLayer extends WeightedLayer {
*/
private boolean hasBias;

/**
* The output from this layer.
*/
private FlatVolume layerOutput;

/**
* The sums for this layer, essentially the output prior to the activation function.
*/
private FlatVolume layerSums;

/**
Expand Down Expand Up @@ -88,9 +95,6 @@ public BasicLayer(final ActivationFunction theActivation, boolean theHasBias, in
this(theActivation,theHasBias,new int[] {theCount,1,1});
}




/**
* @return the count
*/
Expand Down Expand Up @@ -145,43 +149,14 @@ public int[] getDimensionCounts() {
* {@inheritDoc}
*/
@Override
public int getWeightDepthUnit() {
Layer previousLayer = getOwner().getPreviousLayer(this);
int prevCount;
if( previousLayer instanceof Conv2DLayer ) {
prevCount = (((Conv2DLayer)previousLayer).getFilterColumns() *
((Conv2DLayer)previousLayer).getFilterRows());
} else {
if( previousLayer.getDimensionCounts().length==1) {
prevCount = previousLayer.getCount();
} else {
prevCount = previousLayer.getDimensionCounts()[0] * previousLayer.getDimensionCounts()[1];
}
}
if(previousLayer.hasBias()) {
prevCount++;
}


return prevCount * getNeuronDepthUnit();
/**
 * @return The activation output of this layer ({@code layerOutput}).
 */
public FlatVolume getLayerOutput() {
return layerOutput;
}

/**
 * {@inheritDoc}
 */
@Override
public int getNeuronDepthUnit() {
    // With three dimensions the depth unit is one 2-D slice (rows * cols);
    // otherwise it is simply the first (flat) count.
    final int[] dims = this.count;
    return (dims.length == 3) ? dims[0] * dims[1] : dims[0];
}

/**
 * @return The output volume produced by this layer.
 */
public FlatVolume getLayerOutput() {
return layerOutput;
}

/**
 * @return The sums for this layer: the output before the activation
 * function is applied.
 */
public FlatVolume getLayerSums() {
return layerSums;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,9 @@ public class BasicNetwork implements RegressionAlgorithm, ClassificationAlgorith
*/
private boolean networkTraining;

/**
* The outputs/activations from each layer.
*/
private FlatData layerOutput = new FlatData();

/**
Expand Down Expand Up @@ -100,25 +103,6 @@ public void compute(final double[] input, final double[] output) {
output, 0, this.outputCount);
}

/**
 * @return The total number of neurons in the neural network, summed
 * across every layer.
 */
public int getNeuronCount() {
    return this.layers.stream()
            .mapToInt(Layer::getTotalCount)
            .sum();
}

/**
 * @return The length of the array the network would encode to; equal to
 * the length of the weight vector.
 */
public int getEncodeLength() {
return this.weights.getLength();
}


/**
* @return The number of input neurons.
*/
Expand Down Expand Up @@ -149,16 +133,6 @@ public void setInputCount(final int inputCount) {
this.inputCount = inputCount;
}



/**
 * Set the output count.
 * NOTE(review): finalizeStructure() also derives this value from the last
 * layer's count — confirm that setting it manually is intended.
 * @param outputCount The output count.
 */
public void setOutputCount(final int outputCount) {
this.outputCount = outputCount;
}

/**
* Get the weight between the two layers.
* @param fromLayer The from layer.
Expand Down Expand Up @@ -226,11 +200,9 @@ public void finalizeStructure() {
this.inputCount = this.layers.get(0).getCount();
this.outputCount = this.layers.get(layerCount - 1).getCount();

TempStructureCounts counts = new TempStructureCounts();

for (int i = this.layers.size() - 1; i >= 0; i--) {
final Layer layer = this.layers.get(i);
layer.finalizeStructure(this, i, counts);
layer.finalizeStructure(this, i);
this.layerOutput.addFlatObject(layer.getLayerSums());
this.layerOutput.addFlatObject(layer.getLayerOutput());
if( layer.getWeightMatrix()!=null ) {
Expand Down Expand Up @@ -279,14 +251,7 @@ public void setWeight(final int fromLayer, final int fromNeuron,
+ fromLayer);
}

final int weightBaseIndex
= this.layers.get(fromLayer+1).getWeightIndex();
final int count = this.layers.get(fromLayer).getTotalCount();

final int weightIndex = weightBaseIndex + fromNeuron
+ (toNeuron * count);

getWeights()[weightIndex] = value;
this.layers.get(fromLayer+1).getWeightMatrix().set(toNeuron,fromNeuron, value);
}

/**
Expand Down Expand Up @@ -388,24 +353,24 @@ public void setNetworkTraining(boolean networkTraining) {
this.networkTraining = networkTraining;
}

/**
 * @return The input layer: the first layer in the stack.
 */
public Layer getInputLayer() {
return this.layers.get(0);
}

/**
 * @return The output layer: the last layer in the stack.
 */
public Layer getOutputLayer() {
    final int lastIndex = this.layers.size() - 1;
    return this.layers.get(lastIndex);
}

/**
 * @return The combined outputs/sums storage for all layers, assembled as a
 * single FlatData during finalizeStructure().
 */
public FlatData getLayerOutput() {
return layerOutput;
}

/**
 * Print each layer's sums and outputs to standard output (debugging aid).
 * Layers are reported with a 1-based index.
 */
public void dumpOutputs() {
    for (int idx = 0; idx < this.layers.size(); idx++) {
        final Layer layer = this.layers.get(idx);
        System.out.println("Layer #" + (idx + 1) + ":Sums=" + layer.getLayerSums()
                + ",Output=" + layer.getLayerOutput() );
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -109,8 +109,8 @@ public Conv2DLayer(final ActivationFunction theActivation, int theNumFilters, in
* {@inheritDoc}
*/
@Override
public void finalizeStructure(BasicNetwork theOwner, int theLayerIndex, TempStructureCounts counts) {
super.finalizeStructure(theOwner,theLayerIndex,counts);
public void finalizeStructure(BasicNetwork theOwner, int theLayerIndex) {
super.finalizeStructure(theOwner,theLayerIndex);

Layer prevLayer = (getLayerIndex()>0) ? getOwner().getLayers().get(getLayerIndex()-1) : null;
Layer nextLayer = (getLayerIndex()<getOwner().getLayers().size()-1) ? getOwner().getLayers().get(getLayerIndex()+1) : null;
Expand All @@ -127,23 +127,6 @@ public void finalizeStructure(BasicNetwork theOwner, int theLayerIndex, TempStru
this.outRows = Math.floor((inRows + this.padding * 2 - this.filterColumns) / this.stride + 1);
}

/**
 * {@inheritDoc}
 */
@Override
public int getWeightDepthUnit() {
    // Weight depth unit is the product of the previous layer's neuron depth
    // unit and this layer's own.
    final Layer prev = getOwner().getPreviousLayer(this);
    final int prevUnit = prev.getNeuronDepthUnit();
    return prevUnit * getNeuronDepthUnit();
}

/**
 * {@inheritDoc}
 */
@Override
public int getNeuronDepthUnit() {
    // One filter's worth of neurons: rows * columns (order is immaterial
    // for int multiplication).
    return this.filterRows * this.filterColumns;
}

@Override
public FlatVolume getLayerOutput() {
return this.layerOutput;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,8 @@ public interface Layer {
* Finalize the structure of this layer.
* @param theOwner The neural network that owns this layer.
* @param theLayerIndex The zero-based index of this layer.
* @param counts The counts structure to track the weight and neuron counts.
*/
void finalizeStructure(BasicNetwork theOwner, int theLayerIndex,
TempStructureCounts counts);
void finalizeStructure(BasicNetwork theOwner, int theLayerIndex);

/**
* Compute this layer.
Expand All @@ -74,16 +72,6 @@ void finalizeStructure(BasicNetwork theOwner, int theLayerIndex,
*/
void computeGradient(GradientCalc calc);

/**
* @return The start of this layer's weights in the weight vector.
*/
int getWeightIndex();

/**
* @return The start of this layer's neurons in the neuron vector.
*/
int getNeuronIndex();

/**
* @return This layer's index in the layer stack.
*/
Expand Down Expand Up @@ -119,24 +107,18 @@ void finalizeStructure(BasicNetwork theOwner, int theLayerIndex,
int[] getDimensionCounts();

/**
* Get the number of weights in a single unit. For non-convolution layer, this is the total number of weights.
* For a convolution network, this is the number of weights per filter.
* @return The weights per depth unit.
* @return The output from each of the layers.
*/
int getWeightDepthUnit();
FlatVolume getLayerOutput();

/**
* Get the number of neurons in a single unit. For non-convolution layers, this is the total number of neurons in
* this layer. For convolution networks this is the number of neurons per filter.
* @return The neurons per depth unit.
* @return The sums from each of the layers.
*/
int getNeuronDepthUnit();

FlatVolume getLayerOutput();

FlatVolume getLayerSums();

/**
* @return The weight matrix.
*/
FlatMatrix getWeightMatrix();


}
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public ActivationFunction getActivation() {
* {@inheritDoc}
*/
@Override
public void finalizeStructure(BasicNetwork theOwner, int theLayerIndex, TempStructureCounts counts) {
public void finalizeStructure(BasicNetwork theOwner, int theLayerIndex) {

}

Expand All @@ -90,16 +90,6 @@ public void computeGradient(GradientCalc calc) {

}

@Override
// Stub: always 0 — this layer type appears to be a placeholder with no
// weight storage of its own (NOTE(review): confirm against the Layer contract).
public int getWeightIndex() {
return 0;
}

@Override
// Stub: always 0, mirroring getWeightIndex() above.
public int getNeuronIndex() {
return 0;
}


@Override
// Always reports index 0 — NOTE(review): looks like a placeholder implementation.
public int getLayerIndex() { return 0; }
Expand Down Expand Up @@ -129,16 +119,6 @@ public int[] getDimensionCounts() {
return new int[0];
}

@Override
// Stub implementation: reports no weight depth unit.
public int getWeightDepthUnit() {
return 0;
}

@Override
// Stub implementation: reports no neuron depth unit.
public int getNeuronDepthUnit() {
return 0;
}

@Override
public FlatVolume getLayerOutput() {
return null;
Expand Down
Loading

0 comments on commit 0e80e8d

Please sign in to comment.