Skip to content

Commit 09ed8fe

Browse files
committed
1. Corrected typo in Backpropagation class name 2. Added composition operations for Scaler objects 3. Added Autoencoder class
1 parent 1712258 commit 09ed8fe

File tree

7 files changed

+83
-18
lines changed

7 files changed

+83
-18
lines changed

README.md

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,10 +88,25 @@ Steps
8888
* Clone this repository
8989
* Run the following.
9090
```shell
91-
sbt console
91+
sbt
9292
```
9393

94-
You should get the following prompt.
94+
The sbt shell will open.
95+
96+
```shell
97+
[info] Loading project definition from ~/DynaML/project
98+
[info] Set current project to DynaML (in build file:~/Development/DynaML/)
99+
>
100+
```
101+
102+
Now enter the following commands
103+
104+
```shell
105+
>stage
106+
>console
107+
```
108+
109+
After the project builds, you should get the following prompt.
95110

96111
```
97112
___ ___ ___ ___ ___ ___

build.sbt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ packageDescription := "DynaML is a scala library/repl for implementing and worki
1212
"which can be extended easily to implement advanced models for small and large scale applications.\n\n"+
1313
"But the library can also be used as an educational/research tool for data analysis."
1414

15-
val mainVersion = "v1.4-beta.14"
15+
val mainVersion = "v1.4-beta.15"
1616

1717
val dataDirectory = settingKey[File]("The directory holding the data files for running example scripts")
1818

dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/neuralnets/AutoEncoder.scala

Lines changed: 37 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,45 @@ under the License.
1919
package io.github.mandar2812.dynaml.models.neuralnets
2020

2121
import breeze.linalg.DenseVector
22-
import io.github.mandar2812.dynaml.models.ModelPipe
22+
import io.github.mandar2812.dynaml.graph.FFNeuralGraph
23+
import io.github.mandar2812.dynaml.graph.utils.Neuron
24+
import io.github.mandar2812.dynaml.optimization.BackPropagation
25+
import io.github.mandar2812.dynaml.pipes.{ReversibleScaler, Scaler}
2326

2427
/**
  * @author mandar2812 22/6/16.
  *
  * Base implementation of a Sparse Autoencoder
  *
  * It is represented as a [[ReversibleScaler]] transforming
  * a breeze [[DenseVector]] into another breeze [[DenseVector]].
  *
  * NOTE(review): no explicit sparsity penalty is visible in this class;
  * "sparse" presumably refers to the bottleneck hidden layer
  * (outDim neurons) — confirm against [[BackPropagation]].
  *
  * @param inDim Dimensionality of the input (and reconstructed output).
  * @param outDim Dimensionality of the learned encoding (hidden layer size).
  */
class AutoEncoder(inDim: Int, outDim: Int) extends ReversibleScaler[DenseVector[Double]]{

  // Builds a fresh inDim -> outDim -> inDim feed-forward graph
  // with "logsig" activations on both the hidden and output layers.
  def initialize() =
    FFNeuralGraph(
      inDim, inDim, 1,
      List("logsig", "logsig"),
      List(outDim))

  // Current network state; replaced wholesale by learn().
  var graph = initialize()

  val optimizer = new BackPropagation

  // Inverse transform (decoder): load a code vector into the hidden
  // layer (layer 1) neuron values, then read the local fields of the
  // output layer (layer 2) as the reconstruction.
  val i = Scaler((xhat: DenseVector[Double]) => {
    graph.getLayer(1)
      .filter(_.getNeuronType() == "perceptron")
      .foreach(n => n.setValue(xhat(n.getNID())))
    DenseVector(graph.getLayer(2).map(n => Neuron.getLocalField(n)._1).toArray)
  })

  // Trains on (input, target) pairs. Note: optimization starts from a
  // freshly initialized graph, so any previous training state is discarded.
  def learn(data: Stream[(DenseVector[Double], DenseVector[Double])]) = {
    graph = optimizer.optimize(data.length.toLong, data, initialize())
  }

  // Forward transform (encoder): run a forward pass and return the
  // hidden-layer (layer 1) perceptron activations as the code vector.
  // Side effect: mutates neuron values in the underlying graph.
  override def run(x: DenseVector[Double]) = {
    graph.forwardPass(x)
    DenseVector(graph.getLayer(1).filter(_.getNeuronType() == "perceptron").map(_.getValue()).toArray)
  }

}

dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/neuralnets/CommitteeNetwork.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ package io.github.mandar2812.dynaml.models.neuralnets
2121
import breeze.linalg.DenseVector
2222
import io.github.mandar2812.dynaml.graph.FFNeuralGraph
2323
import io.github.mandar2812.dynaml.models.LinearModel
24-
import io.github.mandar2812.dynaml.optimization.{BackPropogation, CommitteeModelSolver, RegularizedOptimizer}
24+
import io.github.mandar2812.dynaml.optimization.{BackPropagation, CommitteeModelSolver, RegularizedOptimizer}
2525
import io.github.mandar2812.dynaml.pipes.DataPipe
2626

2727
/**
@@ -40,7 +40,7 @@ LinearModel[D, DenseVector[Double], DenseVector[Double],
4040

4141
val baseNetworks: List[FFNeuralGraph] = networks.toList
4242

43-
val baseOptimizer = new BackPropogation()
43+
val baseOptimizer = new BackPropagation()
4444
.setMomentum(0.01).setRegParam(0.001)
4545
.setMiniBatchFraction(1.0/baseNetworks.length)
4646
.setNumIterations(20)

dynaml-core/src/main/scala-2.11/io/github/mandar2812/dynaml/models/neuralnets/FeedForwardNetwork.scala

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import breeze.linalg.DenseVector
2222
import com.tinkerpop.blueprints.Graph
2323
import com.tinkerpop.frames.FramedGraph
2424
import io.github.mandar2812.dynaml.graph.FFNeuralGraph
25-
import io.github.mandar2812.dynaml.optimization.BackPropogation
25+
import io.github.mandar2812.dynaml.optimization.BackPropagation
2626
import io.github.mandar2812.dynaml.pipes.DataPipe
2727

2828

@@ -32,12 +32,9 @@ import io.github.mandar2812.dynaml.pipes.DataPipe
3232
* backed by an underlying graph.
3333
*
3434
* @tparam D The type of the underlying training data structure.
35-
*
3635
* @param data The training data
37-
*
3836
* @param netgraph The [[FFNeuralGraph]] object which represents the
3937
* network.
40-
*
4138
* @param transform A [[DataPipe]] which takes input of type [[D]] and
4239
* returns a [[Stream]] of input, output tuples.
4340
*
@@ -74,14 +71,14 @@ class FeedForwardNetwork[D](
7471

7572
/**
7673
* Model optimizer set to
77-
* [[BackPropogation]] which
74+
* [[BackPropagation]] which
7875
* is an implementation of
7976
* gradient-based backpropagation
8077
* with a momentum term.
8178
*
8279
* */
8380
override protected val optimizer =
84-
new BackPropogation()
81+
new BackPropagation()
8582
.setNumIterations(100)
8683
.setStepSize(0.01)
8784

@@ -90,7 +87,7 @@ class FeedForwardNetwork[D](
9087
this
9188
}
9289

93-
override def dataAsStream(d: D) = transform.run(d)
90+
override def dataAsStream(d: D) = transform(d)
9491

9592
/**
9693
* Learn the parameters
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import scala.util.Random
2929
* Implementation of the standard backpropagation with momentum
3030
* using the "generalized delta rule".
3131
*/
32-
class BackPropogation extends RegularizedOptimizer[FFNeuralGraph,
32+
class BackPropagation extends RegularizedOptimizer[FFNeuralGraph,
3333
DenseVector[Double], DenseVector[Double],
3434
Stream[(DenseVector[Double], DenseVector[Double])]] {
3535

dynaml-pipes/src/main/scala-2.11/io/github/mandar2812/dynaml/pipes/Scaler.scala

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,15 @@ trait Scaler[S] extends DataPipe[S, S]{
1717
}
1818
}
1919

20+
/**
  * Compose this scaler with another: the resulting scaler
  * applies `this` first, then `otherScaler` to its output.
  */
def >(otherScaler: Scaler[S]) = {
  val self = this
  new Scaler[S] {
    def run(data: S) = otherScaler.run(self.run(data))
  }
}
28+
2029
}
2130

2231
object Scaler {
@@ -52,4 +61,16 @@ trait ReversibleScaler[S] extends Scaler[S] {
5261
override def run(data: (S, T)): (S, T) = (firstRun(data._1), that(data._2))
5362
}
5463
}
64+
65+
/**
  * Compose two reversible scalers: the result applies `this`
  * first and `otherRevScaler` second. Its inverse `i` undoes the
  * steps in reverse order (`otherRevScaler.i` followed by `this.i`).
  */
def >(otherRevScaler: ReversibleScaler[S]): ReversibleScaler[S] = {
  val self = this
  new ReversibleScaler[S] {
    val i: Scaler[S] = otherRevScaler.i > self.i
    def run(data: S) = otherRevScaler.run(self.run(data))
  }
}
5576
}

0 commit comments

Comments
 (0)