I was hoping this could happen to F# as well. Many successful businesses, in my opinion, are built on a few simple ideas. Hopefully algorithms like this one can be among those simple ideas, and F# will become the default choice in a successful business.
namespace FSharp.NN

open System
open System.Collections.Generic

// NN factor which serves as the linkage between neurons
type NeuralFactor(weight:float) =
    member val HVector = 0. with get, set
    member val Weight = weight with get, set
    member this.SetWeightChange rate =
        this.Weight <- this.Weight + this.HVector * rate
    member this.Reset() =
        this.HVector <- 0.
    override this.ToString() =
        sprintf "(HVector=%A, Weight=%A)" this.HVector this.Weight
type Map = Dictionary<Neuron, NeuralFactor>

// the neuron class
and Neuron(bias) =
    let sigmoid v = 1. / (1. + exp(-v))
    member val Bias = NeuralFactor(bias) with get, set
    member val Error = 0. with get, set
    member val Input = Map() with get, set
    member val LastError = 0. with get, set
    member val Output = 0. with get, set
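    // Pulse computes this neuron's activation: the sigmoid of the weighted
    // sum of all input neurons' outputs plus the bias weight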
    member this.Pulse() =
        this.Output <- 0.
        for item in this.Input do
            this.Output <- this.Output + item.Key.Output * item.Value.Weight
        this.Output <- this.Output + this.Bias.Weight
        this.Output <- sigmoid this.Output
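    // nudge every incoming weight (and the bias) by its accumulated
    // gradient, scaled by the learning rate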
    member this.ApplyLearning rate =
        for value in this.Input.Values do
            value.SetWeightChange rate
        this.Bias.SetWeightChange rate
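    // clear the accumulated gradients before a new round of training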
    member this.Initialize() =
        this.Input.Values
        |> Seq.iter (fun value -> value.Reset())
        this.Bias.Reset()
    override this.ToString() =
        sprintf "(Bias=%A, Error=%A, Output=%A)" this.Bias this.Error this.Output

// the neural layer which hosts one or more neurons
type NeuralLayer() =
    inherit List<Neuron>()
    member this.Pulse() =
        this |> Seq.iter (fun n -> n.Pulse())
    member this.Apply rate =
        this |> Seq.iter (fun n -> n.ApplyLearning rate)
    member this.Initialize() =
        this |> Seq.iter (fun n -> n.Initialize())

// the neural network class
type NeuralNet() =
    // derivative of the sigmoid, expressed in terms of the sigmoid's own
    // output v: if v = sigmoid(x) then sigmoid'(x) = v * (1 - v)
    let sigmoidDerivative v = v * (1. - v)
    let rand = Random()
    member val LearningRate = 3.0 with get, set
    member val InputLayer = NeuralLayer() with get, set
    member val HiddenLayer = NeuralLayer() with get, set
    member val OutputLayer = NeuralLayer() with get, set
    member this.Initialize(inputNeuronCount, hiddenNeuronCount, outputNeuronCount) =
        [1..inputNeuronCount]  |> Seq.iter (fun _ -> this.InputLayer.Add(Neuron(0.)))
        [1..outputNeuronCount] |> Seq.iter (fun _ -> this.OutputLayer.Add(Neuron(0.)))
        [1..hiddenNeuronCount] |> Seq.iter (fun _ -> this.HiddenLayer.Add(Neuron(0.)))
        // fully connect input -> hidden and hidden -> output with random weights
        for hiddenNode in this.HiddenLayer do
            for inputNode in this.InputLayer do
                hiddenNode.Input.Add(inputNode, NeuralFactor(rand.NextDouble()))
        for outputNode in this.OutputLayer do
            for hiddenNode in this.HiddenLayer do
                outputNode.Input.Add(hiddenNode, NeuralFactor(rand.NextDouble()))
    member this.Pulse() =
        [this.HiddenLayer; this.OutputLayer]
        |> Seq.iter (fun n -> n.Pulse())
    member this.Apply() =
        [this.HiddenLayer; this.OutputLayer]
        |> Seq.iter (fun n -> n.Apply(this.LearningRate))
    member this.InitializeLearning() =
        [this.HiddenLayer; this.OutputLayer]
        |> Seq.iter (fun n -> n.Initialize())
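    // batch training: for each iteration, reset the gradient accumulators,
    // back-propagate every sample to accumulate gradients, then apply the
    // combined weight changes once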
    member this.Train(input: float list list, expected: float list list, iteration) =
        [1..iteration]
        |> Seq.iter (fun _ ->
            this.InitializeLearning()
            for i = 0 to input.Length - 1 do
                this.BackPropagation(input.[i], expected.[i])
            this.Apply())
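    // accumulate, for every connection, the gradient contribution
    // error(downstream) * output(upstream) into the factor's HVector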
    member this.Calculate() =
        for outputNode in this.OutputLayer do
            for hiddenNode in this.HiddenLayer do
                outputNode.Input.[hiddenNode].HVector <-
                    outputNode.Input.[hiddenNode].HVector + outputNode.Error * hiddenNode.Output
            // the bias acts on a constant input of 1, so its gradient is the error itself
            outputNode.Bias.HVector <- outputNode.Bias.HVector + outputNode.Error
        for hiddenNode in this.HiddenLayer do
            for inputNode in this.InputLayer do
                hiddenNode.Input.[inputNode].HVector <-
                    hiddenNode.Input.[inputNode].HVector + hiddenNode.Error * inputNode.Output
            hiddenNode.Bias.HVector <- hiddenNode.Bias.HVector + hiddenNode.Error
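    // standard delta rule: output error = (target - output) * sigmoid'(output);
    // hidden error = sum over output nodes of (output error * connecting weight),
    // scaled by sigmoid'(hidden output)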
    member this.CalculateErrors desiredResults =
        Seq.zip this.OutputLayer desiredResults
        |> Seq.iter (fun (outputNode, v) ->
            outputNode.Error <- (v - outputNode.Output) * sigmoidDerivative outputNode.Output)
        for hiddenNode in this.HiddenLayer do
            hiddenNode.Error <-
                this.OutputLayer
                |> Seq.sumBy (fun outputNode ->
                    (outputNode.Error * outputNode.Input.[hiddenNode].Weight) * sigmoidDerivative hiddenNode.Output)
    member this.BackPropagation(input, expected) =
        this.PrepareInput(input)
        this.Pulse()
        this.CalculateErrors(expected)
        this.Calculate()
    member this.Inputs with get(i) = this.InputLayer.[i]
    member this.Output with get(i) = this.OutputLayer.[i]
    member this.GetOutputs() =
        [ for output in this.OutputLayer do yield output.Output ]
    // load an input pattern by writing it directly to the input neurons' outputs
    member this.PrepareInput(input: float list) =
        Seq.zip this.InputLayer input
        |> Seq.iter (fun (a, b) -> a.Output <- b)
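
// XOR test: train a 2-2-1 network until all four input patterns land on the
// correct side of their thresholds (low patterns below (mid+low)/2, high
// patterns above (mid+high)/2)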
module Test =
    let high = 0.99
    let low = 0.01
    let mid = 0.5
    let rate = 3.4
    let input = [ [high;high]; [low;high]; [high;low]; [low;low] ]
    let output = [ [low]; [high]; [high]; [low] ]
    let mutable cont = true
    let net = NeuralNet()
    net.Initialize(2, 2, 1)
    // apply the test's learning rate
    net.LearningRate <- rate
    let mutable count = 0
    while cont do
        count <- count + 1
        net.Train(input, output, 5)
        net.PrepareInput([low;low])
        net.Pulse()
        let ll = net.GetOutputs() |> List.exactlyOne
        net.PrepareInput([high;low])
        net.Pulse()
        let hl = net.GetOutputs() |> List.exactlyOne
        net.PrepareInput([low;high])
        net.Pulse()
        let lh = net.GetOutputs() |> List.exactlyOne
        net.PrepareInput([high;high])
        net.Pulse()
        let hh = net.GetOutputs() |> List.exactlyOne
        // keep training while any pattern is still on the wrong side of its threshold
        cont <- hh > (mid + low) / 2.
                || lh < (mid + high) / 2.
                || hl < (mid + high) / 2.
                || ll > (mid + low) / 2.
    net.PrepareInput([high;low])
    net.Pulse()
    let v = net.GetOutputs() |> List.exactlyOne
    let result = v > 0.5   // XOR(high, low) should come out high
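By the way, the four probes in the loop above all repeat the same prepare/pulse/read steps, so they factor out nicely. Here is a minimal sketch of such a helper; the evaluate name is mine, not part of the code above:

    // hypothetical helper: run one pattern through the trained net
    // and read back its single output
    let evaluate (pattern: float list) =
        net.PrepareInput(pattern)
        net.Pulse()
        net.GetOutputs() |> List.exactlyOne

    // usage: check all four XOR cases after training
    for pattern, expectHigh in [ [low;low], false
                                 [low;high], true
                                 [high;low], true
                                 [high;high], false ] do
        let out = evaluate pattern
        printfn "%A -> %.3f (expected %s)" pattern out (if expectHigh then "high" else "low")

One caveat: with purely random starting weights, a small 2-2-1 net occasionally settles into a local minimum on XOR, in which case the while loop never terminates. A more defensive test would also cap count and re-initialize the weights when the limit is hit.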