
Basic Feed-Forward Neural network (without training code)

Builds a basic feed-forward neural network, with functions to compute its output and its error. Tested with a crude random-search training loop that learns to generalize XOR. (https://gist.github.com/ideaflare/c0b2cb2d96e76b72ca7937cc188f579b)

//------------------------------- Create Network ------------------------------- 
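// A network is a list of layers, each layer a list of neurons whose weights and bias
// are initialised uniformly at random in [-1.0, 1.0).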
type Neuron  = { Weights : float  list ; Bias : float }
type Layer   = { Neurons : Neuron list }
type Network = { Layers  : Layer  list }

let rnd = System.Random()

let randomWeight _ = (2.0 * rnd.NextDouble()) - 1.0

let buildNeuron inputs =
    { Neuron.Weights = List.init inputs randomWeight; Bias = randomWeight () }

let buildLayer inputs neurons =
    { Layer.Neurons = List.init neurons (fun _ -> buildNeuron inputs) }

let buildNetwork inputs layerSizes =
    let rec createLayers acc inputs layers =
        match layers with
        | [] -> acc
        | neurons :: nextSizes ->
            let newLayer = buildLayer inputs neurons
            createLayers (newLayer :: acc) neurons nextSizes
    { Network.Layers = List.rev (createLayers [] inputs layerSizes) }
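
// Illustrative check (not part of the original snippet): a 2-3-1 network has one
// hidden layer of three neurons (two weights each) followed by an output layer of
// one neuron (three weights).
let exampleNetwork = buildNetwork 2 [3; 1]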

//------------------------------- Compute Result ------------------------------- 
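// Each neuron computes tanh(w . x + b); a layer maps one input vector through all of
// its neurons, and the network feeds each layer's output forward into the next layer.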
let activation = tanh

let neuronOutput neuron inputs =
    let weightedSum = 
        List.zip neuron.Weights inputs 
        |> List.sumBy (fun (w, i) -> w * i)
    activation (weightedSum + neuron.Bias)

let layerOutput layer inputs =
    layer.Neurons 
    |> List.map (fun neuron -> neuronOutput neuron inputs)

let feedForwardResult network inputs =
    let rec ff activationResult layers =
        match layers with
        | [] -> activationResult
        | layer :: remainingLayers -> 
            let nextLayerInput = layerOutput layer activationResult
            ff nextLayerInput remainingLayers 
    ff inputs network.Layers
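
// Illustrative check (not part of the original snippet): feeding two inputs through
// exampleNetwork above yields a one-element output list, bounded to (-1, 1) by tanh.
let exampleOutput = feedForwardResult exampleNetwork [0.5; -0.25]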

//------------------------------- Compute Errors ------------------------------- 
type TrainingCase = { Inputs : float list; Outputs : float list }

let neuronError (expected, computed) =
    let error = expected - computed
    error * error

let layerError (expectedLastLayer, actualLastLayer) =
    List.zip expectedLastLayer actualLastLayer
    |> List.sumBy neuronError

let networkError network trainingCases =
    let expectedResults = trainingCases |> List.map (fun case -> case.Outputs)
    let networkResults  = trainingCases |> List.map (fun case -> feedForwardResult network case.Inputs)
    List.zip expectedResults networkResults
    |> List.sumBy layerError
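
// Illustrative check (not part of the original snippet): a single case contributes
// the sum of squared differences between its expected and computed outputs, and
// networkError sums these contributions over all training cases.
let exampleError =
    networkError exampleNetwork [ { Inputs = [0.0; 1.0]; Outputs = [1.0] } ]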

//------------------------------- Modify Network ------------------------------- 
// TODO: Implement gradient/feature based algorithm for complex problems
//------------------------------- Train Network ------------------------------- 
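// Crude random search: repeatedly build fresh randomly-initialised networks and keep
// the one with the lowest error on the training set, stopping after maxSeconds.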
let randomSearch (trainingSet : TrainingCase list) maxSeconds =
    let inputs = trainingSet.Head.Inputs.Length
    let outputs = trainingSet.Head.Outputs.Length

    let createNetwork () = buildNetwork inputs [inputs;inputs;outputs]
    let stopwatch = System.Diagnostics.Stopwatch.StartNew()

    let rec findNext bestNetwork oldError =
        match stopwatch.Elapsed.TotalSeconds with
        | timeSpent when timeSpent > maxSeconds -> bestNetwork
        | _ ->
            let nn = createNetwork ()
            let newError = networkError nn trainingSet
            if newError < oldError then
                printfn "Better network found! %f" newError
                findNext nn newError
            else
                findNext bestNetwork oldError
    findNext (createNetwork ()) System.Double.MaxValue

//----------------------------------- Test  ----------------------------------- 
let case1 = {Inputs = [1.0;1.0] ; Outputs = [0.0]}
let case2 = {Inputs = [0.0;1.0] ; Outputs = [1.0]}
let case3 = {Inputs = [1.0;0.0] ; Outputs = [1.0]}
let case4 = {Inputs = [0.0;0.0] ; Outputs = [0.0]}

let xorGate = [case1;case2;case3;case4]

let trainedNetwork = randomSearch xorGate 10.0

let computedValues = 
    xorGate 
    |> List.map (fun case -> feedForwardResult trainedNetwork case.Inputs)
    |> List.sortDescending
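// computedValues holds the network's outputs for all four cases, sorted in descending
// order, so computedValues.[2].[0] (the third-largest output) serves as a threshold:
// only the two largest outputs, ideally the two "true" XOR cases, exceed it.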

let xorOutput case = (feedForwardResult trainedNetwork case.Inputs).[0] > computedValues.[2].[0]

xorGate |> List.iter (fun case -> printfn "XOR result of %A is %A" case.Inputs (xorOutput case))

Link: http://fssnip.net/7PF
Posted: 7 years ago
Author: Christoph Alrich
Tags: machine learning, neural network