diff --git a/src/Ironpen.jl b/src/Ironpen.jl
index dcd88cb..e750502 100644
--- a/src/Ironpen.jl
+++ b/src/Ironpen.jl
@@ -34,6 +34,7 @@ using .interface
 """ Todo:
+    [*1] wRec should not be normalized as a whole; it should be normalized locally over each neuron's 5 connections.
     [3] verify that model can complete learning cycle with no error
     [2] neuroplasticity() i.e. change connection
     [] using RL to control learning signal
diff --git a/src/forward.jl b/src/forward.jl
index f0b544e..c10c389 100644
--- a/src/forward.jl
+++ b/src/forward.jl
@@ -28,7 +28,7 @@ end
 function (kfn::kfn_1)(m::model, input_data::AbstractVector)
     kfn.timeStep = m.timeStep
-    kfn.softreset = m.softreset
+    kfn.learningStage = m.learningStage
     # generate noise
diff --git a/src/learn.jl b/src/learn.jl
index 2f0d1bf..e7fe6fb 100644
--- a/src/learn.jl
+++ b/src/learn.jl
@@ -126,7 +126,7 @@ end
 function learn!(n::lif_neuron, error::Number)
     n.eRec = n.phi * n.epsilonRec
-    ΔwRecChange = n.eta * error
+    ΔwRecChange = n.eta * error * n.eRec
     n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
     reset_epsilonRec!(n)
 end
@@ -138,7 +138,7 @@ function learn!(n::alif_neuron, error::Number)
     n.eRec_a = -n.phi * n.beta * n.epsilonRecA
     n.eRec = n.eRec_v + n.eRec_a
-    ΔwRecChange = n.eta * error
+    ΔwRecChange = n.eta * error * n.eRec
     n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
     reset_epsilonRec!(n)
     reset_epsilonRecA!(n)
@@ -149,7 +149,7 @@ end
 function learn!(n::linear_neuron, error::Number)
     n.eRec = n.phi * n.epsilonRec
-    ΔwRecChange = n.eta * error
+    ΔwRecChange = n.eta * error * n.eRec
     n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
     reset_epsilonRec!(n)
 end
diff --git a/src/types.jl b/src/types.jl
index ef35679..473e7a9 100644
--- a/src/types.jl
+++ b/src/types.jl
@@ -36,8 +36,6 @@ Base.@kwdef mutable struct model <: Ironpen
         reset epsilon_j. "reflect" = neuron will merge wRecChange into wRec then reset wRecChange.
     """
     learningStage::String = "inference"
-
-    softreset::Bool = false
     timeStep::Number = 0.0
 end
 """ Model outer constructor
@@ -106,7 +104,6 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
     learningStage::String = "inference"
     error::Union{Float64,Nothing} = nothing
-    softreset::Bool = false
     firedNeurons::Array{Int64} = Vector{Int64}() # store unique id of firing neurons to be used when random neuron connection
     firedNeurons_t0::Union{Vector{Bool},Nothing} = nothing # store firing state of all neurons at t0
@@ -314,7 +311,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
     timeStep::Number = 0.0 # current time
     wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
     v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
-    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
+    v_t1::Float64 = rand() # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
     v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
     v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
     vRest::Float64 = 0.0 # resting potential after neuron fired
@@ -407,7 +404,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
     timeStep::Union{Number,Nothing} = nothing # current time
     wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
     v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
-    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
+    v_t1::Float64 = rand() # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
     v_t_default::Union{Float64,Nothing} = 0.0
     v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
     vRest::Float64 = 0.0 # resting potential after neuron fired
@@ -515,7 +512,7 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
     subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
     wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
     v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
-    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
+    v_t1::Float64 = rand() # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
    v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
     v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
     vRest::Float64 = 0.0 # resting potential after neuron fired