reset_epsilonRec after ΔwRecChange is calculated

This commit is contained in:
2023-05-16 22:04:47 +07:00
parent 0ac5a703ea
commit 7d943bce6a
5 changed files with 75 additions and 120 deletions

View File

@@ -34,8 +34,6 @@ using .interface
""" """
Todo: Todo:
[9] verify that model can complete learning cycle with no error [9] verify that model can complete learning cycle with no error
[*5] synaptic connection strength concept. use sigmoid, turn connection offline [*5] synaptic connection strength concept. use sigmoid, turn connection offline
[8] neuroplasticity() i.e. change connection [8] neuroplasticity() i.e. change connection
@@ -58,6 +56,7 @@ using .interface
if output neuron activates when it should NOT, use output neuron's if output neuron activates when it should NOT, use output neuron's
(vt*100)/vth as error (vt*100)/vth as error
[DONE] use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge [DONE] use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge
[DONE] reset_epsilonRec after ΔwRecChange is calculated
Change from version: v06_36a Change from version: v06_36a
- -

View File

@@ -102,7 +102,7 @@ function (n::lif_neuron)(kfn::knowledgeFn)
# decay of v_t1 # decay of v_t1
n.v_t1 = n.alpha * n.v_t n.v_t1 = n.alpha * n.v_t
else else
n.recSignal = sum(n.w_rec .* n.z_i_t) # signal from other neuron that this neuron subscribed n.recSignal = sum(n.wRec .* n.z_i_t) # signal from other neuron that this neuron subscribed
n.alpha_v_t = n.alpha * n.v_t n.alpha_v_t = n.alpha * n.v_t
n.v_t1 = n.alpha_v_t + n.recSignal n.v_t1 = n.alpha_v_t + n.recSignal
@@ -119,6 +119,8 @@ function (n::lif_neuron)(kfn::knowledgeFn)
# there is a difference from alif formula # there is a difference from alif formula
n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.v_th) / n.v_th) n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.v_th) / n.v_th)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
end end
end end
@@ -147,7 +149,7 @@ function (n::alif_neuron)(kfn::knowledgeFn)
n.z_t = isnothing(n.z_t) ? false : n.z_t n.z_t = isnothing(n.z_t) ? false : n.z_t
n.a = (n.rho * n.a) + ((1 - n.rho) * n.z_t) n.a = (n.rho * n.a) + ((1 - n.rho) * n.z_t)
n.av_th = n.v_th + (n.beta * n.a) n.av_th = n.v_th + (n.beta * n.a)
n.recSignal = sum(n.w_rec .* n.z_i_t) # signal from other neuron that this neuron subscribed n.recSignal = sum(n.wRec .* n.z_i_t) # signal from other neuron that this neuron subscribed
n.alpha_v_t = n.alpha * n.v_t n.alpha_v_t = n.alpha * n.v_t
n.v_t1 = n.alpha_v_t + n.recSignal n.v_t1 = n.alpha_v_t + n.recSignal
n.v_t1 = no_negative!.(n.v_t1) n.v_t1 = no_negative!.(n.v_t1)
@@ -162,6 +164,8 @@ function (n::alif_neuron)(kfn::knowledgeFn)
# there is a difference from lif formula # there is a difference from lif formula
n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.av_th) / n.v_th) n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.av_th) / n.v_th)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
end end
end end
@@ -188,7 +192,7 @@ function (n::linear_neuron)(kfn::T) where T<:knowledgeFn
# decay of v_t1 # decay of v_t1
n.v_t1 = n.alpha * n.v_t n.v_t1 = n.alpha * n.v_t
else else
n.recSignal = sum(n.w_rec .* n.z_i_t) # signal from other neuron that this neuron subscribed n.recSignal = sum(n.wRec .* n.z_i_t) # signal from other neuron that this neuron subscribed
n.alpha_v_t = n.alpha * n.v_t n.alpha_v_t = n.alpha * n.v_t
n.v_t1 = n.alpha_v_t + n.recSignal n.v_t1 = n.alpha_v_t + n.recSignal
@@ -205,6 +209,8 @@ function (n::linear_neuron)(kfn::T) where T<:knowledgeFn
# there is a difference from alif formula # there is a difference from alif formula
n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.v_th) / n.v_th) n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.v_th) / n.v_th)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
end end
end end

View File

@@ -59,23 +59,12 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
kfn.firedNeurons_t1 = Vector{Bool}() kfn.firedNeurons_t1 = Vector{Bool}()
kfn.learningStage = "learning" kfn.learningStage = "learning"
#TODO prepare for end learning
elseif kfn.learningStage == "end_learning"
resetLearningParams!(n)
# clear variables
kfn.firedNeurons = Vector{Int64}()
kfn.firedNeurons_t0 = Vector{Bool}()
kfn.firedNeurons_t1 = Vector{Bool}()
kfn.learningStage = "inference"
end end
# compute kfn error # compute kfn error
out = [n.z_t1 for n in kfn.outputNeuronsArray] outs = [n.z_t1 for n in kfn.outputNeuronsArray]
for (i, v) in enumerate(out) for (i, out) in enumerate(outs)
if v != correctAnswer[i] # need to adjust weight if out != correctAnswer[i] # need to adjust weight
kfnError = (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].v_t) * kfnError = (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].v_t) *
100 / kfn.outputNeuronsArray[i].v_th 100 / kfn.outputNeuronsArray[i].v_th
@@ -87,55 +76,36 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
learn!(kfn.outputNeuronsArray[i], kfn) learn!(kfn.outputNeuronsArray[i], kfn)
end end
end end
#WORKING
# Threads.@threads for n in kfn.neuronsArray
for n in kfn.neuronsArray
learn!(n, kfn) # Neurons are always learning, besides error from model output
end
if kfn.outputError !== nothing
# Threads.@threads for n in kfn.outputNeuronsArray
for n in kfn.outputNeuronsArray # not use multithreading because 1st output neuron
# will set learning rate that will be used by
# other output neurons
learn!(n, kfn)
end
# for main loop user's display and training's exit condition
avgNeuronsFiringRate = 0.0
for n in kfn.neuronsArray
if typeof(n) <: compute_neuron
avgNeuronsFiringRate += n.firingRate
end
end
kfn.avgNeuronsFiringRate = avgNeuronsFiringRate /
kfn.kfnParams[:compute_neuron_number]
avgNeurons_v_t1 = 0.0
for n in kfn.neuronsArray
if typeof(n) <: compute_neuron
avgNeurons_v_t1 += n.v_t1
end
end
kfn.avgNeurons_v_t1 = avgNeurons_v_t1 / kfn.kfnParams[:compute_neuron_number]
end
# wrap up learning session # wrap up learning session
if kfn.learningStage == "end_learning" if kfn.learningStage == "end_learning"
# Threads.@threads for n in kfn.neuronsArray
for n in kfn.neuronsArray
n.wRec += n.wRecChange # merge wRecChange into wRec
wSign = sign.(n.wRec) # check for flipped sign, 1 indicates non-flipped sign
nonFlipedSign = isequal.(n.subExInType, wSign) # 1 not flipped, 0 flipped
LinearAlgebra.normalize!(n.wRec, 1)
n.wRec .*= nonFlipedSign # set weight that flipped sign to 0 for random new connection
#WORKING synapticConnStrength
#TODO neuroplasticity #TODO neuroplasticity
end
for n in kfn.outputNeuronsArray # merge wRecChange into wRec
n.wRec += n.wRecChange
wSign = sign.(n.wRec) # check for flipped sign, 1 indicates non-flipped sign
nonFlipedSign = isequal.(n.subExInType, wSign) # 1 not flipped, 0 flipped
LinearAlgebra.normalize!(n.wRec, 1)
n.wRec .*= nonFlipedSign # set weight that flipped sign to 0 for random new connection
#TODO synapticConnStrength
#TODO neuroplasticity
end
resetLearningParams!(n) resetLearningParams!(n)
# clear variables # clear variables
@@ -156,25 +126,16 @@ end
""" lif learn() """ lif learn()
""" """
function learn!(n::lif_neuron, error::Number) function learn!(n::lif_neuron, error::Number)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
n.eRec = n.phi * n.epsilonRec n.eRec = n.phi * n.epsilonRec
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1) reset_epsilonRec!(n)
# check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange)
nonFlipedSign = isequal.(n.subExInType, wSign) # 1 not flipped, 0 flipped
n.wRecChange .*= nonFlipedSign # set weight that flipped sign to 0 for random new connection
end end
""" alif_neuron learn() """ alif_neuron learn()
""" """
function learn!(n::alif_neuron, error::Number) function learn!(n::alif_neuron, error::Number)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
n.epsilonRecA = (n.phi * n.epsilonRec) + n.epsilonRecA = (n.phi * n.epsilonRec) +
((n.rho - (n.phi * n.beta)) * n.epsilonRecA) ((n.rho - (n.phi * n.beta)) * n.epsilonRecA)
n.eRec_v = n.phi * n.epsilonRec n.eRec_v = n.phi * n.epsilonRec
@@ -183,29 +144,17 @@ function learn!(n::alif_neuron, error::Number)
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1) reset_epsilonRec!(n)
# check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange)
nonFlipedSign = isequal.(n.subExInType, wSign) # 1 not flipped, 0 flipped
n.wRecChange .*= nonFlipedSign # set weight that flipped sign to 0 for random new connection
end end
""" linear_neuron learn() """ linear_neuron learn()
""" """
function learn!(n::linear_neuron, error::Number) function learn!(n::linear_neuron, error::Number)
n.decayedEpsilonRec = n.alpha * n.epsilonRec
n.epsilonRec = n.decayedEpsilonRec + n.z_i_t
n.eRec = n.phi * n.epsilonRec n.eRec = n.phi * n.epsilonRec
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1) reset_epsilonRec!(n)
# check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange)
nonFlipedSign = isequal.(n.subExInType, wSign) # 1 not flipped, 0 flipped
n.wRecChange .*= nonFlipedSign # set weight that flipped sign to 0 for random new connection
end end

View File

@@ -3,7 +3,7 @@ module snn_utils
using Flux.Optimise: apply! using Flux.Optimise: apply!
export calculate_α, calculate_ρ, calculate_k, timestep_forward!, init_neuron, no_negative!, export calculate_α, calculate_ρ, calculate_k, timestep_forward!, init_neuron, no_negative!,
precision, calculate_w_change!, store_knowledgefn_error!, interneurons_adjustment!, precision, calculate_w_change!, store_knowledgefn_error!, interneurons_adjustment!,
reset_z_t!, resetLearningParams!, reset_learning_history_params!, reset_z_t!, resetLearningParams!, reset_learning_history_params!, reset_epsilonRec!,
cal_v_reg!, calculate_w_change_end!, cal_v_reg!, calculate_w_change_end!,
firing_rate_error!, firing_rate_regulator!, update_Bn!, cal_firing_reg!, firing_rate_error!, firing_rate_regulator!, update_Bn!, cal_firing_reg!,
neuroplasticity!, shakeup!, reset_learning_no_wchange!, adjust_internal_learning_rate!, neuroplasticity!, shakeup!, reset_learning_no_wchange!, adjust_internal_learning_rate!,
@@ -32,12 +32,13 @@ reset_last_firing_time!(n::compute_neuron) = n.lastFiringTime = 0.0
reset_refractory_state_active!(n::compute_neuron) = n.refractory_state_active = false reset_refractory_state_active!(n::compute_neuron) = n.refractory_state_active = false
reset_v_t!(n::neuron) = n.v_t = n.vRest reset_v_t!(n::neuron) = n.v_t = n.vRest
reset_z_t!(n::compute_neuron) = n.z_t = false reset_z_t!(n::compute_neuron) = n.z_t = false
reset_epsilon_rec!(n::compute_neuron) = n.epsilonRec = n.epsilonRec * 0.0 reset_epsilonRec!(n::compute_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilonRec!(n::output_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilon_rec_a!(n::alif_neuron) = n.epsilonRecA = n.epsilonRecA * 0.0 reset_epsilon_rec_a!(n::alif_neuron) = n.epsilonRecA = n.epsilonRecA * 0.0
reset_epsilon_in!(n::compute_neuron) = n.epsilon_in = isnothing(n.epsilon_in) ? nothing : n.epsilon_in * 0.0 reset_epsilon_in!(n::compute_neuron) = n.epsilon_in = isnothing(n.epsilon_in) ? nothing : n.epsilon_in * 0.0
reset_error!(n::Union{compute_neuron, linear_neuron}) = n.error = nothing reset_error!(n::Union{compute_neuron, linear_neuron}) = n.error = nothing
reset_w_in_change!(n::compute_neuron) = n.w_in_change = isnothing(n.w_in_change) ? nothing : n.w_in_change * 0.0 reset_w_in_change!(n::compute_neuron) = n.w_in_change = isnothing(n.w_in_change) ? nothing : n.w_in_change * 0.0
reset_w_rec_change!(n::compute_neuron) = n.wRecChange = n.wRecChange * 0.0 reset_wRecChange!(n::compute_neuron) = n.wRecChange = n.wRecChange * 0.0
reset_a!(n::alif_neuron) = n.a = n.a * 0.0 reset_a!(n::alif_neuron) = n.a = n.a * 0.0
reset_reg_voltage_a!(n::compute_neuron) = n.reg_voltage_a = n.reg_voltage_a * 0.0 reset_reg_voltage_a!(n::compute_neuron) = n.reg_voltage_a = n.reg_voltage_a * 0.0
reset_reg_voltage_b!(n::compute_neuron) = n.reg_voltage_b = n.reg_voltage_b * 0.0 reset_reg_voltage_b!(n::compute_neuron) = n.reg_voltage_b = n.reg_voltage_b * 0.0
@@ -57,7 +58,7 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
session session
""" """
# function reset_learning_no_wchange!(n::lif_neuron) # function reset_learning_no_wchange!(n::lif_neuron)
# reset_epsilon_rec!(n) # reset_epsilonRec!(n)
# # reset_v_t!(n) # # reset_v_t!(n)
# # reset_z_t!(n) # # reset_z_t!(n)
# # reset_reg_voltage_a!(n) # # reset_reg_voltage_a!(n)
@@ -73,7 +74,7 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
# # reset_refractory_state_active!(n) # # reset_refractory_state_active!(n)
# end # end
# function reset_learning_no_wchange!(n::Union{alif_neuron, elif_neuron}) # function reset_learning_no_wchange!(n::Union{alif_neuron, elif_neuron})
# reset_epsilon_rec!(n) # reset_epsilonRec!(n)
# reset_epsilon_rec_a!(n) # reset_epsilon_rec_a!(n)
# reset_v_t!(n) # reset_v_t!(n)
# reset_z_t!(n) # reset_z_t!(n)
@@ -99,8 +100,8 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
""" Reset all learning-related params at the END of learning session """ Reset all learning-related params at the END of learning session
""" """
function resetLearningParams!(n::lif_neuron) function resetLearningParams!(n::lif_neuron)
reset_epsilon_rec!(n) reset_epsilonRec!(n)
reset_w_rec_change!(n) reset_wRecChange!(n)
# reset_v_t!(n) # reset_v_t!(n)
# reset_z_t!(n) # reset_z_t!(n)
reset_firing_counter!(n) reset_firing_counter!(n)
@@ -111,9 +112,9 @@ function resetLearningParams!(n::lif_neuron)
reset_refractoryCounter!(n) reset_refractoryCounter!(n)
end end
function resetLearningParams!(n::alif_neuron) function resetLearningParams!(n::alif_neuron)
reset_epsilon_rec!(n) reset_epsilonRec!(n)
reset_epsilon_rec_a!(n) reset_epsilon_rec_a!(n)
reset_w_rec_change!(n) reset_wRecChange!(n)
# reset_v_t!(n) # reset_v_t!(n)
# reset_z_t!(n) # reset_z_t!(n)
# reset_a!(n) # reset_a!(n)
@@ -133,8 +134,8 @@ function resetLearningParams!(n::passthrough_neuron)
end end
function resetLearningParams!(n::linear_neuron) function resetLearningParams!(n::linear_neuron)
reset_epsilon_rec!(n) reset_epsilonRec!(n)
reset_w_rec_change!(n) reset_wRecChange!(n)
reset_v_t!(n) reset_v_t!(n)
reset_firing_counter!(n) reset_firing_counter!(n)
@@ -245,7 +246,7 @@ firing_diff!(n::compute_neuron) = n.firingDiff = n.firingRate - n.firingRateTarg
function neuroplasticity!(n::compute_neuron, firedNeurons::Vector) function neuroplasticity!(n::compute_neuron, firedNeurons::Vector)
# if there is 0-weight then replace it with new connection # if there is 0-weight then replace it with new connection
zero_weight_index = findall(iszero.(n.w_rec)) zero_weight_index = findall(iszero.(n.wRec))
if length(zero_weight_index) != 0 if length(zero_weight_index) != 0
""" sampling new connection from list of neurons that fire instead of random choose from """ sampling new connection from list of neurons that fire instead of random choose from
all compute neuron because there is no point to connect to neuron that not fires i.e. all compute neuron because there is no point to connect to neuron that not fires i.e.
@@ -262,7 +263,7 @@ function neuroplasticity!(n::compute_neuron, firedNeurons::Vector)
for i in zero_weight_index for i in zero_weight_index
if Utils.random_choices([true, false], percentage) if Utils.random_choices([true, false], percentage)
n.subscriptionList[i] = pop!(subscribe_options) n.subscriptionList[i] = pop!(subscribe_options)
n.w_rec[i] = 0.01 # new connection should not send large signal otherwise it would throw n.wRec[i] = 0.01 # new connection should not send large signal otherwise it would throw
# RSNN off path. Let weight grow by an optimiser # RSNN off path. Let weight grow by an optimiser
end end
end end

View File

@@ -34,7 +34,7 @@ Base.@kwdef mutable struct model <: Ironpen
"learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
correct answer is available then merge Δw_rec_change into wRecChange then correct answer is available then merge Δw_rec_change into wRecChange then
reset epsilon_j. reset epsilon_j.
"reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """ "reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """
learningStage::String = "inference" learningStage::String = "inference"
softreset::Bool = false softreset::Bool = false
@@ -102,7 +102,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
"learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
correct answer is available then merge Δw_rec_change into wRecChange then correct answer is available then merge Δw_rec_change into wRecChange then
reset epsilon_j. reset epsilon_j.
"reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """ "reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """
learningStage::String = "inference" learningStage::String = "inference"
error::Union{Float64,Nothing} = nothing error::Union{Float64,Nothing} = nothing
@@ -312,7 +312,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Number = 0.0 # current time timeStep::Number = 0.0 # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron) wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
@@ -340,7 +340,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
refractoryCounter::Integer = 0 refractoryCounter::Integer = 0
tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
eta::Union{Float64,Nothing} = 0.01 # η, learning rate eta::Union{Float64,Nothing} = 0.01 # η, learning rate
wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated w_rec change wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated wRec change
recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
error::Union{Float64,Nothing} = nothing # local neuron error error::Union{Float64,Nothing} = nothing # local neuron error
@@ -356,7 +356,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
"learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
correct answer is available then merge Δw_rec_change into wRecChange then correct answer is available then merge Δw_rec_change into wRecChange then
reset epsilon_j. reset epsilon_j.
"reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """ "reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """
learningStage::String = "inference" learningStage::String = "inference"
end end
@@ -405,7 +405,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Union{Number,Nothing} = nothing # current time timeStep::Union{Number,Nothing} = nothing # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron) wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 v_t_default::Union{Float64,Nothing} = 0.0
@@ -436,7 +436,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
# refractory_state_active::Union{Bool,Nothing} = false # if true, neuron is in refractory state and cannot process new information # refractory_state_active::Union{Bool,Nothing} = false # if true, neuron is in refractory state and cannot process new information
refractoryCounter::Integer = 0 refractoryCounter::Integer = 0
tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated w_rec change wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated wRec change
recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
error::Union{Float64,Nothing} = nothing # local neuron error error::Union{Float64,Nothing} = nothing # local neuron error
@@ -458,7 +458,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
"learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
correct answer is available then merge Δw_rec_change into wRecChange then correct answer is available then merge Δw_rec_change into wRecChange then
reset epsilon_j. reset epsilon_j.
"reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """ "reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """
learningStage::String = "inference" learningStage::String = "inference"
end end
@@ -513,7 +513,7 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
timeStep::Union{Number,Nothing} = nothing # current time timeStep::Union{Number,Nothing} = nothing # current time
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron) wRec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
@@ -542,7 +542,7 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
refractoryCounter::Integer = 0 refractoryCounter::Integer = 0
tau_out::Union{Float64,Nothing} = nothing # τ_out, membrane time constant in millisecond tau_out::Union{Float64,Nothing} = nothing # τ_out, membrane time constant in millisecond
eta::Union{Float64,Nothing} = 0.01 # η, learning rate eta::Union{Float64,Nothing} = 0.01 # η, learning rate
wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated w_rec change wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated wRec change
recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
error::Union{Float64,Nothing} = nothing # local neuron error error::Union{Float64,Nothing} = nothing # local neuron error
@@ -614,7 +614,7 @@ end
# end # end
# filter!(x -> x != n.id, n.subscriptionList) # filter!(x -> x != n.id, n.subscriptionList)
# n.epsilonRec = zeros(length(n.subscriptionList)) # n.epsilonRec = zeros(length(n.subscriptionList))
# n.w_rec = Random.rand(length(n.subscriptionList)) # n.wRec = Random.rand(length(n.subscriptionList))
# n.wRecChange = zeros(length(n.subscriptionList)) # n.wRecChange = zeros(length(n.subscriptionList))
# n.reg_voltage_b = zeros(length(n.subscriptionList)) # n.reg_voltage_b = zeros(length(n.subscriptionList))
# n.alpha = calculate_α(n) # n.alpha = calculate_α(n)
@@ -633,7 +633,7 @@ function init_neuron!(id::Int64, n::lif_neuron, n_params::Dict, kfnParams::Dict)
n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1) n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList)) n.wRec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList)) n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n) n.alpha = calculate_α(n)
end end
@@ -652,7 +652,7 @@ function init_neuron!(id::Int64, n::alif_neuron, n_params::Dict,
n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1) n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList)) n.wRec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList)) n.wRecChange = zeros(length(n.subscriptionList))
# the more time has passed from the last time neuron was activated, the more # the more time has passed from the last time neuron was activated, the more
@@ -673,7 +673,7 @@ function init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Di
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers] n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1) n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList)) n.wRec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList)) n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n) n.alpha = calculate_k(n)
end end