Fix bug in synapticConnStrength!()

This commit is contained in:
2023-05-18 11:14:50 +07:00
parent d1fe518739
commit 7db310d465
4 changed files with 33 additions and 12 deletions

View File

@@ -116,6 +116,7 @@ function (n::lifNeuron)(kfn::knowledgeFn)
# pulling other neuron's firing status at time t
n.z_i_t = getindex(kfn.firedNeurons_t0, n.subscriptionList)
n.z_i_t_commulative += n.z_i_t
if n.refractoryCounter != 0
n.refractoryCounter -= 1
@@ -157,6 +158,7 @@ function (n::alifNeuron)(kfn::knowledgeFn)
n.timeStep = kfn.timeStep
n.z_i_t = getindex(kfn.firedNeurons_t0, n.subscriptionList)
n.z_i_t_commulative += n.z_i_t
if n.refractoryCounter != 0
n.refractoryCounter -= 1
@@ -204,6 +206,7 @@ function (n::linearNeuron)(kfn::T) where T<:knowledgeFn
# pulling other neuron's firing status at time t
n.z_i_t = getindex(kfn.firedNeurons_t1, n.subscriptionList)
n.z_i_t_commulative += n.z_i_t
if n.refractoryCounter != 0
n.refractoryCounter -= 1

View File

@@ -49,9 +49,9 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
wSign_1 = sign.(n.wRec) # check for flipped sign, 1 indicates non-flipped sign
nonFlipedSign = isequal.(wSign_0, wSign_1) # 1 = not flipped, 0 = flipped
# normalize wRec peak to prevent input signal overwhelming neuron
normalizePeak!(n.wRec, 2)
n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
normalizePeak!(n.wRec, n.wRecChange, 2)
# set weights whose sign flipped to 0, freeing them for random new connections
n.wRec .*= nonFlipedSign
synapticConnStrength!(n)
#TODO neuroplasticity

View File

@@ -46,6 +46,7 @@ reset_reg_voltage_error!(n::computeNeuron) = n.reg_voltage_error = n.reg_voltage
reset_firing_counter!(n::Union{computeNeuron, outputNeuron}) = n.firingCounter = n.firingCounter * 0.0
reset_firing_diff!(n::Union{computeNeuron, linearNeuron}) = n.firingDiff = n.firingDiff * 0.0
reset_refractoryCounter!(n::Union{computeNeuron, outputNeuron}) = n.refractoryCounter = n.refractoryCounter * 0.0
reset_z_i_t_commulative!(n::Union{computeNeuron, outputNeuron}) = n.z_i_t_commulative = n.z_i_t_commulative * 0.0
# reset function for output neuron
reset_epsilon_j!(n::linearNeuron) = n.epsilon_j = n.epsilon_j * 0.0
@@ -109,7 +110,8 @@ function resetLearningParams!(n::lifNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
reset_refractoryCounter!(n)
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
function resetLearningParams!(n::alifNeuron)
reset_epsilonRec!(n)
@@ -124,6 +126,7 @@ function resetLearningParams!(n::alifNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
# function reset_learning_no_wchange!(n::passthroughNeuron)
@@ -142,6 +145,7 @@ function resetLearningParams!(n::linearNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
#------------------------------------------------------------------------------------------------100
@@ -279,8 +283,14 @@ end
function synapticConnStrength!(n::Union{computeNeuron, outputNeuron})
for (i, connStrength) in enumerate(n.synapticStrength)
# check whether connStrength increase or decrease based on usage from n.epsilonRec
#WORKING n.epsilonRec is all 0.0 why? may b it was reset? ANS: model fire at this timestep and gets reset epsilonRec during ΔwRecChange compute
updown = n.epsilonRec[i] == 0.0 ? "down" : "up"
""" use n.wRecChange instead of the best choise, epsilonRec, here because ΔwRecChange
calculation in learn!() will reset epsilonRec to zeroes vector in case where
output neuron fires and trigger learn!() just before this synapticConnStrength
calculation.
Since n.wRecChange indicates whether a synaptic connection were used or not, it is
ok to use. n.wRecChange also span across a training sample without resetting.
"""
updown = n.z_i_t_commulative[i] == 0 ? "down" : "up" #
updatedConnStrength = synapticConnStrength(connStrength, updown)
updatedConnStrength = GeneralUtils.limitvalue(updatedConnStrength,
n.synapticStrengthLimit.lowerlimit, n.synapticStrengthLimit.upperlimit)
@@ -288,6 +298,7 @@ function synapticConnStrength!(n::Union{computeNeuron, outputNeuron})
if updatedConnStrength == n.synapticStrengthLimit.lowerlimit[1]
n.wRec[i] = 0.0
end
n.synapticStrength[i] = updatedConnStrength
end
end
@@ -319,15 +330,16 @@ function neuroplasticity!(n::computeNeuron, firedNeurons::Vector)
end
""" normalize a part of a vector centering at a vector's maximum value along with nearby value
within its radius. radius must be odd number
within its radius. The radius must be an odd number.
v1 will be normalized based on v2's peak.
"""
function normalizePeak!(v::Vector, radius::Integer=2)
peak = findall(isequal.(abs.(v), maximum(abs.(v))))[1]
function normalizePeak!(v1::Vector, v2::Vector, radius::Integer=2)
peak = findall(isequal.(abs.(v2), maximum(abs.(v2))))[1]
upindex = peak - radius
upindex = upindex < 1 ? 1 : upindex
downindex = peak + radius
downindex = downindex > length(v) ? length(v) : downindex
subvector = view(v, upindex:downindex)
downindex = downindex > length(v1) ? length(v1) : downindex
subvector = view(v1, upindex:downindex)
normalize!(subvector, 1)
end

View File

@@ -322,6 +322,7 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
# during v_t1 calculation hence I need a variable to hold z_t1 so that I'm not replacing z_t
z_t1::Bool = false # neuron postsynaptic firing at current timestep (after neuron's calculation)
z_i_t::Union{Array{Bool},Nothing} = nothing # neuron presynaptic firing at current timestep (which is other neuron postsynaptic firing of previous timestep)
z_i_t_commulative::Union{Array{Integer},Nothing} = nothing # used to compute connection strength
synapticStrength::Union{Array{Float64},Nothing} = nothing
synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(0=>0), upperlimit=(10=>10))
@@ -413,6 +414,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
# during v_t1 calculation hence I need a variable to hold z_t1 so that I'm not replacing z_t
z_t1::Bool = false # neuron postsynaptic firing at current timestep (after neuron's calculation)
z_i_t::Union{Array{Bool},Nothing} = nothing # neuron presynaptic firing at current timestep (which is other neuron postsynaptic firing of previous timestep)
z_i_t_commulative::Union{Array{Integer},Nothing} = nothing # used to compute connection strength
synapticStrength::Union{Array{Float64},Nothing} = nothing
synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>0), upperlimit=(5=>5))
@@ -523,6 +525,7 @@ Base.@kwdef mutable struct linearNeuron <: outputNeuron
# neuron presynaptic firing at current timestep (which is other neuron postsynaptic firing of
# previous timestep)
z_i_t::Union{Array{Bool},Nothing} = nothing
z_i_t_commulative::Union{Array{Integer},Nothing} = nothing # used to compute connection strength
synapticStrength::Union{Array{Float64},Nothing} = nothing
synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
@@ -630,6 +633,7 @@ function init_neuron!(id::Int64, n::lifNeuron, n_params::Dict, kfnParams::Dict)
n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n)
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
@@ -654,6 +658,7 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.alpha = calculate_α(n)
n.rho = calculate_ρ(n)
n.epsilonRecA = zeros(length(n.subscriptionList))
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
@@ -671,6 +676,7 @@ function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dic
n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n)
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
""" Make a neuron intended for use with knowledgeFn