bug fix synapticConnStrength!()

This commit is contained in:
2023-05-18 11:14:50 +07:00
parent d1fe518739
commit 7db310d465
4 changed files with 33 additions and 12 deletions

View File

@@ -46,6 +46,7 @@ reset_reg_voltage_error!(n::computeNeuron) = n.reg_voltage_error = n.reg_voltage
reset_firing_counter!(n::Union{computeNeuron, outputNeuron}) = n.firingCounter = n.firingCounter * 0.0
reset_firing_diff!(n::Union{computeNeuron, linearNeuron}) = n.firingDiff = n.firingDiff * 0.0
reset_refractoryCounter!(n::Union{computeNeuron, outputNeuron}) = n.refractoryCounter = n.refractoryCounter * 0.0
reset_z_i_t_commulative!(n::Union{computeNeuron, outputNeuron}) = n.z_i_t_commulative = n.z_i_t_commulative * 0.0
# reset function for output neuron
reset_epsilon_j!(n::linearNeuron) = n.epsilon_j = n.epsilon_j * 0.0
@@ -109,7 +110,8 @@ function resetLearningParams!(n::lifNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
reset_refractoryCounter!(n)
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
function resetLearningParams!(n::alifNeuron)
reset_epsilonRec!(n)
@@ -124,6 +126,7 @@ function resetLearningParams!(n::alifNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
# function reset_learning_no_wchange!(n::passthroughNeuron)
@@ -142,6 +145,7 @@ function resetLearningParams!(n::linearNeuron)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
# reset_refractoryCounter!(n)
reset_z_i_t_commulative!(n)
end
#------------------------------------------------------------------------------------------------100
@@ -279,15 +283,22 @@ end
function synapticConnStrength!(n::Union{computeNeuron, outputNeuron})
for (i, connStrength) in enumerate(n.synapticStrength)
# check whether connStrength increase or decrease based on usage from n.epsilonRec
#WORKING n.epsilonRec is all 0.0 why? may b it was reset? ANS: model fire at this timestep and gets reset epsilonRec during ΔwRecChange compute
updown = n.epsilonRec[i] == 0.0 ? "down" : "up"
""" use n.wRecChange instead of the best choise, epsilonRec, here because ΔwRecChange
calculation in learn!() will reset epsilonRec to zeroes vector in case where
output neuron fires and trigger learn!() just before this synapticConnStrength
calculation.
Since n.wRecChange indicates whether a synaptic connection were used or not, it is
ok to use. n.wRecChange also span across a training sample without resetting.
"""
updown = n.z_i_t_commulative[i] == 0 ? "down" : "up" #
updatedConnStrength = synapticConnStrength(connStrength, updown)
updatedConnStrength = GeneralUtils.limitvalue(updatedConnStrength,
n.synapticStrengthLimit.lowerlimit, n.synapticStrengthLimit.upperlimit)
n.synapticStrengthLimit.lowerlimit, n.synapticStrengthLimit.upperlimit)
# at lowerlimit, mark wRec at this position to 0. for new random synaptic conn
if updatedConnStrength == n.synapticStrengthLimit.lowerlimit[1]
n.wRec[i] = 0.0
end
n.synapticStrength[i] = updatedConnStrength
end
end
@@ -319,15 +330,16 @@ function neuroplasticity!(n::computeNeuron, firedNeurons::Vector)
end
""" normalize a part of a vector centering at a vector's maximum value along with nearby value
within its radius. radius must be odd number
within its radius. The radius must be an odd number.
v1 will be normalized based on v2's peak.
"""
function normalizePeak!(v::Vector, radius::Integer=2)
peak = findall(isequal.(abs.(v), maximum(abs.(v))))[1]
function normalizePeak!(v1::Vector, v2::Vector, radius::Integer=2)
peak = findall(isequal.(abs.(v2), maximum(abs.(v2))))[1]
upindex = peak - radius
upindex = upindex < 1 ? 1 : upindex
downindex = peak + radius
downindex = downindex > length(v) ? length(v) : downindex
subvector = view(v, upindex:downindex)
downindex = downindex > length(v1) ? length(v1) : downindex
subvector = view(v1, upindex:downindex)
normalize!(subvector, 1)
end