reset_epsilonRec after ΔwRecChange is calculated

This commit is contained in:
2023-05-16 22:04:47 +07:00
parent 0ac5a703ea
commit 7d943bce6a
5 changed files with 75 additions and 120 deletions

View File

@@ -3,11 +3,11 @@ module snn_utils
using Flux.Optimise: apply!
export calculate_α, calculate_ρ, calculate_k, timestep_forward!, init_neuron, no_negative!,
precision, calculate_w_change!, store_knowledgefn_error!, interneurons_adjustment!,
reset_z_t!, resetLearningParams!, reset_learning_history_params!,
reset_z_t!, resetLearningParams!, reset_learning_history_params!, reset_epsilonRec!,
cal_v_reg!, calculate_w_change_end!,
firing_rate_error!, firing_rate_regulator!, update_Bn!, cal_firing_reg!,
neuroplasticity!, shakeup!, reset_learning_no_wchange!, adjust_internal_learning_rate!,
gradient_withloss
gradient_withloss
using Statistics, Random, LinearAlgebra, Distributions, Zygote, Flux
@@ -32,12 +32,13 @@ reset_last_firing_time!(n::compute_neuron) = n.lastFiringTime = 0.0
reset_refractory_state_active!(n::compute_neuron) = n.refractory_state_active = false
reset_v_t!(n::neuron) = n.v_t = n.vRest
reset_z_t!(n::compute_neuron) = n.z_t = false
reset_epsilon_rec!(n::compute_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilonRec!(n::compute_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilonRec!(n::output_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilon_rec_a!(n::alif_neuron) = n.epsilonRecA = n.epsilonRecA * 0.0
reset_epsilon_in!(n::compute_neuron) = n.epsilon_in = isnothing(n.epsilon_in) ? nothing : n.epsilon_in * 0.0
reset_error!(n::Union{compute_neuron, linear_neuron}) = n.error = nothing
reset_w_in_change!(n::compute_neuron) = n.w_in_change = isnothing(n.w_in_change) ? nothing : n.w_in_change * 0.0
reset_w_rec_change!(n::compute_neuron) = n.wRecChange = n.wRecChange * 0.0
reset_wRecChange!(n::compute_neuron) = n.wRecChange = n.wRecChange * 0.0
reset_a!(n::alif_neuron) = n.a = n.a * 0.0
reset_reg_voltage_a!(n::compute_neuron) = n.reg_voltage_a = n.reg_voltage_a * 0.0
reset_reg_voltage_b!(n::compute_neuron) = n.reg_voltage_b = n.reg_voltage_b * 0.0
@@ -57,7 +58,7 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
session
"""
# function reset_learning_no_wchange!(n::lif_neuron)
# reset_epsilon_rec!(n)
# reset_epsilonRec!(n)
# # reset_v_t!(n)
# # reset_z_t!(n)
# # reset_reg_voltage_a!(n)
@@ -73,7 +74,7 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
# # reset_refractory_state_active!(n)
# end
# function reset_learning_no_wchange!(n::Union{alif_neuron, elif_neuron})
# reset_epsilon_rec!(n)
# reset_epsilonRec!(n)
# reset_epsilon_rec_a!(n)
# reset_v_t!(n)
# reset_z_t!(n)
@@ -99,8 +100,8 @@ reset_b_change!(n::linear_neuron) = n.b_change = n.b_change * 0.0
""" Reset all learning-related params at the END of learning session
"""
function resetLearningParams!(n::lif_neuron)
reset_epsilon_rec!(n)
reset_w_rec_change!(n)
reset_epsilonRec!(n)
reset_wRecChange!(n)
# reset_v_t!(n)
# reset_z_t!(n)
reset_firing_counter!(n)
@@ -111,9 +112,9 @@ function resetLearningParams!(n::lif_neuron)
reset_refractoryCounter!(n)
end
function resetLearningParams!(n::alif_neuron)
reset_epsilon_rec!(n)
reset_epsilonRec!(n)
reset_epsilon_rec_a!(n)
reset_w_rec_change!(n)
reset_wRecChange!(n)
# reset_v_t!(n)
# reset_z_t!(n)
# reset_a!(n)
@@ -133,8 +134,8 @@ function resetLearningParams!(n::passthrough_neuron)
end
function resetLearningParams!(n::linear_neuron)
reset_epsilon_rec!(n)
reset_w_rec_change!(n)
reset_epsilonRec!(n)
reset_wRecChange!(n)
reset_v_t!(n)
reset_firing_counter!(n)
@@ -245,7 +246,7 @@ firing_diff!(n::compute_neuron) = n.firingDiff = n.firingRate - n.firingRateTarg
function neuroplasticity!(n::compute_neuron, firedNeurons::Vector)
# if there is a 0-weight entry then replace it with a new connection
zero_weight_index = findall(iszero.(n.w_rec))
zero_weight_index = findall(iszero.(n.wRec))
if length(zero_weight_index) != 0
""" sampling new connection from list of neurons that fires instead of ramdom choose from
all compute neuron because there is no point to connect to neuron that not fires i.e.
@@ -262,7 +263,7 @@ function neuroplasticity!(n::compute_neuron, firedNeurons::Vector)
for i in zero_weight_index
if Utils.random_choices([true, false], percentage)
n.subscriptionList[i] = pop!(subscribe_options)
n.w_rec[i] = 0.01 # new connection should not send large signal otherwise it would throw
n.wRec[i] = 0.01 # new connection should not send large signal otherwise it would throw
# RSNN off path. Let weight grow by an optimiser
end
end