Output neuron connects to multiple random compute neurons

This commit is contained in:
2023-05-15 14:13:45 +07:00
parent 68c8a3597d
commit 114161ba69
5 changed files with 94 additions and 75 deletions

View File

@@ -39,8 +39,6 @@ using .interface
(vth - vt)*100/vth as error
if output neuron activates when it should NOT, use output neuron's
(vt*100)/vth as error
[*4] output neuron connect to random multiple compute neurons and have the same structure
as lif
[8] verify that model can complete learning cycle with no error
[5] synaptic connection strength concept. use sigmoid
[6] neuroplasticity() i.e. change connection
@@ -54,6 +52,8 @@ using .interface
[DONE] add excitatory, inhibitory to neuron
[DONE] implement "start learning", reset learning and "learning", "end_learning and
"inference"
[DONE] output neuron connect to random multiple compute neurons and overall have
the same structure as lif
Change from version: v06_36a
-

View File

@@ -43,9 +43,6 @@ function (kfn::kfn_1)(m::model, input_data::AbstractVector)
for n in kfn.neuronsArray
timestep_forward!(n)
end
for n in kfn.outputNeuronsArray
timestep_forward!(n)
end
# pass input_data into input neuron.
# number of data point equals to number of input neuron starting from id 1
@@ -71,7 +68,7 @@ function (kfn::kfn_1)(m::model, input_data::AbstractVector)
n(kfn)
end
out = [n.out_t1 for n in kfn.outputNeuronsArray]
out = [n.z_t1 for n in kfn.outputNeuronsArray]
return out
end
@@ -177,7 +174,40 @@ end
"""
function (n::linear_neuron)(kfn::T) where T<:knowledgeFn
n.timeStep = kfn.timeStep
n.out_t1 = getindex(kfn.firedNeurons_t1, n.subscriptionList)[1]
# pulling other neuron's firing status at time t
n.z_i_t = getindex(kfn.firedNeurons_t0, n.subscriptionList)
n.z_i_t .*= n.subExInType
if n.refractoryCounter != 0
n.refractoryCounter -= 1
# neuron is in refractory state, skip all calculation
n.z_t1 = false # used by timestep_forward() in kfn. Set to zero because a neuron spike
# lasts only 1 timestep, followed by a refractory period.
n.recSignal = n.recSignal * 0.0
# Exponential decay of v_t1
n.v_t1 = n.v_t * n.alpha^(n.timeStep - n.lastFiringTime) # or n.v_t1 = n.alpha * n.v_t
else
n.recSignal = sum(n.w_rec .* n.z_i_t) # signal from other neuron that this neuron subscribed
n.alpha_v_t = n.alpha * n.v_t
n.v_t1 = n.alpha_v_t + n.recSignal
n.v_t1 = no_negative!.(n.v_t1)
if n.v_t1 > n.v_th
n.z_t1 = true
n.refractoryCounter = n.refractoryDuration
n.firingCounter += 1
n.v_t1 = n.vRest
else
n.z_t1 = false
end
# there is a difference from alif formula
n.phi = (n.gammaPd / n.v_th) * max(0, 1 - (n.v_t1 - n.v_th) / n.v_th)
end
end

View File

@@ -54,7 +54,8 @@ function learn!(kfn::kfn_1, correctAnswer=nothing)
kfn.outputs = nothing
kfn.learningStage = "learning"
elseif kfn.learningStage = "end_learning"
elseif kfn.learningStage == "end_learning"
reset_learning_params!(n)
kfn.learningStage = "inference"
end

View File

@@ -24,17 +24,13 @@ function timestep_forward!(x::compute_neuron)
x.v_t = x.v_t1
end
function timestep_forward!(x::linear_neuron)
x.out_t = x.out_t1
end
no_negative!(x) = x < 0.0 ? 0.0 : x
precision(x::Array{<:Array}) = ( std(mean.(x)) / mean(mean.(x)) ) * 100
# reset functions for LIF/ALIF neuron
reset_last_firing_time!(n::compute_neuron) = n.lastFiringTime = 0.0
reset_refractory_state_active!(n::compute_neuron) = n.refractory_state_active = false
reset_v_t!(n::compute_neuron) = n.v_t = n.v_t_default
reset_v_t!(n::neuron) = n.v_t = n.vRest
reset_z_t!(n::compute_neuron) = n.z_t = false
reset_epsilon_rec!(n::compute_neuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilon_rec_a!(n::alif_neuron) = n.epsilonRecA = n.epsilonRecA * 0.0
@@ -48,6 +44,7 @@ reset_reg_voltage_b!(n::compute_neuron) = n.reg_voltage_b = n.reg_voltage_b * 0.
reset_reg_voltage_error!(n::compute_neuron) = n.reg_voltage_error = n.reg_voltage_error * 0.0
reset_firing_counter!(n::compute_neuron) = n.firingCounter = n.firingCounter * 0.0
reset_firing_diff!(n::Union{compute_neuron, linear_neuron}) = n.firingDiff = n.firingDiff * 0.0
reset_refractoryCounter!(n::Union{compute_neuron, linear_neuron}) = n.refractoryCounter = n.refractoryCounter * 0.0
# reset function for output neuron
reset_epsilon_j!(n::linear_neuron) = n.epsilon_j = n.epsilon_j * 0.0
@@ -106,17 +103,14 @@ function reset_learning_params!(n::lif_neuron)
reset_w_rec_change!(n)
# reset_v_t!(n)
# reset_z_t!(n)
# reset_reg_voltage_a!(n)
# reset_reg_voltage_b!(n)
# reset_reg_voltage_error!(n)
reset_firing_counter!(n)
reset_firing_diff!(n)
reset_previous_error!(n)
reset_error!(n)
# # reset refractory state at the end of episode. Otherwise once neuron goes into refractory state,
# # it will stay in refractory state forever
# reset_refractory_state_active!(n)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
reset_refractoryCounter!(n)
end
function reset_learning_params!(n::alif_neuron)
reset_epsilon_rec!(n)
@@ -125,17 +119,14 @@ function reset_learning_params!(n::alif_neuron)
# reset_v_t!(n)
# reset_z_t!(n)
# reset_a!(n)
# reset_reg_voltage_a!(n)
# reset_reg_voltage_b!(n)
# reset_reg_voltage_error!(n)
reset_firing_counter!(n)
reset_firing_diff!(n)
reset_previous_error!(n)
reset_error!(n)
# # reset refractory state at the end of episode. Otherwise once neuron goes into refractory state,
# # it will stay in refractory state forever
# reset_refractory_state_active!(n)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
reset_refractoryCounter!(n)
end
# function reset_learning_no_wchange!(n::passthrough_neuron)
@@ -144,7 +135,20 @@ end
function reset_learning_params!(n::passthrough_neuron)
# skip
end
#WORKING
function reset_learning_params!(n::linear_neuron)
reset_epsilon_rec!(n)
reset_w_rec_change!(n)
reset_v_t!(n)
reset_firing_counter!(n)
reset_firing_diff!(n)
reset_previous_error!(n)
reset_error!(n)
# reset refractory state at the start/end of episode. Otherwise once neuron goes into
# refractory state, it will stay in refractory state forever
reset_refractoryCounter!(n)
end
#------------------------------------------------------------------------------------------------100
function store_knowledgefn_error!(kfn::knowledgeFn)

View File

@@ -225,13 +225,6 @@ function kfn_1(kfnParams::Dict)
push!(kfn.outputNeuronsArray, neuron)
end
# random which neuron output port subscribed to, 1-compute_neuron for each output port
sub_list = shuffle!([kfn.kfnParams[:total_input_port]+1:length(kfn.neuronsArray)...])
sub_output_neuron = [pop!(sub_list) for i in 1:kfn.kfnParams[:output_port][:numbers]]
for i in kfn.outputNeuronsArray
i.subscriptionList = [pop!(sub_output_neuron)]
end
for n in kfn.neuronsArray
if typeof(n) <: compute_neuron
n.firingRateTarget = kfn.kfnParams[:neuron_firing_rate_target]
@@ -262,6 +255,17 @@ function kfn_1(kfnParams::Dict)
end
end
# add ExInType into each output neuron subExInType
for n in kfn.outputNeuronsArray
try # input neuron doesn't have n.subscriptionList
for sub_id in n.subscriptionList
n_ExInType = kfn.neuronsArray[sub_id].ExInType
push!(n.subExInType, n_ExInType)
end
catch
end
end
return kfn
end
@@ -309,7 +313,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Number = 0.0 # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
@@ -342,7 +346,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
error::Union{Float64,Nothing} = nothing # local neuron error
optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
firingCounter::Float64 = 0.0 # store how many times neuron fires
firingCounter::Integer = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
@@ -402,7 +406,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Union{Number,Nothing} = nothing # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
@@ -438,7 +442,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
error::Union{Float64,Nothing} = nothing # local neuron error
optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
firingCounter::Float64 = 0.0 # store how many times neuron fires
firingCounter::Integer = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
@@ -507,16 +511,14 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to
timeStep::Union{Number,Nothing} = nothing # current time
out_t::Bool = false # output of linear neuron BEFORE forward()
out_t1::Bool = false # output of linear neuron AFTER forward()
#WORKING
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
vRest::Float64 = 0.0 # resting potential after neuron fired
vRest::Float64 = 0.0 # resting potential after neuron fired
# zᵗ⁺¹, neuron firing status at time = t+1. I need this because the way I calculate all
# neurons forward function at each timestep-by-timestep is to do every neuron
# forward calculation. Each neuron requires access to other neuron's firing status
@@ -537,12 +539,14 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
lastFiringTime::Union{Float64,Nothing} = 0.0 # the last time neuron fires, used to calculate exponential decay of v_t1
refractoryDuration::Union{Float64,Nothing} = 3 # neuron's refractory period in milliseconds
refractoryCounter::Integer = 0
tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
tau_out::Union{Float64,Nothing} = nothing # τ_out, membrane time constant in millisecond
eta::Union{Float64,Nothing} = 0.01 # η, learning rate
wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated w_rec change
recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
error::Union{Float64,Nothing} = nothing # local neuron error
firingCounter::Integer = 0 # store how many times neuron fires
end
""" linear neuron outer constructor
@@ -648,39 +652,28 @@ function init_neuron!(id::Int64, n::alif_neuron, n_params::Dict,
n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList))
# n.reg_voltage_b = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n) # the more time has passed from the last time neuron was
# activated, the more neuron membrane potential is reduced
# the more time has passed from the last time neuron was activated, the more
# neuron membrane potential is reduced
n.alpha = calculate_α(n)
n.rho = calculate_ρ(n)
n.epsilonRecA = zeros(length(n.subscriptionList))
end
# function init_neuron!(id::Int64, n::linear_neuron, kfnParams::Dict)
# n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
# start_id = kfnParams[:input_neuron_number] + 1 # don't readout from input neurons
# n.subscriptionList = [start_id:(start_id+kfnParams[:compute_neuron_number]-1)...]
# n.epsilon_j = zeros(length(n.subscriptionList))
# n.w_out = Random.randn(length(n.subscriptionList))
# n.w_out_change = zeros(length(n.subscriptionList))
# n.b = Random.randn()
# n.b_change = 0.0
# n.k = calculate_k(n)
# end
#WORKING
function init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Dict)
n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName]
# start_id = kfnParams[:total_input_port] + 1 # don't readout from input neurons
# subscription_options = [start_id:(start_id+kfnParams[:total_compute_neuron]-1)...]
# n.subscriptionList = [rand(subscription_options)]
subscription_options = shuffle!([kfnParams[:total_input_port]+1 : kfnParams[:total_neurons]...])
subscription_numbers = Int(floor(n_params[:synaptic_connection_number] *
kfnParams[:total_compute_neuron] / 100.0))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
# n.epsilon_j = zeros(length(n.subscriptionList))
# n.w_out = Random.randn(length(n.subscriptionList))
# n.w_out_change = zeros(length(n.subscriptionList))
# n.b = Random.randn()
# n.b_change = 0.0
# n.k = calculate_k(n)
n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n)
end
""" Make a neuron intended for use with knowledgeFn
@@ -733,15 +726,6 @@ end
# add_n_output_n!(Random.rand(kfn.outputNeuronsArray), id)
# end
""" Add a new neuron to output neuron's subscriptionList
"""
function add_n_output_n!(o_n::linear_neuron, id::Int64)
push!(o_n.subscriptionList, id)
push!(o_n.epsilon_j, 0.0)
push!(o_n.w_out, Random.randn(1)[1])
push!(o_n.w_out_change, 0.0)
end
calculate_α(neuron::lif_neuron) = exp(-neuron.delta / neuron.tau_m)
calculate_α(neuron::alif_neuron) = exp(-neuron.delta / neuron.tau_m)
calculate_ρ(neuron::alif_neuron) = exp(-neuron.delta / neuron.tau_a)