Each output neuron now connects to multiple randomly chosen compute neurons

This commit is contained in:
2023-05-15 14:13:45 +07:00
parent 68c8a3597d
commit 114161ba69
5 changed files with 94 additions and 75 deletions

View File

@@ -225,13 +225,6 @@ function kfn_1(kfnParams::Dict)
push!(kfn.outputNeuronsArray, neuron)
end
# random which neuron output port subscribed to, 1-compute_neuron for each output port
sub_list = shuffle!([kfn.kfnParams[:total_input_port]+1:length(kfn.neuronsArray)...])
sub_output_neuron = [pop!(sub_list) for i in 1:kfn.kfnParams[:output_port][:numbers]]
for i in kfn.outputNeuronsArray
i.subscriptionList = [pop!(sub_output_neuron)]
end
for n in kfn.neuronsArray
if typeof(n) <: compute_neuron
n.firingRateTarget = kfn.kfnParams[:neuron_firing_rate_target]
@@ -262,6 +255,17 @@ function kfn_1(kfnParams::Dict)
end
end
# add ExInType into each output neuron subExInType
for n in kfn.outputNeuronsArray
try # input neuron doest have n.subscriptionList
for sub_id in n.subscriptionList
n_ExInType = kfn.neuronsArray[sub_id].ExInType
push!(n.subExInType, n_ExInType)
end
catch
end
end
return kfn
end
@@ -309,7 +313,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Number = 0.0 # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
@@ -342,7 +346,7 @@ Base.@kwdef mutable struct lif_neuron <: compute_neuron
error::Union{Float64,Nothing} = nothing # local neuron error
optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
firingCounter::Float64 = 0.0 # store how many times neuron fires
firingCounter::Integer = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
@@ -402,7 +406,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
timeStep::Union{Number,Nothing} = nothing # current time
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t::Float64 = rand() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
@@ -438,7 +442,7 @@ Base.@kwdef mutable struct alif_neuron <: compute_neuron
error::Union{Float64,Nothing} = nothing # local neuron error
optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
firingCounter::Float64 = 0.0 # store how many times neuron fires
firingCounter::Integer = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
@@ -507,16 +511,14 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
subscriptionList::Union{Array{Int64},Nothing} = nothing # list of other neuron that this neuron synapse subscribed to
timeStep::Union{Number,Nothing} = nothing # current time
out_t::Bool = false # output of linear neuron BEFORE forward()
out_t1::Bool = false # output of linear neuron AFTER forward()
#WORKING
subExInType::Array{Int64} = Vector{Int64}() # store ExIn type of subscribed neurons
w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = 0.0 # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
vRest::Float64 = 0.0 # resting potential after neuron fired
vRest::Float64 = 0.0 # resting potential after neuron fired
# zᵗ⁺¹, neuron firing status at time = t+1. I need this because the way I calculate all
# neurons forward function at each timestep-by-timestep is to do every neuron
# forward calculation. Each neuron requires access to other neuron's firing status
@@ -537,12 +539,14 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
lastFiringTime::Union{Float64,Nothing} = 0.0 # the last time neuron fires, use to calculate exponantial decay of v_t1
refractoryDuration::Union{Float64,Nothing} = 3 # neuron's refratory period in millisecond
refractoryCounter::Integer = 0
tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
tau_out::Union{Float64,Nothing} = nothing # τ_out, membrane time constant in millisecond
eta::Union{Float64,Nothing} = 0.01 # η, learning rate
wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, cumulated w_rec change
recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
error::Union{Float64,Nothing} = nothing # local neuron error
firingCounter::Integer = 0 # store how many times neuron fires
end
""" linear neuron outer constructor
@@ -648,39 +652,28 @@ function init_neuron!(id::Int64, n::alif_neuron, n_params::Dict,
n.epsilonRec = zeros(length(n.subscriptionList))
n.w_rec = Random.rand(length(n.subscriptionList))
n.wRecChange = zeros(length(n.subscriptionList))
# n.reg_voltage_b = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n) # the more time has passed from the last time neuron was
# activated, the more neuron membrane potential is reduced
# the more time has passed from the last time neuron was activated, the more
# neuron membrane potential is reduced
n.alpha = calculate_α(n)
n.rho = calculate_ρ(n)
n.epsilonRecA = zeros(length(n.subscriptionList))
end
# function init_neuron!(id::Int64, n::linear_neuron, kfnParams::Dict)
# n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
# start_id = kfnParams[:input_neuron_number] + 1 # don't readout from input neurons
# n.subscriptionList = [start_id:(start_id+kfnParams[:compute_neuron_number]-1)...]
# n.epsilon_j = zeros(length(n.subscriptionList))
# n.w_out = Random.randn(length(n.subscriptionList))
# n.w_out_change = zeros(length(n.subscriptionList))
# n.b = Random.randn()
# n.b_change = 0.0
# n.k = calculate_k(n)
# end
#WORKING
"""
    init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Dict)

Initialise a `linear_neuron` in place: assign its id and owning knowledgeFn,
randomly pick the neurons it subscribes to, and allocate the per-synapse state
vectors (`epsilonRec`, `w_rec`, `wRecChange`) sized to the subscription list.
Mutates `n`; the last assignment (`n.alpha`) is the returned value, as before.
"""
function init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Dict)
    n.id = id
    n.knowledgeFnName = kfnParams[:knowledgeFnName]
    # Candidate presynaptic ids: everything after the input ports, shuffled so
    # that popping from the end yields a uniform random sample without repeats.
    # NOTE(review): the upper bound is :total_neurons, so the pool may also
    # contain other output neurons, not only compute neurons — confirm intended.
    subscription_options =
        shuffle!(collect(kfnParams[:total_input_port]+1:kfnParams[:total_neurons]))
    # Connection count = :synaptic_connection_number percent of the
    # compute-neuron population, rounded down.
    subscription_numbers = floor(Int, n_params[:synaptic_connection_number] *
                                      kfnParams[:total_compute_neuron] / 100.0)
    n.subscriptionList = [pop!(subscription_options) for _ in 1:subscription_numbers]
    # Per-synapse state, one slot per subscribed neuron.
    n.epsilonRec = zeros(length(n.subscriptionList))
    n.w_rec = Random.rand(length(n.subscriptionList))
    n.wRecChange = zeros(length(n.subscriptionList))
    # NOTE(review): `alpha` is set from calculate_k here, while the compute
    # neuron types use calculate_α — verify the naming mismatch is intentional.
    n.alpha = calculate_k(n)
end
""" Make a neuron intended for use with knowledgeFn
@@ -733,15 +726,6 @@ end
# add_n_output_n!(Random.rand(kfn.outputNeuronsArray), id)
# end
""" Add a new neuron to output neuron's subscriptionList
"""
function add_n_output_n!(o_n::linear_neuron, id::Int64)
push!(o_n.subscriptionList, id)
push!(o_n.epsilon_j, 0.0)
push!(o_n.w_out, Random.randn(1)[1])
push!(o_n.w_out_change, 0.0)
end
"""
    calculate_α(neuron)

Membrane-potential decay factor over one simulation step: `exp(-delta / tau_m)`.
Defined for both LIF and ALIF compute neurons.
"""
function calculate_α(neuron::lif_neuron)
    return exp(-neuron.delta / neuron.tau_m)
end

function calculate_α(neuron::alif_neuron)
    return exp(-neuron.delta / neuron.tau_m)
end

"""
    calculate_ρ(neuron::alif_neuron)

Adaptation-variable decay factor over one simulation step: `exp(-delta / tau_a)`.
"""
function calculate_ρ(neuron::alif_neuron)
    return exp(-neuron.delta / neuron.tau_a)
end