minor fix

This commit is contained in:
ton
2023-07-05 07:36:05 +07:00
parent ecf0325e7d
commit 4e23fc4d69
21 changed files with 4767 additions and 201 deletions

View File

@@ -10,7 +10,7 @@ export
instantiate_custom_types, init_neuron, populate_neuron,
add_neuron!
using Random, LinearAlgebra
using Random, LinearAlgebra, Flux
#------------------------------------------------------------------------------------------------100
@@ -117,7 +117,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
nExcitatory::Array{Int64} =Int64[] # list of excitatory neuron id
nInhabitory::Array{Int64} = Int64[] # list of inhabitory neuron id
nExInType::Array{Int64} = Int64[] # list all neuron EX or IN
excitatoryPercent::Int64 = 60 # percentage of excitatory neuron, inhabitory percent will be 100-ExcitatoryPercent
excitatoryPercent::Int64 = 70 # percentage of excitatory neuron, inhabitory percent will be 100-ExcitatoryPercent
end
#------------------------------------------------------------------------------------------------100
@@ -193,13 +193,6 @@ function kfn_1(kfnParams::Dict)
throw(error("number of compute neuron must be greater than input neuron"))
end
# # Bn
# if kfn.kfnParams[:Bn] == "random"
# kfn.Bn = [Random.rand(0:0.001:1) for i in 1:kfn.kfnParams[:computeNeuronNumber]]
# else # in case I want to specify manually
# kfn.Bn = [kfn.kfnParams[:Bn] for i in 1:kfn.kfnParams[:computeNeuronNumber]]
# end
# assign neuron IDs by their position in the kfn.neurons array because it is the
# most straightforward way
@@ -229,12 +222,6 @@ function kfn_1(kfnParams::Dict)
push!(kfn.outputNeuronsArray, neuron)
end
for n in kfn.neuronsArray
if typeof(n) <: computeNeuron
n.firingRateTarget = kfn.kfnParams[:neuronFiringRateTarget]
end
end
# excitatory neurons to inhibitory neurons = 60:40 % of computeNeuron
ex_number = Int(floor((kfn.excitatoryPercent/100.0) * kfn.kfnParams[:computeNeuronNumber]))
ex_n = [1 for i in 1:ex_number]
@@ -265,21 +252,23 @@ function kfn_1(kfnParams::Dict)
end
end
# # add ExInType into each output neuron subExInType
# for n in kfn.outputNeuronsArray
# try # input neuron doesn't have n.subscriptionList
# for (i, sub_id) in enumerate(n.subscriptionList)
# n_ExInType = kfn.neuronsArray[sub_id].ExInType
# n.wRec[i] *= n_ExInType
# end
# catch
# end
# end
# add ExInType into each output neuron subExInType
for n in kfn.outputNeuronsArray
try # input neuron doest have n.subscriptionList
for (i, sub_id) in enumerate(n.subscriptionList)
n_ExInType = kfn.neuronsArray[sub_id].ExInType
n.wRec[i] *= n_ExInType
end
catch
end
end
for n in kfn.neuronsArray
push!(kfn.nExInType, n.ExInType)
end
return kfn
end
@@ -341,8 +330,15 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
alpha::Float64 = 0.99
alpha_wSignal::Float64 = 2.0
alpha_wPotential::Float64 = 2.0
alpha_b::Float64 = 2.0
alpha_wSignalChange::Float64 = 0.0
alpha_wPotentialChange::Float64 = 0.0
alpha_bChange::Float64 = 0.0
phi::Float64 = 0.0 # ϕ, psuedo derivative
epsilonRec::Array{Float64} = Float64[] # ϵ_rec, eligibility vector for neuron spike
decayedEpsilonRec::Array{Float64} = Float64[] # α * epsilonRec
@@ -364,7 +360,7 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate in Hz
notFireTimeOut::Int64 = 100 # consecutive count of not firing. Should be the same as batch size
notFireTimeOut::Int64 = 10 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
""" "inference" = no learning params will be collected.
@@ -434,8 +430,22 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
synapticStrength::Array{Float64} = Float64[]
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>0), upperlimit=(5=>5))
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
alpha::Float64 = 0.99
alpha_wSignal::Float64 = 2.0
alpha_wPotential::Float64 = 2.0
alpha_b::Float64 = 2.0
alpha_wSignalChange::Float64 = 0.0
alpha_wPotentialChange::Float64 = 0.0
alpha_bChange::Float64 = 0.0
# alpha::Vector{Float64} = Float64[]
# alpha_wSignal::Vector{Float64} = Float64[]
# alpha_wPotential::Float64 = randn() / 100
# alpha_b::Vector{Float64} = Float64[]
# alpha_wSignalChange::Vector{Float64} = Float64[]
# alpha_wPotentialChange::Float64 = 0.0
# alpha_bChange::Vector{Float64} = Float64[]
delta::Float64 = 1.0 # δ, discreate timestep size in millisecond
epsilonRec::Array{Float64} = Float64[] # ϵ_rec(v), eligibility vector for neuron i spike
epsilonRecA::Array{Float64} = Float64[] # ϵ_rec(a)
@@ -461,7 +471,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate, Hz
notFireTimeOut::Int64 = 100 # consecutive count of not firing. Should be the same as batch size
notFireTimeOut::Int64 = 10 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
tau_a::Float64 = 100.0 # τ_a, adaption time constant in millisecond
@@ -546,8 +556,16 @@ Base.@kwdef mutable struct linearNeuron <: outputNeuron
synapticStrength::Array{Float64} = Float64[]
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from
alpha::Float64 = 0.99
alpha_wSignal::Float64 = 2.0
alpha_wPotential::Float64 = 2.0
alpha_b::Float64 = 2.0
alpha_wSignalChange::Float64 = 0.0
alpha_wPotentialChange::Float64 = 0.0
alpha_bChange::Float64 = 0.0
phi::Float64 = 0.0 # ϕ, psuedo derivative
epsilonRec::Array{Float64} = Float64[] # ϵ_rec, eligibility vector for neuron spike
decayedEpsilonRec::Array{Float64} = Float64[] # α * epsilonRec
@@ -562,6 +580,14 @@ Base.@kwdef mutable struct linearNeuron <: outputNeuron
alpha_v_t::Float64 = 0.0 # alpha * v_t
firingCounter::Int64 = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate in Hz
notFireTimeOut::Int64 = 10 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
ExInSignalSum::Float64 = 0.0
end
@@ -627,8 +653,15 @@ Base.@kwdef mutable struct integrateNeuron <: outputNeuron
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
alpha::Float64 = 0.99
alpha_wSignal::Float64 = 2.0
alpha_wPotential::Float64 = 2.0
alpha_b::Float64 = 2.0
alpha_wSignalChange::Float64 = 0.0
alpha_wPotentialChange::Float64 = 0.0
alpha_bChange::Float64 = 0.0
phi::Float64 = 0.0 # ϕ, psuedo derivative
epsilonRec::Array{Float64} = Float64[] # ϵ_rec, eligibility vector for neuron spike
decayedEpsilonRec::Array{Float64} = Float64[] # α * epsilonRec
@@ -643,6 +676,14 @@ Base.@kwdef mutable struct integrateNeuron <: outputNeuron
alpha_v_t::Float64 = 0.0 # alpha * v_t
firingCounter::Int64 = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate in Hz
notFireTimeOut::Int64 = 10 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
ExInSignalSum::Float64 = 0.0
end
@@ -699,23 +740,6 @@ function init_neuron!(id::Int64, n::passthroughNeuron, n_params::Dict, kfnParams
n.knowledgeFnName = kfnParams[:knowledgeFnName]
end
# function init_neuron!(id::Int64, n::lifNeuron, kfnParams::Dict)
# n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
# subscription_options = shuffle!([1:(kfnParams[:input_neuron_number]+kfnParams[:computeNeuronNumber])...])
# if typeof(kfnParams[:synapticConnectionPercent]) == String
# percent = parse(Int, kfnParams[:synapticConnectionPercent][1:end-1]) / 100
# synapticConnectionPercent = floor(length(subscription_options) * percent)
# n.subscriptionList = [pop!(subscription_options) for i = 1:synapticConnectionPercent]
# end
# filter!(x -> x != n.id, n.subscriptionList)
# n.epsilonRec = zeros(length(n.subscriptionList))
# n.wRec = Random.rand(length(n.subscriptionList))
# n.wRecChange = zeros(length(n.subscriptionList))
# n.reg_voltage_b = zeros(length(n.subscriptionList))
# n.alpha = calculate_α(n)
# end
function init_neuron!(id::Int64, n::lifNeuron, n_params::Dict, kfnParams::Dict)
n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName]
@@ -728,11 +752,13 @@ function init_neuron!(id::Int64, n::lifNeuron, n_params::Dict, kfnParams::Dict)
filter!(x -> x != n.id, n.subscriptionList)
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
# n.wRec = randn(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n)
n.epsilonRec = zeros(length(n.subscriptionList))
# start w/ small weights; otherwise the neuron's weights will explode in the long run
n.wRec = randn(rng, length(n.subscriptionList)) / 10
n.wRecChange = zeros(length(n.subscriptionList))
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
@@ -750,7 +776,9 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100
# start w/ small weights; otherwise the neuron's weights will explode in the long run
n.wRec = randn(rng, length(n.subscriptionList)) / 10
n.wRecChange = zeros(length(n.subscriptionList))
# the more time has passed from the last time neuron was activated, the more
@@ -761,6 +789,23 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dict)
n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName]
subscription_options = shuffle!([kfnParams[:totalInputPort]+1 : kfnParams[:totalNeurons]...])
subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
kfnParams[:totalNeurons] - kfnParams[:totalInputPort]))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 10
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n)
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
function init_neuron!(id::Int64, n::integrateNeuron, n_params::Dict, kfnParams::Dict)
n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName]
@@ -771,31 +816,17 @@ function init_neuron!(id::Int64, n::integrateNeuron, n_params::Dict, kfnParams::
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n)
n.epsilonRec = zeros(length(n.subscriptionList))
# start w/ small weights; otherwise the neuron's weights will explode in the long run
n.wRec = randn(rng, length(n.subscriptionList)) / 10
n.wRecChange = zeros(length(n.subscriptionList))
n.z_i_t_commulative = zeros(length(n.subscriptionList))
n.b = randn(rng) / 100
# start w/ small bias; otherwise the neuron's weights will explode in the long run
n.b = randn(rng) / 10
end
# function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dict)
# n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
# subscription_options = shuffle!([kfnParams[:totalInputPort]+1 : kfnParams[:totalNeurons]...])
# subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
# kfnParams[:totalNeurons] - kfnParams[:totalInputPort]))
# n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
# n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
# n.epsilonRec = zeros(length(n.subscriptionList))
# n.wRec = randn(rng, length(n.subscriptionList)) / 100
# n.wRecChange = zeros(length(n.subscriptionList))
# n.alpha = calculate_k(n)
# n.z_i_t_commulative = zeros(length(n.subscriptionList))
# end
""" Make a neuron intended for use with knowledgeFn
"""
function init_neuron(id::Int64, n_params::Dict, kfnParams::Dict)
@@ -854,6 +885,10 @@ calculate_ρ(neuron::alifNeuron) = exp(-neuron.delta / neuron.tau_a)
calculate_k(neuron::linearNeuron) = exp(-neuron.delta / neuron.tau_out)
calculate_k(neuron::integrateNeuron) = exp(-neuron.delta / neuron.tau_out)
#------------------------------------------------------------------------------------------------100