building v0.0.6

This commit is contained in:
2023-06-23 12:08:13 +07:00
parent c8cc3d2500
commit ecf0325e7d
5 changed files with 39 additions and 19 deletions

View File

@@ -253,7 +253,7 @@ function kfn_1(kfnParams::Dict)
try # input neuron doesn't have n.subscriptionList
for (i, sub_id) in enumerate(n.subscriptionList)
n_ExInType = kfn.neuronsArray[sub_id].ExInType
n.wRec[i] *= n_ExInType
n.wRec[i] = abs(n.wRec[i]) * n_ExInType
# add id exin type to kfn
if n_ExInType < 0
push!(kfn.nInhabitory, sub_id)
@@ -364,6 +364,9 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate in Hz
notFireTimeOut::Int64 = 100 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
""" "inference" = no learning params will be collected.
"learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
correct answer is available then merge Δw_rec_change into wRecChange then
@@ -458,6 +461,9 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularization
firingRate::Float64 = 0.0 # running average of firing rate, Hz
notFireTimeOut::Int64 = 100 # consecutive count of not firing. Should be the same as batch size
notFireCounter::Int64 = 0
tau_a::Float64 = 100.0 # τ_a, adaptation time constant in milliseconds
beta::Float64 = 0.15 # β, constant, value from paper
rho::Float64 = 0.0 # ρ, threshold adaptation decay factor
@@ -744,7 +750,7 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100 # TODO use abs()
n.wRec = randn(rng, length(n.subscriptionList)) / 100
n.wRecChange = zeros(length(n.subscriptionList))
# the more time has passed from the last time neuron was activated, the more