Change from end-of-sample learning to online learning

This commit is contained in:
2023-05-26 21:03:03 +07:00
parent 3556167591
commit b0cede75c1
6 changed files with 146 additions and 152 deletions

View File

@@ -9,7 +9,7 @@ export
instantiate_custom_types, init_neuron, populate_neuron,
add_neuron!
using Random, Flux, LinearAlgebra
using Random, LinearAlgebra
#------------------------------------------------------------------------------------------------100
@@ -350,7 +350,7 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
refractoryDuration::Int64 = 3 # neuron's refractory period in millisecond
refractoryCounter::Int64 = 0
tau_m::Float64 = 0.0 # τ_m, membrane time constant in millisecond
eta::Float64 = 0.0001 # η, learning rate
eta::Float64 = 0.01 # η, learning rate
wRecChange::Array{Float64} = Float64[] # Δw_rec, cumulated wRec change
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
@@ -438,7 +438,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
eRec_v::Array{Float64} = Float64[] # a component of neuron's eligibility trace resulted from v_t
eRec_a::Array{Float64} = Float64[] # a component of neuron's eligibility trace resulted from av_th
eRec::Array{Float64} = Float64[] # neuron's eligibility trace
eta::Float64 = 0.0001 # eta, learning rate
eta::Float64 = 0.01 # eta, learning rate
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
phi::Float64 = 0.0 # ϕ, pseudo derivative
refractoryDuration::Int64 = 3 # neuron's refractory period in millisecond
@@ -448,7 +448,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
error::Float64 = 0.0 # local neuron error
optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
# optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimizer
firingCounter::Int64 = 0 # store how many times neuron fires
firingRateTarget::Float64 = 20.0 # neuron's target firing rate in Hz
@@ -548,7 +548,7 @@ Base.@kwdef mutable struct linearNeuron <: outputNeuron
refractoryDuration::Int64 = 3 # neuron's refractory period in millisecond
refractoryCounter::Int64 = 0
tau_out::Float64 = 0.0 # τ_out, membrane time constant in millisecond
eta::Float64 = 0.0001 # η, learning rate
eta::Float64 = 0.01 # η, learning rate
wRecChange::Array{Float64} = Float64[] # Δw_rec, cumulated wRec change
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
@@ -588,21 +588,21 @@ end
#------------------------------------------------------------------------------------------------100
"""
    load_optimiser(optimiser_name::String; params = nothing)

Construct and return a Flux optimiser instance by name.

# Arguments
- `optimiser_name`: either `"AdaBelief"` (η = 0.01, for compute neurons) or
  `"AdaBelief2"` (η = 0.007, a slower variant for output neurons).
- `params`: optional tuple of positional arguments splatted into the optimiser
  constructor. When `nothing`, a named default is used. (Previously this keyword
  was unconditionally overwritten, so caller-supplied values were silently
  ignored — fixed to honour them.)

# Throws
- `ErrorException` when `optimiser_name` is not recognised.
"""
function load_optimiser(optimiser_name::String; params::Union{Dict,Tuple,Nothing} = nothing)
    if optimiser_name == "AdaBelief"
        # fix: only fall back to the default when the caller supplied nothing
        isnothing(params) && (params = (0.01, (0.9, 0.8)))
        return Flux.Optimise.AdaBelief(params...)
    elseif optimiser_name == "AdaBelief2"
        # output neuron requires a slower change pace, so η is lower (0.007) than
        # for compute neurons: if w_out changes too fast, compute neurons will not
        # be able to grasp the output neuron's moving direction, i.e. the two
        # directions drift out of sync.
        isnothing(params) && (params = (0.007, (0.9, 0.8)))
        return Flux.Optimise.AdaBelief(params...)
    else
        error("optimiser is not defined yet in load_optimiser()")
    end
end
# function load_optimiser(optimiser_name::String; params::Union{Dict,Nothing} = nothing)
# if optimiser_name == "AdaBelief"
# params = (0.01, (0.9, 0.8))
# return Flux.Optimise.AdaBelief(params...)
# elseif optimiser_name == "AdaBelief2"
# # output neuron requires slower change pace so η is lower than compute neuron at 0.007
# # because if w_out changes too fast, compute neuron will not be able to
# # grasp output neuron moving direction i.e. both compute neuron's direction and
# # output neuron direction are out of sync.
# params = (0.007, (0.9, 0.8))
# return Flux.Optimise.AdaBelief(params...)
# else
# error("optimiser is not defined yet in load_optimiser()")
# end
# end
function init_neuron!(id::Int64, n::passthroughNeuron, n_params::Dict, kfnParams::Dict)
n.id = id