working milestone with 25% accuracy

This commit is contained in:
2023-06-20 09:16:34 +07:00
parent 08297ccd00
commit 57efefc8e3
6 changed files with 1280 additions and 262 deletions

View File

@@ -4,6 +4,7 @@ export
# struct
IronpenStruct, model, knowledgeFn, lifNeuron, alifNeuron, linearNeuron,
kfn_1, inputNeuron, computeNeuron, neuron, outputNeuron, passthroughNeuron,
integrateNeuron,
# function
instantiate_custom_types, init_neuron, populate_neuron,
@@ -22,6 +23,8 @@ abstract type computeNeuron <: neuron end
#------------------------------------------------------------------------------------------------100
rng = MersenneTwister(1234)
""" Model struct
"""
Base.@kwdef mutable struct model <: Ironpen
@@ -262,16 +265,16 @@ function kfn_1(kfnParams::Dict)
end
end
# add ExInType into each output neuron subExInType
for n in kfn.outputNeuronsArray
try # input neuron doest have n.subscriptionList
for (i, sub_id) in enumerate(n.subscriptionList)
n_ExInType = kfn.neuronsArray[sub_id].ExInType
n.wRec[i] *= n_ExInType
end
catch
end
end
# # add ExInType into each output neuron subExInType
# for n in kfn.outputNeuronsArray
# try # input neuron doest have n.subscriptionList
# for (i, sub_id) in enumerate(n.subscriptionList)
# n_ExInType = kfn.neuronsArray[sub_id].ExInType
# n.wRec[i] *= n_ExInType
# end
# catch
# end
# end
for n in kfn.neuronsArray
push!(kfn.nExInType, n.ExInType)
@@ -339,6 +342,7 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
phi::Float64 = 0.0 # ϕ, psuedo derivative
epsilonRec::Array{Float64} = Float64[] # ϵ_rec, eligibility vector for neuron spike
decayedEpsilonRec::Array{Float64} = Float64[] # α * epsilonRec
@@ -347,7 +351,7 @@ Base.@kwdef mutable struct lifNeuron <: computeNeuron
refractoryDuration::Int64 = 3 # neuron's refratory period in millisecond
refractoryCounter::Int64 = 0
tau_m::Float64 = 100.0 # τ_m, membrane time constant in millisecond
eta::Float64 = 0.01 # η, learning rate
eta::Float64 = 1e-3 # η, learning rate
wRecChange::Array{Float64} = Float64[] # Δw_rec, cumulated wRec change
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
@@ -428,6 +432,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>0), upperlimit=(5=>5))
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
delta::Float64 = 1.0 # δ, discreate timestep size in millisecond
epsilonRec::Array{Float64} = Float64[] # ϵ_rec(v), eligibility vector for neuron i spike
epsilonRecA::Array{Float64} = Float64[] # ϵ_rec(a)
@@ -435,7 +440,7 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
eRec_v::Array{Float64} = Float64[] # a component of neuron's eligibility trace resulted from v_t
eRec_a::Array{Float64} = Float64[] # a component of neuron's eligibility trace resulted from av_th
eRec::Array{Float64} = Float64[] # neuron's eligibility trace
eta::Float64 = 0.01 # eta, learning rate
eta::Float64 = 1e-3 # eta, learning rate
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
phi::Float64 = 0.0 # ϕ, psuedo derivative
refractoryDuration::Int64 = 3 # neuron's refractory period in millisecond
@@ -510,7 +515,7 @@ end
""" linearNeuron struct
"""
Base.@kwdef mutable struct linearNeuron <: outputNeuron
id::Float64 = 0.0 # ID of this neuron which is it position in knowledgeFn array
id::Int64 = 0 # ID of this neuron which is it position in knowledgeFn array
type::String = "linearNeuron"
knowledgeFnName::String = "not defined" # knowledgeFn that this neuron belongs to
subscriptionList::Array{Int64} = Int64[] # list of other neuron that this neuron synapse subscribed to
@@ -545,7 +550,7 @@ Base.@kwdef mutable struct linearNeuron <: outputNeuron
refractoryDuration::Int64 = 3 # neuron's refratory period in millisecond
refractoryCounter::Int64 = 0
tau_out::Float64 = 50.0 # τ_out, membrane time constant in millisecond
eta::Float64 = 0.01 # η, learning rate
eta::Float64 = 1e-3 # η, learning rate
wRecChange::Array{Float64} = Float64[] # Δw_rec, cumulated wRec change
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
@@ -584,6 +589,87 @@ function linearNeuron(params::Dict)
return n
end
#------------------------------------------------------------------------------------------------100
""" integrateNeuron struct
"""
Base.@kwdef mutable struct integrateNeuron <: outputNeuron
id::Int64 = 0 # ID of this neuron which is it position in knowledgeFn array
type::String = "integrateNeuron"
knowledgeFnName::String = "not defined" # knowledgeFn that this neuron belongs to
subscriptionList::Array{Int64} = Int64[] # list of other neuron that this neuron synapse subscribed to
timeStep::Int64 = 0 # current time
wRec::Array{Float64} = Float64[] # synaptic weight (for receiving signal from other neuron)
v_t::Float64 = randn() # vᵗ, postsynaptic neuron membrane potential of previous timestep
v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic neuron membrane potential at current timestep
v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
vRest::Float64 = 0.0 # resting potential after neuron fired
vError::Float64 = 0.0 # used to compute model error
z_t::Bool = false # zᵗ, neuron postsynaptic firing of previous timestep
# zᵗ⁺¹, neuron firing status at time = t+1. I need this because the way I calculate all
# neurons forward function at each timestep-by-timestep is to do every neuron
# forward calculation. Each neuron requires access to other neuron's firing status
# during v_t1 calculation hence I need a variable to hold z_t1 so that I'm not replacing z_t
z_t1::Bool = false # neuron postsynaptic firing at current timestep (after neuron's calculation)
b::Float64 = 0.0
bChange::Float64 = 0.0
# neuron presynaptic firing at current timestep (which is other neuron postsynaptic firing of
# previous timestep)
z_i_t::Array{Bool} = Bool[]
z_i_t_commulative::Array{Int64} = Int64[] # used to compute connection strength
synapticStrength::Array{Float64} = Float64[]
synapticStrengthLimit::NamedTuple = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
gammaPd::Float64 = 0.3 # γ_pd, discount factor, value from paper
alpha::Float64 = 0.0 # α, neuron membrane potential decay factor
alphaChange::Float64 = 0.0
phi::Float64 = 0.0 # ϕ, psuedo derivative
epsilonRec::Array{Float64} = Float64[] # ϵ_rec, eligibility vector for neuron spike
decayedEpsilonRec::Array{Float64} = Float64[] # α * epsilonRec
eRec::Array{Float64} = Float64[] # eligibility trace for neuron spike
delta::Float64 = 1.0 # δ, discreate timestep size in millisecond
refractoryDuration::Int64 = 3 # neuron's refratory period in millisecond
refractoryCounter::Int64 = 0
tau_out::Float64 = 50.0 # τ_out, membrane time constant in millisecond
eta::Float64 = 1e-3 # η, learning rate
wRecChange::Array{Float64} = Float64[] # Δw_rec, cumulated wRec change
recSignal::Float64 = 0.0 # incoming recurrent signal
alpha_v_t::Float64 = 0.0 # alpha * v_t
firingCounter::Int64 = 0 # store how many times neuron fires
ExInSignalSum::Float64 = 0.0
end
""" linear neuron outer constructor
# Example
linear_neuron_params = Dict(
:type => "linearNeuron",
:k => 0.9, # output leakink coefficient
:tau_out => 5.0, # output time constant in millisecond. It should equals to time use for 1 sequence
:out => 0.0, # neuron's output value store here
)
neuron1 = linearNeuron(linear_neuron_params)
"""
function integrateNeuron(params::Dict)
n = integrateNeuron()
field_names = fieldnames(typeof(n))
for i in field_names
if i in keys(params)
if i == :optimiser
opt_type = string(split(params[i], ".")[end])
n.:($i) = load_optimiser(opt_type)
else
n.:($i) = params[i] # assign params to n struct fields
end
end
end
return n
end
#------------------------------------------------------------------------------------------------100
# function load_optimiser(optimiser_name::String; params::Union{Dict,Nothing} = nothing)
@@ -634,10 +720,11 @@ function init_neuron!(id::Int64, n::lifNeuron, n_params::Dict, kfnParams::Dict)
# prevent subscription to itself by removing this neuron id
filter!(x -> x != n.id, n.subscriptionList)
n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(length(n.subscriptionList))
# n.wRec = randn(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_α(n)
n.z_i_t_commulative = zeros(length(n.subscriptionList))
@@ -654,10 +741,10 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
# prevent subscription to itself by removing this neuron id
filter!(x -> x != n.id, n.subscriptionList)
n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100 # TODO use abs()
n.wRecChange = zeros(length(n.subscriptionList))
# the more time has passed from the last time neuron was activated, the more
@@ -668,8 +755,7 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.z_i_t_commulative = zeros(length(n.subscriptionList))
end
function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dict)
function init_neuron!(id::Int64, n::integrateNeuron, n_params::Dict, kfnParams::Dict)
n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName]
@@ -677,15 +763,33 @@ function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dic
subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
kfnParams[:totalNeurons] - kfnParams[:totalInputPort]))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = randn(length(n.subscriptionList))
n.wRec = randn(rng, length(n.subscriptionList)) / 100
n.wRecChange = zeros(length(n.subscriptionList))
n.alpha = calculate_k(n)
n.z_i_t_commulative = zeros(length(n.subscriptionList))
n.b = randn(rng) / 100
end
# function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dict)
# n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
# subscription_options = shuffle!([kfnParams[:totalInputPort]+1 : kfnParams[:totalNeurons]...])
# subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
# kfnParams[:totalNeurons] - kfnParams[:totalInputPort]))
# n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
# n.synapticStrength = rand(-4.5:0.01:-4, length(n.subscriptionList))
# n.epsilonRec = zeros(length(n.subscriptionList))
# n.wRec = randn(rng, length(n.subscriptionList)) / 100
# n.wRecChange = zeros(length(n.subscriptionList))
# n.alpha = calculate_k(n)
# n.z_i_t_commulative = zeros(length(n.subscriptionList))
# end
""" Make a neuron intended for use with knowledgeFn
"""
function init_neuron(id::Int64, n_params::Dict, kfnParams::Dict)
@@ -715,7 +819,9 @@ function instantiate_custom_types(params::Union{Dict,Nothing} = nothing)
elseif type == "alifNeuron"
return alifNeuron(params)
elseif type == "linearNeuron"
return linearNeuron(params)
return linearNeuron(params)
elseif type == "integrateNeuron"
return integrateNeuron(params)
else
return nothing
end
@@ -740,6 +846,7 @@ calculate_α(neuron::lifNeuron) = exp(-neuron.delta / neuron.tau_m)
# Per-timestep decay factors derived from time constants: e^(-δ/τ).
# Each method reads the struct's own timestep size `delta` and the relevant
# time constant, so the decay is consistent with that neuron's discretisation.
function calculate_α(n::alifNeuron)
    return exp(-n.delta / n.tau_m) # membrane-potential decay
end
function calculate_ρ(n::alifNeuron)
    return exp(-n.delta / n.tau_a) # adaptation-variable decay
end
function calculate_k(n::linearNeuron)
    return exp(-n.delta / n.tau_out) # output-leak decay
end
function calculate_k(n::integrateNeuron)
    return exp(-n.delta / n.tau_out) # output-leak decay
end
#------------------------------------------------------------------------------------------------100