771 lines
34 KiB
Julia
771 lines
34 KiB
Julia
module types
|
||
|
||
export
    # struct
    # NOTE(review): `IronpenStruct` is not defined anywhere in this file —
    # confirm whether it should be `Ironpen` (the abstract root) or be removed.
    IronpenStruct, model, knowledgeFn, lif_neuron, alif_neuron, linear_neuron,
    kfn_1, compute_neuron, neuron, output_neuron, passthrough_neuron,

    # function
    # NOTE(review): `populate_neuron` is not defined in this file, and
    # `add_neuron!` is currently commented out below — verify before `using`
    # this module, since importing undefined exports will warn/fail.
    instantiate_custom_types, init_neuron, populate_neuron,
    add_neuron!
|
||
|
||
using Random, Flux, LinearAlgebra
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
# Type hierarchy. `Ironpen` is the root of every custom type in this module.
# Neurons are split into input / output / compute families so that behaviour
# (construction, initialisation, decay factors) is selected by multiple dispatch.
abstract type Ironpen end
abstract type knowledgeFn <: Ironpen end  # container of neurons ("knowledge function")
abstract type neuron <: Ironpen end       # any neuron
abstract type input_neuron <: neuron end  # feeds external data into the network
abstract type output_neuron <: neuron end # reads results out of the network
abstract type compute_neuron <: neuron end # recurrent SNN core (LIF / ALIF)
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    model

Top-level Ironpen container: holds the knowledge functions, the raw
construction parameters, global error state and the simulation clock.
"""
Base.@kwdef mutable struct model <: Ironpen
    # per the outer-constructor example: Dict(:I => kfn_instance, :run => run_fn)
    knowledgeFn::Union{Dict,Nothing} = nothing
    modelParams::Union{Dict,Nothing} = nothing # the params Dict this model was built from
    error::Union{Float64,Nothing} = 0.0        # model-level scalar error
    outputError::Union{Array,Nothing} = Vector{AbstractFloat}() # per-output error values

    """ "inference" = no learning params will be collected.
    "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
    correct answer is available then merge Δw_rec_change into wRecChange then
    reset epsilon_j.
    "reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """
    learningStage::String = "inference"

    softreset::Bool = false # soft-reset request flag — consumed elsewhere; TODO confirm semantics
    timeStep::Number = 0.0  # current simulation time
end
|
||
"""
    model(params::Dict)

Outer constructor: build a `model` with default field values, store `params`
in `modelParams`, then overwrite every struct field whose name appears as a
key in `params`. Unknown keys are silently ignored.

# Example

    modelParams_1 = Dict(:knowledgeFn => Dict(:I => I_kfn,
                                              :run => run_kfn),
                         :learningStage => "inference",)

    model_1 = Ironpen_ai_gpu.model(modelParams_1)
"""
function model(params::Dict)
    m = model()
    m.modelParams = params

    # Copy matching entries of `params` onto the struct's fields.
    # `setproperty!` is the idiomatic runtime equivalent of `m.field = value`
    # when the field name is only known as a Symbol (the old `m.:($i) = ...`
    # interpolation form is fragile and non-standard).
    for field in fieldnames(model)
        if haskey(params, field)
            setproperty!(m, field, params[field])
        end
    end

    return m
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    kfn_1

Concrete `knowledgeFn`: a population of input/compute neurons plus a separate
array of output neurons, with bookkeeping for learning and firing statistics.
"""
Base.@kwdef mutable struct kfn_1 <: knowledgeFn
    knowledgeFnName::Union{String,Nothing} = nothing
    kfnParams::Union{Dict,Nothing} = nothing # store params of knowledgeFn itself for later use
    timeStep::Number = 0.0 # current simulation time

    # Bn contains the error coefficient for both neurons and output neurons in one place
    Bn::Vector{Float64} = Vector{Float64}() # error projection coefficient from kfn output's error to each neuron's error
    neuronsArray::Union{Array,Nothing} = [] # input + compute neurons; a neuron's id is its index here

    """ put output neuron here. I separate output neurons because
    1. their calculation is different from other neuron types
    2. other neuron types are not induced to connect to output neurons
    3. output neurons are not induced to connect to their own type """
    outputNeuronsArray::Union{Array,Nothing} = []

    """ "inference" = no learning params will be collected.
    "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
    correct answer is available then merge Δw_rec_change into wRecChange then
    reset epsilon_j.
    "reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """
    learningStage::String = "inference"

    error::Union{Float64,Nothing} = nothing # kfn-level scalar error
    softreset::Bool = false # soft-reset request flag — consumed elsewhere; TODO confirm semantics

    firedNeurons::Array{Int64} = Vector{Int64}() # unique ids of firing neurons, used for random neuron connection
    firedNeurons_t0::Union{Vector{Bool},Nothing} = nothing # firing state of all neurons at t0
    firedNeurons_t1::Union{Vector{Bool},Nothing} = nothing # firing state of all neurons at t1

    avgNeuronsFiringRate::Union{Float64,Nothing} = 0.0 # display: average firing rate over all neurons
    avgNeurons_v_t1::Union{Float64,Nothing} = 0.0 # display: average v_t1 over all neurons
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    kfn_1(kfnParams::Dict)

Knowledge function outer constructor >>> auto generate <<<.

Builds the neuron population described by `kfnParams`: input ports first, then
compute neurons (so a neuron's id is its position in `neuronsArray`), then the
output neurons in their own array. Afterwards it assigns excitatory/inhibitory
types (60:40 split over compute neurons, input neurons always excitatory) and
caches each neuron's subscribed Ex/In types in `subExInType`.

Required keys include `:knowledgeFnName`, `:compute_neuron_number`,
`:total_input_port`, `:Bn` (`"random"` or a constant), `:input_port`,
`:compute_neuron`, `:output_port` and `:neuron_firing_rate_target`.

# Example

    kfn1 = kfn_1(I_kfnparams)
"""
function kfn_1(kfnParams::Dict)
    kfn = kfn_1()
    kfn.kfnParams = kfnParams
    kfn.knowledgeFnName = kfn.kfnParams[:knowledgeFnName]

    if kfn.kfnParams[:compute_neuron_number] < kfn.kfnParams[:total_input_port]
        # ArgumentError instead of `throw(error(...))`: `error()` already
        # throws, so wrapping it in `throw` was redundant and misleading.
        throw(ArgumentError("number of compute neuron must be greater than input neuron"))
    end

    # Bn: error projection coefficient from kfn output's error to each neuron's error
    if kfn.kfnParams[:Bn] == "random"
        kfn.Bn = [Random.rand(0:0.001:1) for _ in 1:kfn.kfnParams[:compute_neuron_number]]
    else # manually specified constant
        kfn.Bn = [kfn.kfnParams[:Bn] for _ in 1:kfn.kfnParams[:compute_neuron_number]]
    end

    # Neuron ids are assigned by position in kfn.neuronsArray because that is
    # the most straightforward scheme.

    # add input ports
    for (k, _) in kfn.kfnParams[:input_port]
        current_type = kfn.kfnParams[:input_port][k]
        for _ = 1:current_type[:numbers]
            n_id = length(kfn.neuronsArray) + 1
            push!(kfn.neuronsArray, init_neuron(n_id, current_type[:params], kfn.kfnParams))
        end
    end

    # add compute neurons
    for (k, _) in kfn.kfnParams[:compute_neuron]
        current_type = kfn.kfnParams[:compute_neuron][k]
        for _ = 1:current_type[:numbers]
            n_id = length(kfn.neuronsArray) + 1
            push!(kfn.neuronsArray, init_neuron(n_id, current_type[:params], kfn.kfnParams))
        end
    end

    # add output neurons (kept out of neuronsArray; see kfn_1 struct docs)
    for i = 1:kfn.kfnParams[:output_port][:numbers]
        push!(kfn.outputNeuronsArray,
              init_neuron(i, kfn.kfnParams[:output_port][:params], kfn.kfnParams))
    end

    for n in kfn.neuronsArray
        if typeof(n) <: compute_neuron
            n.firingRateTarget = kfn.kfnParams[:neuron_firing_rate_target]
        end
    end

    # excitatory : inhibitory = 60:40 % of compute_neuron
    ex_number = Int(floor(0.6 * kfn.kfnParams[:compute_neuron_number]))
    in_number = kfn.kfnParams[:compute_neuron_number] - ex_number
    ex_in = shuffle!([fill(1, ex_number); fill(-1, in_number)])

    # Input neurons are always excitatory (their default); compute neurons get
    # a random Ex/In assignment. Compute neurons sit at the END of
    # neuronsArray, so iterate in reverse and stop once every assignment is
    # consumed. This replaces the old `try pop! catch end`, which silently
    # relied on pop!-from-empty throwing to terminate the loop.
    for n in reverse(kfn.neuronsArray)
        isempty(ex_in) && break
        n.ExInType = pop!(ex_in)
    end

    # Record the Ex/In type of each subscribed neuron. Input neurons have no
    # subscriptionList field, hence the explicit hasfield/nothing guard —
    # this replaces a bare `try ... catch end` that swallowed all errors.
    for n in kfn.neuronsArray
        if hasfield(typeof(n), :subscriptionList) && n.subscriptionList !== nothing
            for sub_id in n.subscriptionList
                push!(n.subExInType, kfn.neuronsArray[sub_id].ExInType)
            end
        end
    end

    # Same for output neurons.
    for n in kfn.outputNeuronsArray
        if hasfield(typeof(n), :subscriptionList) && n.subscriptionList !== nothing
            for sub_id in n.subscriptionList
                push!(n.subExInType, kfn.neuronsArray[sub_id].ExInType)
            end
        end
    end

    return kfn
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    passthrough_neuron

Input neuron: forwards external data into the network without computation, so
it carries only identity and firing-state bookkeeping (no synapse state).
"""
Base.@kwdef mutable struct passthrough_neuron <: input_neuron
    id::Union{Int64,Nothing} = nothing # ID of this neuron, i.e. its position in the knowledgeFn array
    type::String = "passthrough_neuron"
    knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
    z_t::Bool = false  # firing status at time = t
    z_t1::Bool = false # firing status at time = t+1
    timeStep::Number = 0.0 # current time
    ExInType::Integer = 1 # 1 excitatory, -1 inhibitory. Input neurons are always excitatory
end
|
||
|
||
"""
    passthrough_neuron(params::Dict)

Outer constructor: build a `passthrough_neuron` with defaults, then overwrite
every struct field whose name appears as a key in `params`. Unknown keys are
silently ignored.
"""
function passthrough_neuron(params::Dict)
    n = passthrough_neuron()
    # passthrough_neuron has no :optimiser field, so the special-case branch
    # the compute-neuron constructors carry was dead code here; removed.
    for field in fieldnames(passthrough_neuron)
        if haskey(params, field)
            setproperty!(n, field, params[field])
        end
    end
    return n
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    lif_neuron

Leaky integrate-and-fire compute neuron with e-prop-style eligibility traces
and firing-rate regularisation state.
"""
Base.@kwdef mutable struct lif_neuron <: compute_neuron
    id::Union{Int64,Nothing} = nothing # this neuron's ID, i.e. its position in the knowledgeFn
    type::String = "lif_neuron"
    ExInType::Integer = 1 # 1 excitatory, -1 inhibitory
    # Bn::Union{Float64,Nothing} = Random.rand() # Bias for neuron error
    knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
    subscriptionList::Union{Array{Int64},Nothing} = nothing # ids of other neurons this neuron's synapses subscribe to
    subExInType::Array{Int64} = Vector{Int64}() # Ex/In type of each subscribed neuron
    timeStep::Number = 0.0 # current time
    w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weights (for receiving signal from other neurons)
    v_t::Float64 = rand() # vᵗ, postsynaptic membrane potential of previous timestep
    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic membrane potential at current timestep
    v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
    v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
    vRest::Float64 = 0.0 # resting potential after neuron fired
    z_t::Bool = false # zᵗ, postsynaptic firing of previous timestep
    # zᵗ⁺¹: all neurons are stepped together each timestep, and each needs the
    # others' PREVIOUS firing status while computing v_t1, hence a separate
    # slot so z_t is not overwritten mid-step.
    z_t1::Bool = false # postsynaptic firing at current timestep (after this neuron's calculation)
    z_i_t::Union{Array{Bool},Nothing} = nothing # presynaptic firing at current timestep (= subscribed neurons' previous z_t)
    synapticStrength::Union{Array{Float64},Nothing} = nothing
    synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(0=>0), upperlimit=(10=>10))

    gammaPd::Union{Float64,Nothing} = 0.3 # γ_pd, discount factor, value from paper
    alpha::Union{Float64,Nothing} = nothing # α, membrane potential decay factor (see calculate_α)
    phi::Union{Float64,Nothing} = nothing # ϕ, pseudo derivative
    epsilonRec::Union{Array{Float64},Nothing} = nothing # ϵ_rec, eligibility vector for neuron spike
    decayedEpsilonRec::Union{Array{Float64},Nothing} = nothing # α * epsilonRec
    eRec::Union{Array{Float64},Nothing} = nothing # eligibility trace for neuron spike
    delta::Union{Float64,Nothing} = 1.0 # δ, discrete timestep size in millisecond
    refractoryDuration::Union{Float64,Nothing} = 3 # neuron's refractory period in millisecond
    # refractory_state_active::Union{Bool,Nothing} = false # if true, neuron is in refractory state and cannot process new information
    refractoryCounter::Integer = 0 # remaining refractory ticks — TODO confirm unit against update code
    tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
    eta::Union{Float64,Nothing} = 0.01 # η, learning rate
    wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, accumulated w_rec change
    recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
    alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
    error::Union{Float64,Nothing} = nothing # local neuron error
    optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimiser

    firingCounter::Integer = 0 # how many times this neuron has fired
    firingRateTarget::Float64 = 20.0 # target firing rate in Hz
    firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
    firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularisation
    firingRate::Float64 = 0.0 # running average of firing rate in Hz

    """ "inference" = no learning params will be collected.
    "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
    correct answer is available then merge Δw_rec_change into wRecChange then
    reset epsilon_j.
    "reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """
    learningStage::String = "inference"
end
|
||
|
||
"""
    lif_neuron(params::Dict)

Outer constructor: build a `lif_neuron` with defaults, then overwrite every
struct field whose name appears as a key in `params`. An `:optimiser` entry
is given as a String (e.g. "Flux.AdaBelief"); only its last dotted component
is kept and resolved through `load_optimiser`.

# Example

    lif_neuron_params = Dict(
        :type => "lif_neuron",
        :v_th => 1.2,
        :z_t => false,
        :gammaPd => 0.3,
        :refractoryDuration => 2.0,
        :delta => 1.0,
        :tau_m => 5.0,
    )

    neuron1 = lif_neuron(lif_neuron_params)
"""
function lif_neuron(params::Dict)
    n = lif_neuron()
    for field in fieldnames(lif_neuron)
        haskey(params, field) || continue
        if field == :optimiser
            # "Pkg.Name" → "Name": keep only the last dotted component, then
            # instantiate the optimiser. `setproperty!` replaces the fragile
            # `n.:($i) = ...` runtime field-set form.
            opt_type = string(split(params[field], ".")[end])
            setproperty!(n, field, load_optimiser(opt_type))
        else
            setproperty!(n, field, params[field])
        end
    end
    return n
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    alif_neuron

Adaptive leaky integrate-and-fire compute neuron: a `lif_neuron` extended
with threshold-adaptation state (`tau_a`, `beta`, `rho`, `a`, `av_th`).
"""
Base.@kwdef mutable struct alif_neuron <: compute_neuron
    id::Union{Int64,Nothing} = nothing # this neuron's ID, i.e. its position in the knowledgeFn
    type::String = "alif_neuron"
    ExInType::Integer = -1 # 1 excitatory, -1 inhibitory
    # Bn::Union{Float64,Nothing} = Random.rand() # Bias for neuron error
    knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
    subscriptionList::Union{Array{Int64},Nothing} = nothing # ids of other neurons this neuron's synapses subscribe to
    subExInType::Array{Int64} = Vector{Int64}() # Ex/In type of each subscribed neuron
    timeStep::Union{Number,Nothing} = nothing # current time
    w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weights (for receiving signal from other neurons)
    v_t::Float64 = rand() # vᵗ, postsynaptic membrane potential of previous timestep
    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic membrane potential at current timestep
    v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
    v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
    vRest::Float64 = 0.0 # resting potential after neuron fired
    z_t::Bool = false # zᵗ, postsynaptic firing of previous timestep
    # zᵗ⁺¹: all neurons are stepped together each timestep, and each needs the
    # others' PREVIOUS firing status while computing v_t1, hence a separate
    # slot so z_t is not overwritten mid-step.
    z_t1::Bool = false # postsynaptic firing at current timestep (after this neuron's calculation)
    z_i_t::Union{Array{Bool},Nothing} = nothing # presynaptic firing at current timestep (= subscribed neurons' previous z_t)
    synapticStrength::Union{Array{Float64},Nothing} = nothing
    synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>0), upperlimit=(5=>5))

    alpha::Union{Float64,Nothing} = nothing # α, membrane potential decay factor (see calculate_α)
    delta::Union{Float64,Nothing} = 1.0 # δ, discrete timestep size in millisecond
    epsilonRec::Union{Array{Float64},Nothing} = nothing # ϵ_rec(v), eligibility vector for neuron i spike
    epsilonRecA::Union{Array{Float64},Nothing} = nothing # ϵ_rec(a), adaptation component
    decayedEpsilonRec::Union{Array{Float64},Nothing} = nothing # α * epsilonRec
    eRec_v::Union{Array{Float64},Nothing} = nothing # eligibility-trace component from v_t
    eRec_a::Union{Array{Float64},Nothing} = nothing # eligibility-trace component from av_th
    eRec::Union{Array{Float64},Nothing} = nothing # neuron's eligibility trace
    eta::Union{Float64,Nothing} = 0.01 # η, learning rate
    gammaPd::Union{Float64,Nothing} = 0.3 # γ_pd, discount factor, value from paper
    phi::Union{Float64,Nothing} = nothing # ϕ, pseudo derivative
    refractoryDuration::Union{Float64,Nothing} = 3 # neuron's refractory period in millisecond
    # refractory_state_active::Union{Bool,Nothing} = false # if true, neuron is in refractory state and cannot process new information
    refractoryCounter::Integer = 0 # remaining refractory ticks — TODO confirm unit against update code
    tau_m::Union{Float64,Nothing} = nothing # τ_m, membrane time constant in millisecond
    wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, accumulated w_rec change
    recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
    alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
    error::Union{Float64,Nothing} = nothing # local neuron error
    optimiser::Union{Any,Nothing} = load_optimiser("AdaBelief") # Flux optimiser

    firingCounter::Integer = 0 # how many times this neuron has fired
    firingRateTarget::Float64 = 20.0 # target firing rate in Hz
    firingDiff::Float64 = 0.0 # e-prop supplement paper equation 5
    firingRateError::Float64 = 0.0 # local neuron error w.r.t. firing regularisation
    firingRate::Float64 = 0.0 # running average of firing rate, Hz

    tau_a::Union{Float64,Nothing} = nothing # τ_a, adaptation time constant in millisecond
    beta::Union{Float64,Nothing} = 0.15 # β, constant, value from paper
    rho::Union{Float64,Nothing} = nothing # ρ, threshold adaptation decay factor (see calculate_ρ)
    a::Union{Float64,Nothing} = 0.0 # threshold adaptation
    av_th::Union{Float64,Nothing} = nothing # adjusted neuron firing threshold

    """ "inference" = no learning params will be collected.
    "learning" = neuron will accumulate epsilon_j, compute Δw_rec_change each time
    correct answer is available then merge Δw_rec_change into wRecChange then
    reset epsilon_j.
    "reflect" = neuron will merge wRecChange into w_rec then reset wRecChange. """
    learningStage::String = "inference"

end
|
||
"""
    alif_neuron(params::Dict)

Outer constructor: build an `alif_neuron` with defaults, then overwrite every
struct field whose name appears as a key in `params`. An `:optimiser` entry
is given as a String (e.g. "Flux.AdaBelief"); only its last dotted component
is kept and resolved through `load_optimiser`.

# Example

    alif_neuron_params = Dict(
        :type => "alif_neuron",
        :v_th => 1.2,
        :z_t => false,
        :gammaPd => 0.3,
        :refractoryDuration => 2.0,
        :delta => 1.0,
        :tau_m => 5.0,

        # adaptation time constant in millisecond; should equal the total time
        # the SNN takes to perform a task, i.e. the episode length
        :tau_a => 10.0,
        :beta => 0.15,
        :a => 0.0,
    )

    neuron1 = alif_neuron(alif_neuron_params)
"""
function alif_neuron(params::Dict)
    n = alif_neuron()
    for field in fieldnames(alif_neuron)
        haskey(params, field) || continue
        if field == :optimiser
            # "Pkg.Name" → "Name": keep only the last dotted component, then
            # instantiate the optimiser. `setproperty!` replaces the fragile
            # `n.:($i) = ...` runtime field-set form.
            opt_type = string(split(params[field], ".")[end])
            setproperty!(n, field, load_optimiser(opt_type))
        else
            setproperty!(n, field, params[field])
        end
    end
    return n
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
"""
    linear_neuron

Output (readout) neuron: a linear leaky integrator over the compute neurons'
spikes; it never spikes itself and carries no optimiser of its own.
"""
Base.@kwdef mutable struct linear_neuron <: output_neuron
    id::Union{Int64,Nothing} = nothing # ID of this neuron, i.e. its position in the knowledgeFn array
    type::String = "linear_neuron"
    knowledgeFnName::Union{String,Nothing} = nothing # knowledgeFn that this neuron belongs to
    subscriptionList::Union{Array{Int64},Nothing} = nothing # ids of other neurons this neuron's synapses subscribe to
    timeStep::Union{Number,Nothing} = nothing # current time

    subExInType::Array{Int64} = Vector{Int64}() # Ex/In type of each subscribed neuron
    w_rec::Union{Array{Float64},Nothing} = nothing # synaptic weights (for receiving signal from other neurons)
    v_t::Float64 = 0.0 # vᵗ, postsynaptic membrane potential of previous timestep
    v_t1::Float64 = 0.0 # vᵗ⁺¹, postsynaptic membrane potential at current timestep
    v_t_default::Union{Float64,Nothing} = 0.0 # default membrane potential voltage
    v_th::Float64 = 1.0 # vᵗʰ, neuron firing threshold
    vRest::Float64 = 0.0 # resting potential after neuron fired
    # zᵗ⁺¹: all neurons are stepped together each timestep, and each needs the
    # others' PREVIOUS firing status while computing v_t1, hence a separate
    # slot so z_t is not overwritten mid-step.
    z_t1::Bool = false # postsynaptic firing at current timestep (after this neuron's calculation)

    # presynaptic firing at current timestep (= subscribed neurons'
    # postsynaptic firing of the previous timestep)
    z_i_t::Union{Array{Bool},Nothing} = nothing
    synapticStrength::Union{Array{Float64},Nothing} = nothing
    synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>0), upperlimit=(5=>5))

    gammaPd::Union{Float64,Nothing} = 0.3 # γ_pd, discount factor, value from paper
    alpha::Union{Float64,Nothing} = nothing # α, output leak factor here (set from calculate_k)
    phi::Union{Float64,Nothing} = nothing # ϕ, pseudo derivative
    epsilonRec::Union{Array{Float64},Nothing} = nothing # ϵ_rec, eligibility vector for neuron spike
    decayedEpsilonRec::Union{Array{Float64},Nothing} = nothing # α * epsilonRec
    eRec::Union{Array{Float64},Nothing} = nothing # eligibility trace for neuron spike
    delta::Union{Float64,Nothing} = 1.0 # δ, discrete timestep size in millisecond
    refractoryDuration::Union{Float64,Nothing} = 3 # neuron's refractory period in millisecond
    refractoryCounter::Integer = 0 # remaining refractory ticks — TODO confirm unit against update code
    tau_out::Union{Float64,Nothing} = nothing # τ_out, output time constant in millisecond
    eta::Union{Float64,Nothing} = 0.01 # η, learning rate
    wRecChange::Union{Array{Float64},Nothing} = nothing # Δw_rec, accumulated w_rec change
    recSignal::Union{Float64,Nothing} = nothing # incoming recurrent signal
    alpha_v_t::Union{Float64,Nothing} = nothing # alpha * v_t
    error::Union{Float64,Nothing} = nothing # local neuron error

    firingCounter::Integer = 0 # how many times this neuron has fired
end
|
||
|
||
"""
    linear_neuron(params::Dict)

Outer constructor: build a `linear_neuron` with defaults, then overwrite every
struct field whose name appears as a key in `params`. Unknown keys are
silently ignored.

# Example

    linear_neuron_params = Dict(
        :type => "linear_neuron",
        :k => 0.9,        # output leaking coefficient
        :tau_out => 5.0,  # output time constant in millisecond
        :out => 0.0,      # neuron's output value is stored here
    )

    neuron1 = linear_neuron(linear_neuron_params)
"""
function linear_neuron(params::Dict)
    n = linear_neuron()
    # linear_neuron has no :optimiser field, so the special-case branch the
    # compute-neuron constructors carry was dead code here; removed.
    # `setproperty!` replaces the fragile `n.:($i) = ...` runtime field-set form.
    for field in fieldnames(linear_neuron)
        if haskey(params, field)
            setproperty!(n, field, params[field])
        end
    end

    return n
end
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
"""
    load_optimiser(optimiser_name::String; params = nothing)

Instantiate a Flux optimiser by name. When `params` is a Tuple it is splatted
into the optimiser constructor, overriding the per-name defaults (previously
the keyword was accepted but unconditionally overwritten, i.e. silently
ignored — that was a bug).

Known names:
- "AdaBelief"  → AdaBelief(0.01, (0.9, 0.8))
- "AdaBelief2" → AdaBelief(0.007, (0.9, 0.8)) — output neurons require a
  slower change pace (lower η) so compute neurons can track the output
  weights' moving direction; otherwise the two drift out of sync.

Throws an error for any other name.
"""
function load_optimiser(optimiser_name::String; params::Union{Tuple,Dict,Nothing} = nothing)
    if optimiser_name == "AdaBelief"
        args = params isa Tuple ? params : (0.01, (0.9, 0.8))
        return Flux.Optimise.AdaBelief(args...)
    elseif optimiser_name == "AdaBelief2"
        args = params isa Tuple ? params : (0.007, (0.9, 0.8))
        return Flux.Optimise.AdaBelief(args...)
    else
        error("optimiser is not defined yet in load_optimiser()")
    end
end
|
||
|
||
"""
    init_neuron!(id, n::passthrough_neuron, n_params, kfnParams)

Wire an input (passthrough) neuron into its knowledge function: it only needs
an id and its owner's name — input neurons hold no synapse state.
"""
function init_neuron!(id::Int64, n::passthrough_neuron, n_params::Dict, kfnParams::Dict)
    n.id = id
    n.knowledgeFnName = kfnParams[:knowledgeFnName]
end
|
||
|
||
# function init_neuron!(id::Int64, n::lif_neuron, kfnParams::Dict)
|
||
# n.id = id
|
||
# n.knowledgeFnName = kfnParams[:knowledgeFnName]
|
||
# subscription_options = shuffle!([1:(kfnParams[:input_neuron_number]+kfnParams[:compute_neuron_number])...])
|
||
# if typeof(kfnParams[:synaptic_connection_number]) == String
|
||
# percent = parse(Int, kfnParams[:synaptic_connection_number][1:end-1]) / 100
|
||
# synaptic_connection_number = floor(length(subscription_options) * percent)
|
||
# n.subscriptionList = [pop!(subscription_options) for i = 1:synaptic_connection_number]
|
||
# end
|
||
# filter!(x -> x != n.id, n.subscriptionList)
|
||
# n.epsilonRec = zeros(length(n.subscriptionList))
|
||
# n.w_rec = Random.rand(length(n.subscriptionList))
|
||
# n.wRecChange = zeros(length(n.subscriptionList))
|
||
# n.reg_voltage_b = zeros(length(n.subscriptionList))
|
||
# n.alpha = calculate_α(n)
|
||
# end
|
||
|
||
"""
    init_neuron!(id, n::lif_neuron, n_params, kfnParams)

Wire a freshly-constructed LIF neuron into the knowledge function: assign its
id and owner, draw a random subscription list sized as a percentage
(`n_params[:synaptic_connection_number]`) of `kfnParams[:total_neurons]`,
allocate the per-synapse state vectors and precompute the decay factor α.
"""
function init_neuron!(id::Int64, n::lif_neuron, n_params::Dict, kfnParams::Dict)
    n.id = id
    n.knowledgeFnName = kfnParams[:knowledgeFnName]

    total = kfnParams[:total_neurons]
    candidates = shuffle!(collect(1:total))
    wanted = Int(floor(n_params[:synaptic_connection_number] * total / 100.0))
    n.subscriptionList = [pop!(candidates) for _ = 1:wanted]

    # a neuron must never subscribe to itself
    filter!(s -> s != n.id, n.subscriptionList)

    len = length(n.subscriptionList)
    n.synapticStrength = normalize!(rand(len), 1)

    n.epsilonRec = zeros(len)
    n.w_rec = Random.rand(len)
    n.wRecChange = zeros(len)
    n.alpha = calculate_α(n)
end
|
||
|
||
"""
    init_neuron!(id, n::alif_neuron, n_params, kfnParams)

Wire a freshly-constructed ALIF neuron into the knowledge function: assign its
id and owner, draw a random subscription list sized as a percentage of
`kfnParams[:total_neurons]`, allocate the per-synapse state vectors and
precompute both decay factors — α (membrane) and ρ (threshold adaptation).
"""
function init_neuron!(id::Int64, n::alif_neuron, n_params::Dict,
                      kfnParams::Dict)
    n.id = id
    n.knowledgeFnName = kfnParams[:knowledgeFnName]

    total = kfnParams[:total_neurons]
    candidates = shuffle!(collect(1:total))
    wanted = Int(floor(n_params[:synaptic_connection_number] * total / 100.0))
    n.subscriptionList = [pop!(candidates) for _ = 1:wanted]

    # a neuron must never subscribe to itself
    filter!(s -> s != n.id, n.subscriptionList)

    len = length(n.subscriptionList)
    n.synapticStrength = normalize!(rand(len), 1)

    n.epsilonRec = zeros(len)
    n.w_rec = Random.rand(len)
    n.wRecChange = zeros(len)

    # the longer since the neuron was last activated, the more its membrane
    # potential has decayed
    n.alpha = calculate_α(n)
    n.rho = calculate_ρ(n)
    n.epsilonRecA = zeros(len)
end
|
||
|
||
|
||
"""
    init_neuron!(id, n::linear_neuron, n_params, kfnParams)

Wire an output (linear readout) neuron: it subscribes only to ids after the
input ports (i.e. compute neurons), sized as a percentage of
`kfnParams[:total_compute_neuron]`, then allocates per-synapse state and
precomputes the output leak factor.
"""
function init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Dict)
    n.id = id
    n.knowledgeFnName = kfnParams[:knowledgeFnName]

    first_compute = kfnParams[:total_input_port] + 1
    candidates = shuffle!(collect(first_compute:kfnParams[:total_neurons]))
    wanted = Int(floor(n_params[:synaptic_connection_number] *
                       kfnParams[:total_compute_neuron] / 100.0))
    n.subscriptionList = [pop!(candidates) for _ = 1:wanted]

    len = length(n.subscriptionList)
    n.synapticStrength = normalize!(rand(len), 1)
    n.epsilonRec = zeros(len)
    n.w_rec = Random.rand(len)
    n.wRecChange = zeros(len)
    n.alpha = calculate_k(n) # output leak factor, stored in the shared alpha slot
end
|
||
|
||
"""
    init_neuron(id, n_params, kfnParams) -> neuron

Make a neuron intended for use with a knowledgeFn: instantiate the type named
by `n_params[:type]`, then wire it in via the matching `init_neuron!` method.
"""
function init_neuron(id::Int64, n_params::Dict, kfnParams::Dict)
    new_neuron = instantiate_custom_types(n_params)
    init_neuron!(id, new_neuron, n_params, kfnParams)
    return new_neuron
end
|
||
|
||
"""
    instantiate_custom_types(params::Dict) -> Union{Ironpen, Nothing}

Instantiate an Ironpen type named by `params[:type]` — a String such as
"lif_neuron", optionally dotted ("Mod.lif_neuron"); only the last dotted
component is used. Returns `nothing` when the name is unknown.

# Example

    new_model = instantiate_custom_types(Dict(:type => "model"))
"""
function instantiate_custom_types(params::Union{Dict,Nothing} = nothing)
    # The old `nothing` default crashed with a MethodError on `params[:type]`;
    # fail early with a clear message instead.
    params === nothing && throw(ArgumentError("params Dict with a :type key is required"))
    type = string(split(params[:type], ".")[end])

    if type == "model"
        return model()
    elseif type == "knowledgeFn"
        # `knowledgeFn` is an abstract type and cannot be instantiated;
        # `kfn_1` is its concrete implementation.
        return kfn_1()
    elseif type == "passthrough_neuron"
        return passthrough_neuron(params)
    elseif type == "lif_neuron"
        return lif_neuron(params)
    elseif type == "alif_neuron"
        return alif_neuron(params)
    elseif type == "linear_neuron"
        return linear_neuron(params)
    else
        return nothing
    end
end
|
||
|
||
""" Add a new neuron into a knowledgeFn
|
||
|
||
# Example
|
||
add_neuron!(kfn.kfnParams[:lif_neuron_params], kfn)
|
||
"""
|
||
# function add_neuron!(neuron_Dict::Dict, kfn::knowledgeFn)
|
||
# id = length(kfn.neuronsArray) + 1
|
||
# neuron = init_neuron(id, neuron_Dict, kfn.kfnParams,
|
||
# total_neurons = (length(kfn.neuronsArray) + 1))
|
||
# push!(kfn.neuronsArray, neuron)
|
||
|
||
# # Randomly select an output neuron to add a new neuron to
|
||
# add_n_output_n!(Random.rand(kfn.outputNeuronsArray), id)
|
||
# end
|
||
|
||
# Per-timestep decay factors, all of the form exp(-δ/τ) (e-prop formulation).
# lif_neuron and alif_neuron had two byte-identical `calculate_α` methods;
# they are merged into one Union method — behavior-identical for both types.
calculate_α(neuron::Union{lif_neuron,alif_neuron}) = exp(-neuron.delta / neuron.tau_m) # membrane decay
calculate_ρ(neuron::alif_neuron) = exp(-neuron.delta / neuron.tau_a)                   # threshold adaptation decay
calculate_k(neuron::linear_neuron) = exp(-neuron.delta / neuron.tau_out)               # output leak
|
||
|
||
#------------------------------------------------------------------------------------------------100
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
end # module end
|
||
|
||
|
||
|
||
|
||
|