add neuroplasticity

This commit is contained in:
2023-05-19 14:54:12 +07:00
parent fb458a49db
commit e6a3ad5202
4 changed files with 110 additions and 59 deletions

View File

@@ -34,8 +34,6 @@ using .interface
""" """
Todo: Todo:
[3] verify that model can complete learning cycle with no error
[*2] neuroplasticity() i.e. change connection
[] using RL to control learning signal [] using RL to control learning signal
[] consider using Dates.now() instead of timestamp because time_stamp may overflow [] consider using Dates.now() instead of timestamp because time_stamp may overflow
[] training should include adjusting α, neuron membrane potential decay factor [] training should include adjusting α, neuron membrane potential decay factor
@@ -58,6 +56,7 @@ using .interface
[DONE] reset_epsilonRec after ΔwRecChange is calculated [DONE] reset_epsilonRec after ΔwRecChange is calculated
[DONE] synaptic connection strength concept. use sigmoid, turn connection offline [DONE] synaptic connection strength concept. use sigmoid, turn connection offline
[DONE] wRec should not normalized whole. it should be local 5 conn normalized. [DONE] wRec should not normalized whole. it should be local 5 conn normalized.
[DONE] neuroplasticity() i.e. change connection
Change from version: v06_36a Change from version: v06_36a
- -

View File

@@ -54,8 +54,7 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
n.wRec .*= nonFlipedSign n.wRec .*= nonFlipedSign
synapticConnStrength!(n) synapticConnStrength!(n)
#TODO neuroplasticity neuroplasticity!(n, kfn.firedNeurons, kfn.nExInType)
println("")
end end
end end
@@ -68,17 +67,9 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
synapticConnStrength!(n) synapticConnStrength!(n)
#TODO neuroplasticity neuroplasticity!(n, kfn.firedNeurons, kfn.nExInType)
end end
resetLearningParams!(n)
# clear variables
kfn.firedNeurons = Vector{Int64}()
kfn.firedNeurons_t0 = Vector{Bool}()
kfn.firedNeurons_t1 = Vector{Bool}()
kfn.learningStage = "inference" kfn.learningStage = "inference"
end end
end end

View File

@@ -36,7 +36,7 @@ reset_epsilonRec!(n::computeNeuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilonRec!(n::outputNeuron) = n.epsilonRec = n.epsilonRec * 0.0 reset_epsilonRec!(n::outputNeuron) = n.epsilonRec = n.epsilonRec * 0.0
reset_epsilonRecA!(n::alifNeuron) = n.epsilonRecA = n.epsilonRecA * 0.0 reset_epsilonRecA!(n::alifNeuron) = n.epsilonRecA = n.epsilonRecA * 0.0
reset_epsilon_in!(n::computeNeuron) = n.epsilon_in = isnothing(n.epsilon_in) ? nothing : n.epsilon_in * 0.0 reset_epsilon_in!(n::computeNeuron) = n.epsilon_in = isnothing(n.epsilon_in) ? nothing : n.epsilon_in * 0.0
reset_error!(n::Union{computeNeuron, linearNeuron}) = n.error = nothing reset_error!(n::Union{computeNeuron, outputNeuron}) = n.error = nothing
reset_w_in_change!(n::computeNeuron) = n.w_in_change = isnothing(n.w_in_change) ? nothing : n.w_in_change * 0.0 reset_w_in_change!(n::computeNeuron) = n.w_in_change = isnothing(n.w_in_change) ? nothing : n.w_in_change * 0.0
reset_wRecChange!(n::Union{computeNeuron, outputNeuron}) = n.wRecChange = n.wRecChange * 0.0 reset_wRecChange!(n::Union{computeNeuron, outputNeuron}) = n.wRecChange = n.wRecChange * 0.0
reset_a!(n::alifNeuron) = n.a = n.a * 0.0 reset_a!(n::alifNeuron) = n.a = n.a * 0.0
@@ -44,7 +44,7 @@ reset_reg_voltage_a!(n::computeNeuron) = n.reg_voltage_a = n.reg_voltage_a * 0.0
reset_reg_voltage_b!(n::computeNeuron) = n.reg_voltage_b = n.reg_voltage_b * 0.0 reset_reg_voltage_b!(n::computeNeuron) = n.reg_voltage_b = n.reg_voltage_b * 0.0
reset_reg_voltage_error!(n::computeNeuron) = n.reg_voltage_error = n.reg_voltage_error * 0.0 reset_reg_voltage_error!(n::computeNeuron) = n.reg_voltage_error = n.reg_voltage_error * 0.0
reset_firing_counter!(n::Union{computeNeuron, outputNeuron}) = n.firingCounter = n.firingCounter * 0.0 reset_firing_counter!(n::Union{computeNeuron, outputNeuron}) = n.firingCounter = n.firingCounter * 0.0
reset_firing_diff!(n::Union{computeNeuron, linearNeuron}) = n.firingDiff = n.firingDiff * 0.0 reset_firing_diff!(n::Union{computeNeuron, outputNeuron}) = n.firingDiff = n.firingDiff * 0.0
reset_refractoryCounter!(n::Union{computeNeuron, outputNeuron}) = n.refractoryCounter = n.refractoryCounter * 0.0 reset_refractoryCounter!(n::Union{computeNeuron, outputNeuron}) = n.refractoryCounter = n.refractoryCounter * 0.0
reset_z_i_t_commulative!(n::Union{computeNeuron, outputNeuron}) = n.z_i_t_commulative = n.z_i_t_commulative * 0.0 reset_z_i_t_commulative!(n::Union{computeNeuron, outputNeuron}) = n.z_i_t_commulative = n.z_i_t_commulative * 0.0
@@ -304,31 +304,6 @@ end
function synapticConnStrength!(n::inputNeuron) end function synapticConnStrength!(n::inputNeuron) end
""" Rewire synaptic connections whose weight has decayed to 0 by sampling a
replacement presynaptic neuron from the set of neurons that fired.
NOTE(review): assumes `Utils.random_choices(options, weights)` picks one element
of `options` with the given probabilities — confirm against Utils. """
function neuroplasticity!(n::computeNeuron, firedNeurons::Vector)
    # if there is a 0-weight connection then replace it with a new connection
    zero_weight_index = findall(iszero.(n.wRec))
    if length(zero_weight_index) != 0
        # Sample the new connection from the list of neurons that fired instead of
        # choosing randomly from all compute neurons, because there is no point
        # connecting to a neuron that does not fire (no fire = no information).
        subscribe_options = filter(x -> x ∉ [n.id], firedNeurons)  # exclude this neuron's own id
        filter!(x -> x ∉ n.subscriptionList, subscribe_options)    # exclude ids already subscribed to
        shuffle!(subscribe_options)
    end
    new_connection_percent = 10 - ((n.optimiser.eta / 0.0001) / 10)  # percent is in range 0.1 to 10
    percentage = [new_connection_percent, 100.0 - new_connection_percent] / 100.0
    for i in zero_weight_index
        # robustness guard: stop once every fired candidate has been consumed,
        # instead of letting pop! throw on an empty vector
        isempty(subscribe_options) && break
        if Utils.random_choices([true, false], percentage)
            n.subscriptionList[i] = pop!(subscribe_options)
            # a new connection should not send a large signal, otherwise it would
            # throw the RSNN off path; let the weight grow via an optimiser
            n.wRec[i] = 0.01
        end
    end
end
""" normalize a part of a vector centering at a vector's maximum value along with nearby value """ normalize a part of a vector centering at a vector's maximum value along with nearby value
within its radius. radius must be odd number. within its radius. radius must be odd number.
v1 will be normalized based on v2's peak v1 will be normalized based on v2's peak
@@ -343,10 +318,85 @@ function normalizePeak!(v1::Vector, v2::Vector, radius::Integer=2)
normalize!(subvector, 1) normalize!(subvector, 1)
end end
""" Rewire a neuron's synaptic connections that have 0 weight, with an
excitatory/inhibitory connection-ratio constraint.
"""
# function neuroplasticity!(n::Union{computeNeuron, outputNeuron}, firedNeurons::Vector,
# nExcitatory::Vector, nInhabitory::Vector, excitatoryPercent::Integer)
# # if there is 0-weight then replace it with new connection
# zeroWeightConnIndex = findall(iszero.(n.wRec)) # connection that has 0 weight
# desiredEx = Int(floor((excitatoryPercent / 100) * length(n.subscriptionList)))
# desiredIn = length(n.subscriptionList) - desiredEx
# wRecSign = sign.(n.wRec)
# inConn = sum(isequal.(wRecSign, -1))
# # random new synaptic connection
# inConnToAdd = desiredIn - inConn
# if inConnToAdd <= 0
# # skip all new Conn will be excitatory type
# else
# newConnVecSign = ones(length(zeroWeightConnIndex))
# newConnVecSign = view(newConnVecSign, 1:inConnToAdd) * -1
# end
# # a new synaptic connection must sample from neurons that fire
# inPool = nInhabitory ∩ firedNeurons
# filter!(x -> x ∉ [n.id], inPool) # exclude this neuron id from the list
# filter!(x -> x ∉ n.subscriptionList, inPool) # exclude this neuron's subscriptionList from the list
# exPool = nExcitatory ∩ firedNeurons
# filter!(x -> x ∉ [n.id], exPool) # exclude this neuron id from the list
# filter!(x -> x ∉ n.subscriptionList, exPool) # exclude this neuron's subscriptionList from the list
# w = [rand(0.01:0.01:0.2, length(zeroWeightConnIndex))] .* newConnVecSign
# synapticStrength = [rand(-5:0.01:-4, length(zeroWeightConnIndex))]
# # add new synaptic connection to neuron
# for (i, connIndex) in enumerate(zeroWeightConnIndex)
# n.subscriptionList[connIndex] = newConnVecSign[i] < 0 ? pop!(inPool) : pop!(exPool)
# n.wRec[connIndex] = w[i]
# n.synapticStrength[connIndex] = synapticStrength[i]
# end
# end
""" Rewire a neuron's synaptic connections that have 0 weight, without an
excitatory/inhibitory connection-ratio constraint. Replacement presynaptic
partners are drawn preferentially from neurons that fired; non-fired neurons
are the fallback pool. `nExInTypeList[id]` supplies the EX(+1)/IN(-1) sign
applied to each new weight.
"""
function neuroplasticity!(n::Union{computeNeuron, outputNeuron}, firedNeurons::Vector,
                          nExInTypeList::Vector)
    # if there is a 0-weight connection then replace it with a new connection
    zeroWeightConnIndex = findall(iszero.(n.wRec))  # connections whose weight is 0
    # a new synaptic connection must sample from neurons that fire
    nFiredPool = filter(x -> x ∉ [n.id], firedNeurons)  # exclude this neuron's own id
    filter!(x -> x ∉ n.subscriptionList, nFiredPool)    # exclude ids already subscribed to
    nNonFiredPool = setdiff!([1:length(nExInTypeList)...], nFiredPool)
    filter!(x -> x ∉ [n.id], nNonFiredPool)             # exclude this neuron's own id
    filter!(x -> x ∉ n.subscriptionList, nNonFiredPool) # exclude ids already subscribed to
    w = rand(0.01:0.01:0.2, length(zeroWeightConnIndex))
    synapticStrength = rand(-5:0.01:-4, length(zeroWeightConnIndex))
    shuffle!(nFiredPool)
    shuffle!(nNonFiredPool)
    # add the new synaptic connections to the neuron
    for (i, connIndex) in enumerate(zeroWeightConnIndex)
        if length(nFiredPool) != 0
            newConn = popfirst!(nFiredPool)
        elseif length(nNonFiredPool) != 0
            newConn = popfirst!(nNonFiredPool)
        else
            # robustness guard: both pools exhausted — nothing left to rewire to,
            # so stop instead of letting popfirst! throw on an empty vector
            break
        end
        # the connection being replaced goes back into nNonFiredPool so the
        # fallback pool is not drained over repeated rewires
        push!(nNonFiredPool, n.subscriptionList[connIndex])
        n.subscriptionList[connIndex] = newConn
        # sign the new weight by the presynaptic neuron's EX/IN type
        n.wRec[connIndex] = w[i] * nExInTypeList[newConn]
        n.synapticStrength[connIndex] = synapticStrength[i]
    end
end

View File

@@ -111,6 +111,10 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
avgNeuronsFiringRate::Union{Float64,Nothing} = 0.0 # for displaying average firing rate over all neurons avgNeuronsFiringRate::Union{Float64,Nothing} = 0.0 # for displaying average firing rate over all neurons
avgNeurons_v_t1::Union{Float64,Nothing} = 0.0 # for displaying average v_t1 over all neurons avgNeurons_v_t1::Union{Float64,Nothing} = 0.0 # for displaying average v_t1 over all neurons
nExcitatory::Union{Array,Nothing} = Vector{Integer}() # list of excitatory neuron id
nInhabitory::Union{Array,Nothing} = Vector{Integer}() # list of inhabitory neuron id
nExInType::Union{Array,Nothing} = Vector{Integer}() # list all neuron EX or IN
excitatoryPercent::Integer = 60 # percentage of excitatory neuron, inhabitory percent will be 100-ExcitatoryPercent
end end
#------------------------------------------------------------------------------------------------100 #------------------------------------------------------------------------------------------------100
@@ -229,7 +233,7 @@ function kfn_1(kfnParams::Dict)
end end
# excitatory neuron to inhabitory neuron = 60:40 % of computeNeuron # excitatory neuron to inhabitory neuron = 60:40 % of computeNeuron
ex_number = Int(floor(0.6 * kfn.kfnParams[:computeNeuronNumber])) ex_number = Int(floor((kfn.excitatoryPercent/100.0) * kfn.kfnParams[:computeNeuronNumber]))
ex_n = [1 for i in 1:ex_number] ex_n = [1 for i in 1:ex_number]
in_number = kfn.kfnParams[:computeNeuronNumber] - ex_number in_number = kfn.kfnParams[:computeNeuronNumber] - ex_number
in_n = [-1 for i in 1:in_number] in_n = [-1 for i in 1:in_number]
@@ -237,17 +241,22 @@ function kfn_1(kfnParams::Dict)
# input neurons are always excitatory, compute_neurons are random between excitatory # input neurons are always excitatory, compute_neurons are random between excitatory
# and inhabitory # and inhabitory
for n in reverse(kfn.neuronsArray) for n in kfn.neuronsArray
try n.ExInType = pop!(ex_in) catch end try n.ExInType = pop!(ex_in) catch end
end end
# add ExInType into each computeNeuron subExInType # add ExInType into each computeNeuron subExInType
for n in reverse(kfn.neuronsArray) for n in kfn.neuronsArray
try # input neuron doest have n.subscriptionList try # input neuron doest have n.subscriptionList
for (i, sub_id) in enumerate(n.subscriptionList) for (i, sub_id) in enumerate(n.subscriptionList)
n_ExInType = kfn.neuronsArray[sub_id].ExInType n_ExInType = kfn.neuronsArray[sub_id].ExInType
# push!(n.subExInType, n_ExInType)
n.wRec[i] *= n_ExInType n.wRec[i] *= n_ExInType
# add id exin type to kfn
if n_ExInType < 0
push!(kfn.nInhabitory, sub_id)
else
push!(kfn.nExcitatory, sub_id)
end
end end
catch catch
end end
@@ -258,13 +267,16 @@ function kfn_1(kfnParams::Dict)
try # input neuron doest have n.subscriptionList try # input neuron doest have n.subscriptionList
for (i, sub_id) in enumerate(n.subscriptionList) for (i, sub_id) in enumerate(n.subscriptionList)
n_ExInType = kfn.neuronsArray[sub_id].ExInType n_ExInType = kfn.neuronsArray[sub_id].ExInType
# push!(n.subExInType, n_ExInType)
n.wRec[i] *= n_ExInType n.wRec[i] *= n_ExInType
end end
catch catch
end end
end end
for n in kfn.neuronsArray
push!(kfn.nExInType, n.ExInType)
end
return kfn return kfn
end end
@@ -457,7 +469,6 @@ Base.@kwdef mutable struct alifNeuron <: computeNeuron
reset epsilon_j. reset epsilon_j.
"reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """ "reflect" = neuron will merge wRecChange into wRec then reset wRecChange. """
learningStage::String = "inference" learningStage::String = "inference"
end end
""" alif neuron outer constructor """ alif neuron outer constructor
@@ -604,10 +615,10 @@ end
# n.id = id # n.id = id
# n.knowledgeFnName = kfnParams[:knowledgeFnName] # n.knowledgeFnName = kfnParams[:knowledgeFnName]
# subscription_options = shuffle!([1:(kfnParams[:input_neuron_number]+kfnParams[:computeNeuronNumber])...]) # subscription_options = shuffle!([1:(kfnParams[:input_neuron_number]+kfnParams[:computeNeuronNumber])...])
# if typeof(kfnParams[:synaptic_connection_number]) == String # if typeof(kfnParams[:synapticConnectionPercent]) == String
# percent = parse(Int, kfnParams[:synaptic_connection_number][1:end-1]) / 100 # percent = parse(Int, kfnParams[:synapticConnectionPercent][1:end-1]) / 100
# synaptic_connection_number = floor(length(subscription_options) * percent) # synapticConnectionPercent = floor(length(subscription_options) * percent)
# n.subscriptionList = [pop!(subscription_options) for i = 1:synaptic_connection_number] # n.subscriptionList = [pop!(subscription_options) for i = 1:synapticConnectionPercent]
# end # end
# filter!(x -> x != n.id, n.subscriptionList) # filter!(x -> x != n.id, n.subscriptionList)
# n.epsilonRec = zeros(length(n.subscriptionList)) # n.epsilonRec = zeros(length(n.subscriptionList))
@@ -621,13 +632,13 @@ function init_neuron!(id::Int64, n::lifNeuron, n_params::Dict, kfnParams::Dict)
n.id = id n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName] n.knowledgeFnName = kfnParams[:knowledgeFnName]
subscription_options = shuffle!([1:kfnParams[:totalNeurons]...]) subscription_options = shuffle!([1:kfnParams[:totalNeurons]...])
subscription_numbers = Int(floor(n_params[:synaptic_connection_number] * subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
kfnParams[:totalNeurons] / 100.0)) kfnParams[:totalNeurons]))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers] n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
# prevent subscription to itself by removing this neuron id # prevent subscription to itself by removing this neuron id
filter!(x -> x != n.id, n.subscriptionList) filter!(x -> x != n.id, n.subscriptionList)
n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList)) n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList)) n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList))
@@ -641,13 +652,13 @@ function init_neuron!(id::Int64, n::alifNeuron, n_params::Dict,
n.id = id n.id = id
n.knowledgeFnName = kfnParams[:knowledgeFnName] n.knowledgeFnName = kfnParams[:knowledgeFnName]
subscription_options = shuffle!([1:kfnParams[:totalNeurons]...]) subscription_options = shuffle!([1:kfnParams[:totalNeurons]...])
subscription_numbers = Int(floor(n_params[:synaptic_connection_number] * subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
kfnParams[:totalNeurons] / 100.0)) kfnParams[:totalNeurons]))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers] n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
# prevent subscription to itself by removing this neuron id # prevent subscription to itself by removing this neuron id
filter!(x -> x != n.id, n.subscriptionList) filter!(x -> x != n.id, n.subscriptionList)
n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList)) n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList)) n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList))
@@ -667,10 +678,10 @@ function init_neuron!(id::Int64, n::linearNeuron, n_params::Dict, kfnParams::Dic
n.knowledgeFnName = kfnParams[:knowledgeFnName] n.knowledgeFnName = kfnParams[:knowledgeFnName]
subscription_options = shuffle!([kfnParams[:totalInputPort]+1 : kfnParams[:totalNeurons]...]) subscription_options = shuffle!([kfnParams[:totalInputPort]+1 : kfnParams[:totalNeurons]...])
subscription_numbers = Int(floor(n_params[:synaptic_connection_number] * subscription_numbers = Int(floor((n_params[:synapticConnectionPercent] / 100.0) *
kfnParams[:totalComputeNeuron] / 100.0)) kfnParams[:totalNeurons]))
n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers] n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList)) n.synapticStrength = rand(-5:0.01:-4, length(n.subscriptionList))
n.epsilonRec = zeros(length(n.subscriptionList)) n.epsilonRec = zeros(length(n.subscriptionList))
n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList)) n.wRec = rand(-0.2:0.01:0.2, length(n.subscriptionList))