add neuroplasticity for output neuron

This commit is contained in:
2023-05-21 15:55:40 +07:00
parent 4522d10142
commit d3d47d1114
5 changed files with 54 additions and 17 deletions

View File

@@ -36,8 +36,8 @@ using .interface
Todo: Todo:
[] using RL to control learning signal [] using RL to control learning signal
[] consider using Dates.now() instead of timestamp because time_stamp may overflow [] consider using Dates.now() instead of timestamp because time_stamp may overflow
[] training should include adjusting α, neuron membrane potential decay factor [1] training should include adjusting α, neuron membrane potential decay factor
which defined by neuron.tau_m formular in type.jl which defined by neuron.tau_m formula in type.jl
[DONE] each knowledgeFn should have its own noise generator [DONE] each knowledgeFn should have its own noise generator
[DONE] where to put pseudo derivative (n.phi) [DONE] where to put pseudo derivative (n.phi)

View File

@@ -60,7 +60,7 @@ function (kfn::kfn_1)(m::model, input_data::AbstractVector)
for i in 1:length(input_data)] for i in 1:length(input_data)]
# noise = [rand(rng, Distributions.Binomial(1, 0.5)) for i in 1:10] # another option # noise = [rand(rng, Distributions.Binomial(1, 0.5)) for i in 1:10] # another option
input_data = [noise; input_data] # noise start from neuron id 1 input_data = [noise; input_data] # noise must start from neuron id 1
for n in kfn.neuronsArray for n in kfn.neuronsArray
timestep_forward!(n) timestep_forward!(n)
@@ -218,13 +218,13 @@ function (n::linearNeuron)(kfn::T) where T<:knowledgeFn
# decay of v_t1 # decay of v_t1
n.v_t1 = n.alpha * n.v_t n.v_t1 = n.alpha * n.v_t
n.vError = n.v_t1 n.vError = n.v_t1 # store voltage that will be used to calculate error later
else else
n.recSignal = sum(n.wRec .* n.z_i_t) # signal from other neuron that this neuron subscribed n.recSignal = sum(n.wRec .* n.z_i_t) # signal from other neuron that this neuron subscribed
n.alpha_v_t = n.alpha * n.v_t n.alpha_v_t = n.alpha * n.v_t
n.v_t1 = n.alpha_v_t + n.recSignal n.v_t1 = n.alpha_v_t + n.recSignal
n.v_t1 = no_negative!(n.v_t1) n.v_t1 = no_negative!(n.v_t1)
n.vError = n.v_t1 n.vError = n.v_t1 # store voltage that will be used to calculate error later
if n.v_t1 > n.v_th if n.v_t1 > n.v_th
n.z_t1 = true n.z_t1 = true
n.refractoryCounter = n.refractoryDuration n.refractoryCounter = n.refractoryDuration

View File

@@ -27,8 +27,8 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
outs = [n.z_t1 for n in kfn.outputNeuronsArray] outs = [n.z_t1 for n in kfn.outputNeuronsArray]
for (i, out) in enumerate(outs) for (i, out) in enumerate(outs)
if out != correctAnswer[i] # need to adjust weight if out != correctAnswer[i] # need to adjust weight
kfnError = (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].vError) * kfnError = ( (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].vError) *
100 / kfn.outputNeuronsArray[i].v_th 100 / kfn.outputNeuronsArray[i].v_th ) * 0.1 # 0.1 for scaling down error
Threads.@threads for n in kfn.neuronsArray Threads.@threads for n in kfn.neuronsArray
# for n in kfn.neuronsArray # for n in kfn.neuronsArray
@@ -67,7 +67,7 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
synapticConnStrength!(n) synapticConnStrength!(n)
neuroplasticity!(n, kfn.firedNeurons, kfn.nExInType) neuroplasticity!(n, kfn.firedNeurons, kfn.nExInType, kfn.kfnParams[:totalInputPort])
end end
kfn.learningStage = "inference" kfn.learningStage = "inference"

View File

@@ -362,7 +362,7 @@ end
""" rewire of neuron synaptic connection that has 0 weight. Without connection's excitatory and """ rewire of neuron synaptic connection that has 0 weight. Without connection's excitatory and
inhabitory ratio constraint. inhabitory ratio constraint.
""" """
function neuroplasticity!(n::Union{computeNeuron, outputNeuron}, firedNeurons::Vector, function neuroplasticity!(n::computeNeuron, firedNeurons::Vector,
nExInTypeList::Vector) nExInTypeList::Vector)
# if there is 0-weight then replace it with new connection # if there is 0-weight then replace it with new connection
zeroWeightConnIndex = findall(iszero.(n.wRec)) # connection that has 0 weight zeroWeightConnIndex = findall(iszero.(n.wRec)) # connection that has 0 weight
@@ -398,6 +398,44 @@ function neuroplasticity!(n::Union{computeNeuron, outputNeuron}, firedNeurons::V
end end
end end
"""
    neuroplasticity!(n::outputNeuron, firedNeurons::Vector,
                     nExInTypeList::Vector, totalInputNeuron::Integer)

Rewire every zero-weight synaptic connection of output neuron `n` to a new
presynaptic neuron, preferring neurons that fired this step.

Candidate pools exclude `n` itself, neurons `n` already subscribes to, and the
input-port neurons (ids `1:totalInputNeuron`), since output neurons should not
wire directly to inputs. New weights are drawn from `0.01:0.01:0.2` and signed
by `nExInTypeList[newConn]` (excitatory/inhibitory sign per neuron id); new
synaptic strengths are drawn from `-5:0.01:-4`.

NOTE(review): the `∉` operators here were garbled in the rendered diff
(`x [n.id]` etc.); they are restored below to match the exclusion comments.

# Arguments
- `n`: output neuron to rewire (mutated in place: `subscriptionList`, `wRec`,
  `synapticStrength`).
- `firedNeurons`: ids of neurons that fired in the current timestep.
- `nExInTypeList`: per-neuron excitatory(+1)/inhibitory(-1) sign, indexed by id;
  its length is the total neuron count.
- `totalInputNeuron`: number of input-port neurons; ids `1:totalInputNeuron`
  are assumed to be the input neurons (they are created first).
"""
function neuroplasticity!(n::outputNeuron, firedNeurons::Vector,
                          nExInTypeList::Vector, totalInputNeuron::Integer)
    # Connections whose weight decayed/was zeroed get replaced.
    zeroWeightConnIndex = findall(iszero.(n.wRec))
    # Preferred pool: neurons that fired, excluding self, current
    # subscriptions, and input neurons.
    nFiredPool = filter(x -> x != n.id, firedNeurons)
    filter!(x -> x ∉ n.subscriptionList, nFiredPool)
    filter!(x -> x > totalInputNeuron, nFiredPool) # exclude input neurons
    # Fallback pool: every other neuron id, with the same exclusions.
    nNonFiredPool = setdiff!(collect(1:length(nExInTypeList)), nFiredPool)
    filter!(x -> x != n.id, nNonFiredPool)
    filter!(x -> x ∉ n.subscriptionList, nNonFiredPool)
    filter!(x -> x > totalInputNeuron, nNonFiredPool) # exclude input neurons
    w = rand(0.01:0.01:0.2, length(zeroWeightConnIndex))
    synapticStrength = rand(-5:0.01:-4, length(zeroWeightConnIndex))
    # Randomize pick order so rewiring is not biased toward low neuron ids.
    shuffle!(nFiredPool)
    shuffle!(nNonFiredPool)
    for (i, connIndex) in enumerate(zeroWeightConnIndex)
        if !isempty(nFiredPool)
            newConn = popfirst!(nFiredPool)
        else
            newConn = popfirst!(nNonFiredPool)
        end
        # The connection being replaced goes back into nNonFiredPool so the
        # fallback pool cannot run dry while candidates remain.
        push!(nNonFiredPool, n.subscriptionList[connIndex])
        n.subscriptionList[connIndex] = newConn
        n.wRec[connIndex] = w[i] * nExInTypeList[newConn]
        n.synapticStrength[connIndex] = synapticStrength[i]
    end
end
@@ -425,7 +463,6 @@ end
end # end module end # end module

View File

@@ -190,17 +190,17 @@ function kfn_1(kfnParams::Dict)
throw(error("number of compute neuron must be greater than input neuron")) throw(error("number of compute neuron must be greater than input neuron"))
end end
# Bn # # Bn
if kfn.kfnParams[:Bn] == "random" # if kfn.kfnParams[:Bn] == "random"
kfn.Bn = [Random.rand(0:0.001:1) for i in 1:kfn.kfnParams[:computeNeuronNumber]] # kfn.Bn = [Random.rand(0:0.001:1) for i in 1:kfn.kfnParams[:computeNeuronNumber]]
else # in case I want to specify manually # else # in case I want to specify manually
kfn.Bn = [kfn.kfnParams[:Bn] for i in 1:kfn.kfnParams[:computeNeuronNumber]] # kfn.Bn = [kfn.kfnParams[:Bn] for i in 1:kfn.kfnParams[:computeNeuronNumber]]
end # end
# assign neurons ID by their position in kfn.neurons array because I think it is # assign neurons ID by their position in kfn.neurons array because I think it is
# straight forward way # straight forward way
# add input port # add input port, it must be added before any other neuron types
for (k, v) in kfn.kfnParams[:inputPort] for (k, v) in kfn.kfnParams[:inputPort]
current_type = kfn.kfnParams[:inputPort][k] current_type = kfn.kfnParams[:inputPort][k]
for i = 1:current_type[:numbers] for i = 1:current_type[:numbers]