This commit is contained in:
ton
2023-09-14 17:30:14 +07:00
parent 9e701cd042
commit d85edf5a19
3 changed files with 197 additions and 122 deletions

View File

@@ -301,7 +301,7 @@ function onComputeParamsChange!(phi::AbstractArray,
end
end
function learn!(kfn::kfn_1, device=cpu)
function learn!(kfn::kfn_1, progress, device=cpu)
# lif learn
kfn.lif_wRec, kfn.lif_neuronInactivityCounter, kfn.lif_synapseReconnectDelay =
lifLearn(kfn.lif_wRec,
@@ -311,9 +311,11 @@ function learn!(kfn::kfn_1, device=cpu)
kfn.lif_neuronInactivityCounter,
kfn.lif_synapseReconnectDelay,
kfn.lif_synapseConnectionNumber,
kfn.lif_synapticActivityCounter,
kfn.lif_eta,
kfn.lif_vt,
kfn.zitCumulative,
progress,
device)
# alif learn
@@ -325,9 +327,11 @@ function learn!(kfn::kfn_1, device=cpu)
kfn.alif_neuronInactivityCounter,
kfn.alif_synapseReconnectDelay,
kfn.alif_synapseConnectionNumber,
kfn.alif_synapticActivityCounter,
kfn.alif_eta,
kfn.alif_vt,
kfn.zitCumulative,
progress,
device)
# on learn
@@ -407,9 +411,11 @@ function lifLearn(wRec,
neuronInactivityCounter,
synapseReconnectDelay,
synapseConnectionNumber,
synapticActivityCounter,
eta,
vt,
zitCumulative,
progress,
device)
@@ -421,10 +427,14 @@ function lifLearn(wRec,
wRecChange_cpu = wRecChange_cpu[:,:,:,1]
eta_cpu = eta |> cpu
eta_cpu = eta_cpu[:,:,:,1]
exInType_cpu = exInType |> cpu
exInType_cpu = exInType_cpu[:,:,:,1]
neuronInactivityCounter_cpu = neuronInactivityCounter |> cpu
neuronInactivityCounter_cpu = neuronInactivityCounter_cpu[:,:,:,1] # (row, col, n)
synapseReconnectDelay_cpu = synapseReconnectDelay |> cpu
synapseReconnectDelay_cpu = synapseReconnectDelay_cpu[:,:,:,1]
synapticActivityCounter_cpu = synapticActivityCounter |> cpu
synapticActivityCounter_cpu = synapticActivityCounter_cpu[:,:,:,1]
zitCumulative_cpu = zitCumulative |> cpu
zitCumulative_cpu = zitCumulative_cpu[:,:,1]
@@ -433,10 +443,13 @@ function lifLearn(wRec,
neuroplasticity(synapseConnectionNumber,
zitCumulative_cpu,
wRec_cpu,
exInType_cpu,
wRecChange_cpu,
vt,
neuronInactivityCounter_cpu,
synapseReconnectDelay_cpu)
synapseReconnectDelay_cpu,
synapticActivityCounter_cpu,
progress,)
@@ -482,97 +495,6 @@ function lifLearn(wRec,
return wRec, neuronInactivityCounter, synapseReconnectDelay
end
#WORKING 1) implement 90% +w, 10% -w 2) rewrite this function
# Rewire the recurrent synapse population of each neuron: clear the "-1.0"
# replacement markers left by earlier pruning, fully reset neurons flagged as
# dead, and draw new connections preferring firing presynaptic neurons
# (target: 70% of a neuron's synapses subscribed to firing neurons).
# Mutates wRec, neuronInactivityCounter and synapseReconnectDelay in place and
# returns wRec. wRecChange and vt are accepted but never read in this body —
# NOTE(review): confirm they are still needed in the signature.
function neuroplasticity(synapseConnectionNumber,
zitCumulative, # (row, col) cumulative spike activity; nonzero = neuron fired
wRec, # (row, col, n) recurrent weights; -1.0 marks a conn scheduled for replacement
wRecChange,
vt,
neuronInactivityCounter,
synapseReconnectDelay) # (row, col, n)
i1,i2,i3 = size(wRec)
# NOTE(review): unconditional debug guard — every statement below is
# unreachable until this error(...) is removed.
error("DEBUG -> neuroplasticity $(Dates.now())")
# merge weight
# adjust weight based on vt progress and repetition (90% +w, 10% -w)
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
# for each neuron, find total number of synaptic conn that should draw
# new connection to firing and non-firing neurons pool
subToFireNeuron_toBe = Int(floor(0.7 * synapseConnectionNumber))
# for each neuron, count how many synap already subscribed to firing-neurons
# (zw is nonzero only where both the presynaptic neuron fired and the weight
# is nonzero; the 0.0..100.0 window presumably excludes zero products — TODO
# confirm GeneralUtils.isBetween bound semantics)
zw = zitCumulative .* wRec
subToFireNeuron_current = sum(GeneralUtils.isBetween.(zw, 0.0, 100.0), dims=(1,2)) # (1, 1, n)
zitMask = (!iszero).(zitCumulative) # zitMask of firing neurons = 1, non-firing = 0
# broadcast the (row, col) firing mask up to (row, col, n)
projection = ones(i1,i2,i3)
zitMask = zitMask .* projection # (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("neuroplasticity, from $(synapseConnectionNumber*size(totalNewConn, 3)) conn, $(sum(totalNewConn)) are replaced")
# clear -1.0 marker
# presumably: where wRec == -1.0, stamp synapseReconnectDelay with the -0.99
# "just reconnected" sentinel, then zero the marker itself — verify against
# GeneralUtils.replaceElements! signature
GeneralUtils.replaceElements!(wRec, -1.0, synapseReconnectDelay, -0.99)
GeneralUtils.replaceElements!(wRec, -1.0, 0.0) # -1.0 marker is no longer required
for i in 1:i3
# NOTE(review): `[1:1:i][1]` always reads linear element 1 regardless of i —
# likely intended a per-neuron lookup such as [1,1,i]; TODO confirm.
if neuronInactivityCounter[1:1:i][1] < -10000 # neuron die i.e. reset all weight
println("neuron die")
neuronInactivityCounter[:,:,i] .= 0 # reset
# draw a fresh random weight sheet for this neuron
w = random_wRec(i1,i2,1,synapseConnectionNumber)
wRec[:,:,i] .= w
a = similar(w) .= -0.99 # synapseConnectionNumber of this neuron
# presumably zeros the delay where a weight exists, keeps -0.99 elsewhere —
# TODO confirm replaceElements!(mask, 1, a, 0) writes into `a`
mask = (!iszero).(w)
GeneralUtils.replaceElements!(mask, 1, a, 0)
synapseReconnectDelay[:,:,i] = a
else
remaining = 0
# top up this neuron's quota of connections to firing neurons first
if subToFireNeuron_current[1,1,i] < subToFireNeuron_toBe
toAddConn = subToFireNeuron_toBe - subToFireNeuron_current[1,1,i]
totalNewConn[1,1,i] = totalNewConn[1,1,i] - toAddConn
# add new conn to firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
toAddConn)
# slots the firing pool could not absorb go back into the budget
totalNewConn[1,1,i] += remaining
end
# add new conn to non-firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 0,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
totalNewConn[1,1,i])
if remaining > 0 # final get-all round if somehow non-firing pool has not enough slot
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
remaining)
end
end
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
return wRec
end
function alifLearn(wRec,
wRecChange,
exInType,
@@ -580,9 +502,11 @@ function alifLearn(wRec,
neuronInactivityCounter,
synapseReconnectDelay,
synapseConnectionNumber,
synapticActivityCounter,
eta,
vt,
zitCumulative,
progress,
device)
# merge learning weight with average learning weight of all batch
@@ -640,9 +564,139 @@ function onLearn!(wOut,
# adaptive wOut to help convergence using c_decay
wOut .-= 0.001 .* wOut
end
#WORKING 1) implement 90% +w, 10% -w 2) rewrite this function
# Progress-aware rewrite (in progress) of `neuroplasticity`: merge the batch
# learning weights into wRec, then prune/rewire each neuron's recurrent
# synapses, preferring connections to firing presynaptic neurons (target 70%).
#
# `progress` encodes the learning trend and selects the update policy:
#   2 = converged (skip weight update), 1 = improvement, 0 = plateau
#   (rewire only), -1 = setback; any other value raises an error.
# NOTE(review): the unconditional error("DEBUG ...") below makes the whole
# progress dispatch and everything after it unreachable; the `progress == 1`
# branch also contains an incomplete `isless.()` call that would throw if
# reached. vt and synapticActivityCounter are accepted but never read in this
# body — TODO confirm they are still needed.
function neuroplasticity(synapseConnectionNumber,
zitCumulative, # (row, col) cumulative spike activity; nonzero = neuron fired
wRec, # (row, col, n) recurrent weights; -1.0 marks a conn scheduled for replacement
exInType, # excitatory/inhibitory sign carrier, merged multiplicatively below
wRecChange,
vt,
neuronInactivityCounter,
synapseReconnectDelay,
synapticActivityCounter,
progress,) # (row, col, n)
i1,i2,i3 = size(wRec)
# NOTE(review): unconditional debug guard — remove to enable the code below.
error("DEBUG -> neuroplasticity $(Dates.now())")
#WORKING DEPEND ON modelError
if progress == 2 # no need to learn
# skip neuroplasticity
#TODO I may need to do something with neuronInactivityCounter and other variables
wRecChange .= 0
elseif progress == 1 # progress increase
# merge learning weight with average learning weight of all batch
wRec .= abs.((exInType .* wRec) .+ wRecChange) # abs because wRec doesn't carry sign
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# NOTE(review): incomplete expression — isless with no arguments throws a
# MethodError; the comparison operands are still to be written.
mask = isless.()
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
elseif progress == 0 # no progress, no weight update, only rewire
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
elseif progress == -1 # setback
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
else
error("undefined condition line $(@__LINE__)")
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
# NOTE(review): the merge below duplicates the `progress == 1` branch above —
# presumably it predates the dispatch and should be folded into it.
# merge learning weight with average learning weight of all batch
wRec .= abs.((exInType .* wRec) .+ wRecChange) # abs because wRec doesn't carry sign
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
# for each neuron, find total number of synaptic conn that should draw
# new connection to firing and non-firing neurons pool
subToFireNeuron_toBe = Int(floor(0.7 * synapseConnectionNumber))
# for each neuron, count how many synapse already subscribed to firing-neurons
# (zw is nonzero only where both the presynaptic neuron fired and the weight
# is nonzero; the 0.0..100.0 window presumably excludes zero products — TODO
# confirm GeneralUtils.isBetween bound semantics)
zw = zitCumulative .* wRec
subToFireNeuron_current = sum(GeneralUtils.isBetween.(zw, 0.0, 100.0), dims=(1,2)) # (1, 1, n)
zitMask = (!iszero).(zitCumulative) # zitMask of firing neurons = 1, non-firing = 0
# broadcast the (row, col) firing mask up to (row, col, n)
projection = ones(i1,i2,i3)
zitMask = zitMask .* projection # (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("neuroplasticity, from $(synapseConnectionNumber*size(totalNewConn, 3)) conn, $(sum(totalNewConn)) are replaced")
# clear -1.0 marker
# presumably: where wRec == -1.0, stamp synapseReconnectDelay with the -0.99
# "just reconnected" sentinel, then zero the marker itself — verify against
# GeneralUtils.replaceElements! signature
GeneralUtils.replaceElements!(wRec, -1.0, synapseReconnectDelay, -0.99)
GeneralUtils.replaceElements!(wRec, -1.0, 0.0) # -1.0 marker is no longer required
for i in 1:i3
# NOTE(review): `[1:1:i][1]` always reads linear element 1 regardless of i —
# likely intended a per-neuron lookup such as [1,1,i]; TODO confirm.
if neuronInactivityCounter[1:1:i][1] < -10000 # neuron die i.e. reset all weight
println("neuron die")
neuronInactivityCounter[:,:,i] .= 0 # reset
# draw a fresh random weight sheet for this neuron
w = random_wRec(i1,i2,1,synapseConnectionNumber)
wRec[:,:,i] .= w
a = similar(w) .= -0.99 # synapseConnectionNumber of this neuron
# presumably zeros the delay where a weight exists, keeps -0.99 elsewhere —
# TODO confirm replaceElements!(mask, 1, a, 0) writes into `a`
mask = (!iszero).(w)
GeneralUtils.replaceElements!(mask, 1, a, 0)
synapseReconnectDelay[:,:,i] = a
else
remaining = 0
# top up this neuron's quota of connections to firing neurons first
if subToFireNeuron_current[1,1,i] < subToFireNeuron_toBe
toAddConn = subToFireNeuron_toBe - subToFireNeuron_current[1,1,i]
totalNewConn[1,1,i] = totalNewConn[1,1,i] - toAddConn
# add new conn to firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
toAddConn)
# slots the firing pool could not absorb go back into the budget
totalNewConn[1,1,i] += remaining
end
# add new conn to non-firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 0,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
totalNewConn[1,1,i])
if remaining > 0 # final get-all round if somehow non-firing pool has not enough slot
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
remaining)
end
end
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
return wRec
end
# function neuroplasticity(synapseConnectionNumber,