This commit is contained in:
ton
2023-09-14 17:30:14 +07:00
parent 9e701cd042
commit d85edf5a19
3 changed files with 197 additions and 122 deletions

View File

@@ -26,7 +26,7 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.lif_firingCounter .= 0
kfn.lif_refractoryCounter .= 0
kfn.lif_zt .= 0
kfn.lif_synapseReconnectDelay .= 0
kfn.lif_synapticActivityCounter .= 0
kfn.alif_vt .= 0
kfn.alif_a .= 0
@@ -36,7 +36,7 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.alif_firingCounter .= 0
kfn.alif_refractoryCounter .= 0
kfn.alif_zt .= 0
kfn.alif_synapseReconnectDelay .= 0
kfn.alif_synapticActivityCounter .= 0
kfn.on_vt .= 0
kfn.on_epsilonRec .= 0
@@ -78,6 +78,7 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.lif_wRecChange,
kfn.lif_neuronInactivityCounter,
kfn.lif_synapseReconnectDelay,
kfn.lif_synapticActivityCounter,
kfn.timeStep,
)
end
@@ -105,12 +106,14 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.alif_wRecChange,
kfn.alif_neuronInactivityCounter,
kfn.alif_synapseReconnectDelay,
kfn.alif_synapticActivityCounter,
kfn.timeStep,
kfn.alif_epsilonRecA,
kfn.alif_a,
kfn.alif_avth,
kfn.alif_beta,
kfn.alif_rho,
kfn.timeStep,
)
end
end
@@ -176,6 +179,7 @@ function lifForward( zit::CuArray,
wRecChange::CuArray,
neuronInactivityCounter::CuArray,
synapseReconnectDelay::CuArray,
synapticActivityCounter::CuArray,
timeStep::CuArray,
)
@@ -197,8 +201,10 @@ function lifForward( zit::CuArray,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
GeneralUtils.linear_to_cartesian,
synapticActivityCounter,
timeStep,
GeneralUtils.linear_to_cartesian,
)
config = launch_configuration(kernel.fun)
@@ -232,8 +238,9 @@ function lifForward( zit::CuArray,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
GeneralUtils.linear_to_cartesian,
timeStep; threads, blocks)
synapticActivityCounter,
timeStep,
GeneralUtils.linear_to_cartesian; threads, blocks)
end
end
@@ -256,8 +263,9 @@ function lifForward( zit,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
linear_to_cartesian,
synapticActivityCounter,
timeStep,
linear_to_cartesian,
)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x # gpu threads index
@@ -302,8 +310,10 @@ function lifForward( zit,
# compute epsilonRec
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4]) +
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
# !iszero indicates synaptic subscription
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
# !iszero indicates synaptic subscription
synapticActivityCounter[i1,i2,i3,i4] = zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4])
if !iszero(wRec[i1,i2,i3,i4]) # check if this is wRec subscription
synapseReconnectDelay[i1,i2,i3,i4] -= 1
@@ -312,6 +322,7 @@ function lifForward( zit,
synapseReconnectDelay[i1,i2,i3,i4] = sum(timeStep)
end
end
# voltage regulator
wRecChange[i1,i2,i3,i4] = -0.01*0.0001 * (vt[i1,i2,i3,i4] - vth[i1,i2,i3,i4]) *
zit[i1,i2,i3,i4]
@@ -339,12 +350,14 @@ function alifForward( zit::CuArray,
wRecChange::CuArray,
neuronInactivityCounter::CuArray,
synapseReconnectDelay::CuArray,
synapticActivityCounter::CuArray,
timeStep::CuArray,
epsilonRecA::CuArray,
a::CuArray,
avth::CuArray,
beta::CuArray,
rho::CuArray,
timeStep::CuArray,
)
kernel = @cuda launch=false alifForward( zit,
@@ -365,13 +378,15 @@ function alifForward( zit::CuArray,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
synapticActivityCounter,
timeStep,
epsilonRecA,
a,
avth,
beta,
rho,
GeneralUtils.linear_to_cartesian,
timeStep,
)
config = launch_configuration(kernel.fun)
@@ -404,13 +419,15 @@ function alifForward( zit::CuArray,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
synapticActivityCounter,
timeStep,
epsilonRecA,
a,
avth,
beta,
rho,
GeneralUtils.linear_to_cartesian,
timeStep; threads, blocks)
GeneralUtils.linear_to_cartesian; threads, blocks)
end
end
@@ -433,13 +450,15 @@ function alifForward( zit,
wRecChange,
neuronInactivityCounter,
synapseReconnectDelay,
synapticActivityCounter,
timeStep,
epsilonRecA,
a,
avth,
beta,
rho,
linear_to_cartesian,
timeStep,
)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x # gpu threads index
@@ -501,6 +520,8 @@ function alifForward( zit,
(phi[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4])) +
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
synapticActivityCounter[i1,i2,i3,i4] = zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4])
if !iszero(wRec[i1,i2,i3,i4]) # check if this is wRec subscription
synapseReconnectDelay[i1,i2,i3,i4] -= 1
if synapseReconnectDelay[i1,i2,i3,i4] == 0

View File

@@ -301,7 +301,7 @@ function onComputeParamsChange!(phi::AbstractArray,
end
end
function learn!(kfn::kfn_1, device=cpu)
function learn!(kfn::kfn_1, progress, device=cpu)
# lif learn
kfn.lif_wRec, kfn.lif_neuronInactivityCounter, kfn.lif_synapseReconnectDelay =
lifLearn(kfn.lif_wRec,
@@ -311,9 +311,11 @@ function learn!(kfn::kfn_1, device=cpu)
kfn.lif_neuronInactivityCounter,
kfn.lif_synapseReconnectDelay,
kfn.lif_synapseConnectionNumber,
kfn.lif_synapticActivityCounter,
kfn.lif_eta,
kfn.lif_vt,
kfn.zitCumulative,
progress,
device)
# alif learn
@@ -325,9 +327,11 @@ function learn!(kfn::kfn_1, device=cpu)
kfn.alif_neuronInactivityCounter,
kfn.alif_synapseReconnectDelay,
kfn.alif_synapseConnectionNumber,
kfn.alif_synapticActivityCounter,
kfn.alif_eta,
kfn.alif_vt,
kfn.zitCumulative,
progress,
device)
# on learn
@@ -407,9 +411,11 @@ function lifLearn(wRec,
neuronInactivityCounter,
synapseReconnectDelay,
synapseConnectionNumber,
synapticActivityCounter,
eta,
vt,
zitCumulative,
progress,
device)
@@ -421,10 +427,14 @@ function lifLearn(wRec,
wRecChange_cpu = wRecChange_cpu[:,:,:,1]
eta_cpu = eta |> cpu
eta_cpu = eta_cpu[:,:,:,1]
exInType_cpu = exInType |> cpu
exInType_cpu = exInType_cpu[:,:,:,1]
neuronInactivityCounter_cpu = neuronInactivityCounter |> cpu
neuronInactivityCounter_cpu = neuronInactivityCounter_cpu[:,:,:,1] # (row, col, n)
synapseReconnectDelay_cpu = synapseReconnectDelay |> cpu
synapseReconnectDelay_cpu = synapseReconnectDelay_cpu[:,:,:,1]
synapticActivityCounter_cpu = synapticActivityCounter |> cpu
synapticActivityCounter_cpu = synapticActivityCounter_cpu[:,:,:,1]
zitCumulative_cpu = zitCumulative |> cpu
zitCumulative_cpu = zitCumulative_cpu[:,:,1]
@@ -433,10 +443,13 @@ function lifLearn(wRec,
neuroplasticity(synapseConnectionNumber,
zitCumulative_cpu,
wRec_cpu,
exInType_cpu,
wRecChange_cpu,
vt,
neuronInactivityCounter_cpu,
synapseReconnectDelay_cpu)
synapseReconnectDelay_cpu,
synapticActivityCounter_cpu,
progress,)
@@ -482,97 +495,6 @@ function lifLearn(wRec,
return wRec, neuronInactivityCounter, synapseReconnectDelay
end
#WORKING 1) implement 90% +w, 10% -w 2) rewrite this function
"""
    neuroplasticity(synapseConnectionNumber, zitCumulative, wRec, wRecChange, vt,
                    neuronInactivityCounter, synapseReconnectDelay)

Rewire recurrent synaptic connections: clear connections marked for
replacement (value `-1.0` in `wRec`), fully re-initialize the weights of
"dead" neurons, and draw replacement connections preferentially (70% target)
toward currently firing neurons. Mutates `wRec`, `neuronInactivityCounter`
and `synapseReconnectDelay` in place and returns `wRec`.

NOTE(review): the unconditional `error("DEBUG -> ...")` just below the size
query aborts every call, so the remainder of this function is currently
unreachable — it looks like leftover debug scaffolding; confirm before
depending on this code path.
"""
function neuroplasticity(synapseConnectionNumber,
zitCumulative, # (row, col) accumulated spike activity per neuron
wRec, # (row, col, n) recurrent weights; -1.0 marks a connection to replace
wRecChange,
vt,
neuronInactivityCounter,
synapseReconnectDelay) # (row, col, n)
i1,i2,i3 = size(wRec)
error("DEBUG -> neuroplasticity $(Dates.now())")
# merge weight
# adjust weight based on vt progress and repetition (90% +w, 10% -w)
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
# for each neuron, find total number of synaptic conn that should draw
# new connection to firing and non-firing neurons pool
# target: 70% of each neuron's synapses subscribed to firing neurons
subToFireNeuron_toBe = Int(floor(0.7 * synapseConnectionNumber))
# for each neuron, count how many synapses are already subscribed to firing-neurons
zw = zitCumulative .* wRec
subToFireNeuron_current = sum(GeneralUtils.isBetween.(zw, 0.0, 100.0), dims=(1,2)) # (1, 1, n)
zitMask = (!iszero).(zitCumulative) # zitMask of firing neurons = 1, non-firing = 0
projection = ones(i1,i2,i3)
zitMask = zitMask .* projection # broadcast the 2-d mask up to (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("neuroplasticity, from $(synapseConnectionNumber*size(totalNewConn, 3)) conn, $(sum(totalNewConn)) are replaced")
# clear -1.0 marker: replaced connections get a reconnect delay of -0.99,
# then the marker itself is zeroed out
GeneralUtils.replaceElements!(wRec, -1.0, synapseReconnectDelay, -0.99)
GeneralUtils.replaceElements!(wRec, -1.0, 0.0) # -1.0 marker is no longer required
for i in 1:i3
# NOTE(review): `neuronInactivityCounter[1:1:i][1]` always reads the FIRST
# linear element regardless of `i` — every iteration tests the same value.
# Probably intended something like `neuronInactivityCounter[:,:,i]`; confirm.
if neuronInactivityCounter[1:1:i][1] < -10000 # neuron die i.e. reset all weight
println("neuron die")
neuronInactivityCounter[:,:,i] .= 0 # reset
w = random_wRec(i1,i2,1,synapseConnectionNumber)
wRec[:,:,i] .= w
a = similar(w) .= -0.99 # synapseConnectionNumber of this neuron
mask = (!iszero).(w)
GeneralUtils.replaceElements!(mask, 1, a, 0)
synapseReconnectDelay[:,:,i] = a
else
remaining = 0
if subToFireNeuron_current[1,1,i] < subToFireNeuron_toBe
# neuron is short of firing-pool subscriptions; allocate the deficit
toAddConn = subToFireNeuron_toBe - subToFireNeuron_current[1,1,i]
totalNewConn[1,1,i] = totalNewConn[1,1,i] - toAddConn
# add new conn to firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
toAddConn)
# slots that could not be placed fall back to the non-firing pool
totalNewConn[1,1,i] += remaining
end
# add new conn to non-firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 0,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
totalNewConn[1,1,i])
if remaining > 0 # final get-all round if somehow non-firing pool has not enough slot
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
remaining)
end
end
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
return wRec
end
function alifLearn(wRec,
wRecChange,
exInType,
@@ -580,9 +502,11 @@ function alifLearn(wRec,
neuronInactivityCounter,
synapseReconnectDelay,
synapseConnectionNumber,
synapticActivityCounter,
eta,
vt,
zitCumulative,
progress,
device)
# merge learning weight with average learning weight of all batch
@@ -640,9 +564,139 @@ function onLearn!(wOut,
# adaptive wOut to help convergence using c_decay
wOut .-= 0.001 .* wOut
end
#WORKING 1) implement 90% +w, 10% -w 2) rewrite this function
"""
    neuroplasticity(synapseConnectionNumber, zitCumulative, wRec, exInType,
                    wRecChange, vt, neuronInactivityCounter,
                    synapseReconnectDelay, synapticActivityCounter, progress,)

Rewire recurrent synaptic connections, with the update policy selected by
`progress` (2 = skip learning, 1 = progress increase, 0 = no progress /
rewire only, -1 = setback; anything else raises). After the policy branch it
clears replacement markers (`-1.0` in `wRec`), re-initializes dead neurons,
and draws new connections preferentially (70% target) toward firing neurons.
Mutates `wRec`, `wRecChange`, `neuronInactivityCounter` and
`synapseReconnectDelay` in place and returns `wRec`.

NOTE(review): the unconditional `error("DEBUG -> ...")` right after the size
query aborts every call — all code below it is currently unreachable debug-
era scaffolding; several branches are still stubs (comments only).
"""
function neuroplasticity(synapseConnectionNumber,
zitCumulative, # (row, col) accumulated spike activity per neuron
wRec, # (row, col, n) recurrent weights; -1.0 marks a connection to replace
exInType,
wRecChange,
vt,
neuronInactivityCounter,
synapseReconnectDelay,
synapticActivityCounter,
progress,) # (row, col, n)
i1,i2,i3 = size(wRec)
error("DEBUG -> neuroplasticity $(Dates.now())")
#WORKING DEPEND ON modelError
if progress == 2 # no need to learn
# skip neuroplasticity
#TODO I may need to do something with neuronInactivityCounter and other variables
wRecChange .= 0
elseif progress == 1 # progress increase
# merge learning weight with average learning weight of all batch
wRec .= abs.((exInType .* wRec) .+ wRecChange) # abs because wRec doesn't carry sign
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# NOTE(review): `isless.()` is a broadcast call with NO arguments — it will
# throw a MethodError when this branch is reached; the intended mask
# expression appears to be missing.
mask = isless.()
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
elseif progress == 0 # no progress, no weight update, only rewire
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
elseif progress == -1 # setback
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
else
error("undefined condition line $(@__LINE__)")
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
# merge learning weight with average learning weight of all batch
# NOTE(review): this repeats the merge already done in the `progress == 1`
# branch above — when progress == 1 the merge (and abs) would be applied
# twice; confirm whether this line should move inside the other branches.
wRec .= abs.((exInType .* wRec) .+ wRecChange) # abs because wRec doesn't carry sign
# adjust weight based on vt progress and repetition (90% +w, 10% -w) depend on epsilonRec
# -w all non-fire connection except mature connection
# prune weak connection
# rewire synapse connection
# for each neuron, find total number of synaptic conn that should draw
# new connection to firing and non-firing neurons pool
# target: 70% of each neuron's synapses subscribed to firing neurons
subToFireNeuron_toBe = Int(floor(0.7 * synapseConnectionNumber))
# for each neuron, count how many synapse already subscribed to firing-neurons
zw = zitCumulative .* wRec
subToFireNeuron_current = sum(GeneralUtils.isBetween.(zw, 0.0, 100.0), dims=(1,2)) # (1, 1, n)
zitMask = (!iszero).(zitCumulative) # zitMask of firing neurons = 1, non-firing = 0
projection = ones(i1,i2,i3)
zitMask = zitMask .* projection # broadcast the 2-d mask up to (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("neuroplasticity, from $(synapseConnectionNumber*size(totalNewConn, 3)) conn, $(sum(totalNewConn)) are replaced")
# clear -1.0 marker: replaced connections get a reconnect delay of -0.99,
# then the marker itself is zeroed out
GeneralUtils.replaceElements!(wRec, -1.0, synapseReconnectDelay, -0.99)
GeneralUtils.replaceElements!(wRec, -1.0, 0.0) # -1.0 marker is no longer required
for i in 1:i3
# NOTE(review): `neuronInactivityCounter[1:1:i][1]` always reads the FIRST
# linear element regardless of `i` — every iteration tests the same value.
# Probably intended something like `neuronInactivityCounter[:,:,i]`; confirm.
if neuronInactivityCounter[1:1:i][1] < -10000 # neuron die i.e. reset all weight
println("neuron die")
neuronInactivityCounter[:,:,i] .= 0 # reset
w = random_wRec(i1,i2,1,synapseConnectionNumber)
wRec[:,:,i] .= w
a = similar(w) .= -0.99 # synapseConnectionNumber of this neuron
mask = (!iszero).(w)
GeneralUtils.replaceElements!(mask, 1, a, 0)
synapseReconnectDelay[:,:,i] = a
else
remaining = 0
if subToFireNeuron_current[1,1,i] < subToFireNeuron_toBe
# neuron is short of firing-pool subscriptions; allocate the deficit
toAddConn = subToFireNeuron_toBe - subToFireNeuron_current[1,1,i]
totalNewConn[1,1,i] = totalNewConn[1,1,i] - toAddConn
# add new conn to firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
toAddConn)
# slots that could not be placed fall back to the non-firing pool
totalNewConn[1,1,i] += remaining
end
# add new conn to non-firing neurons pool
remaining = addNewSynapticConn!(zitMask[:,:,i], 0,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
totalNewConn[1,1,i])
if remaining > 0 # final get-all round if somehow non-firing pool has not enough slot
remaining = addNewSynapticConn!(zitMask[:,:,i], 1,
@view(wRec[:,:,i]),
@view(synapseReconnectDelay[:,:,i]),
remaining)
end
end
end
# error("DEBUG -> neuroplasticity $(Dates.now())")
return wRec
end
# function neuroplasticity(synapseConnectionNumber,

View File

@@ -61,7 +61,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
lif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
lif_synapseReconnectDelay::Union{AbstractArray, Nothing} = nothing
lif_synapseConnectionNumber::Union{Int, Nothing} = nothing
# lif_synapticWChangeCounter::Union{AbstractArray, Nothing} = nothing
lif_synapticActivityCounter::Union{AbstractArray, Nothing} = nothing # work
# pre-allocation array
lif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
@@ -102,7 +102,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
alif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
alif_synapseReconnectDelay::Union{AbstractArray, Nothing} = nothing
alif_synapseConnectionNumber::Union{Int, Nothing} = nothing
# alif_synapticWChangeCounter::Union{AbstractArray, Nothing} = nothing
alif_synapticActivityCounter::Union{AbstractArray, Nothing} = nothing
# pre-allocation array
alif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
@@ -237,16 +237,16 @@ function kfn_1(params::Dict; device=cpu)
# initial value subscribed conn
for i in eachindex(mask)
if mask[i] == 1
kfn.lif_synapseReconnectDelay[i] = rand(1:100)
kfn.lif_synapseReconnectDelay[i] = rand(1:1000)
end
end
kfn.lif_synapseReconnectDelay = kfn.lif_synapseReconnectDelay |> device
# kfn.lif_synapticWChangeCounter = Array(similar(kfn.lif_wRec) .= -0.99) # -0.99 for non-sub conn
# mask = Array((!iszero).(kfn.lif_wRec))
# # initial value subscribed conn, synapseReconnectDelay range -10000 to +10000
# GeneralUtils.replaceElements!(mask, 1, kfn.lif_synapticWChangeCounter, 1.0)
# kfn.lif_synapticWChangeCounter = kfn.lif_synapticWChangeCounter |> device
kfn.lif_synapticActivityCounter = Array(similar(kfn.lif_wRec) .= -0.99) # -0.99 for non-sub conn
mask = Array((!iszero).(kfn.lif_wRec))
# initial value subscribed conn
GeneralUtils.replaceElements!(mask, 1, kfn.lif_synapticActivityCounter, 0.0)
kfn.lif_synapticActivityCounter = kfn.lif_synapticActivityCounter |> device
kfn.lif_arrayProjection4d = (similar(kfn.lif_wRec) .= 1)
kfn.lif_recSignal = (similar(kfn.lif_wRec) .= 0)
@@ -296,16 +296,16 @@ function kfn_1(params::Dict; device=cpu)
# initial value subscribed conn
for i in eachindex(mask)
if mask[i] == 1
kfn.alif_synapseReconnectDelay[i] = rand(1:100)
kfn.alif_synapseReconnectDelay[i] = rand(1:1000)
end
end
kfn.alif_synapseReconnectDelay = kfn.alif_synapseReconnectDelay |> device
# kfn.alif_synapticWChangeCounter = Array(similar(kfn.alif_wRec) .= -0.99) # -9 for non-sub conn
# mask = Array((!iszero).(kfn.alif_wRec))
# # initial value subscribed conn, synapseReconnectDelay range -10000 to +10000
# GeneralUtils.replaceElements!(mask, 1, kfn.alif_synapticWChangeCounter, 1.0)
# kfn.alif_synapticWChangeCounter = kfn.alif_synapticWChangeCounter |> device
kfn.alif_synapticActivityCounter = Array(similar(kfn.alif_wRec) .= -0.99) # -0.99 for non-sub conn
mask = Array((!iszero).(kfn.alif_wRec))
# initial value subscribed conn
GeneralUtils.replaceElements!(mask, 1, kfn.alif_synapticActivityCounter, 0.0)
kfn.alif_synapticActivityCounter = kfn.alif_synapticActivityCounter |> device
kfn.alif_arrayProjection4d = (similar(kfn.alif_wRec) .= 1)
kfn.alif_recSignal = (similar(kfn.alif_wRec) .= 0)