This commit is contained in:
ton
2023-09-12 22:44:39 +07:00
parent e7c0228313
commit d05864e1e0
17 changed files with 7293 additions and 167 deletions

View File

@@ -27,8 +27,8 @@ using .interface
""" version 0.0.9
Todo:
[0*] change model error calculation in user script (progress based)
[1] +W 90% of most active conn
[DONE] change model error calculation in user script (progress based)
[1*] +W 90% of most active conn
[2] -W 10% of less active conn
[3] synapse reconnect delay counter
[-] add temporal summation in addition to already used spatial summation.

View File

@@ -8,8 +8,8 @@ using ..type, ..snnUtil
#------------------------------------------------------------------------------------------------100
function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
function compute_paramsChange!(kfn::kfn_1, modelError::CuArray, outputError::CuArray, label)
lifComputeParamsChange!(kfn.timeStep,
kfn.lif_phi,
kfn.lif_epsilonRec,
@@ -24,29 +24,35 @@ function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
kfn.lif_arrayProjection4d,
kfn.lif_error,
modelError,
outputError,
kfn.inputSize,
kfn.bk,
label,
)
alifComputeParamsChange!(kfn.timeStep,
kfn.alif_phi,
kfn.alif_epsilonRec,
kfn.alif_eta,
kfn.alif_eRec,
kfn.alif_wRec,
kfn.alif_exInType,
kfn.alif_wRecChange,
kfn.on_wOut,
kfn.alif_firingCounter,
kfn.alif_firingTargetFrequency,
kfn.alif_arrayProjection4d,
kfn.alif_error,
modelError,
kfn.alif_phi,
kfn.alif_epsilonRec,
kfn.alif_eta,
kfn.alif_eRec,
kfn.alif_wRec,
kfn.alif_exInType,
kfn.alif_wRecChange,
kfn.on_wOut,
kfn.alif_firingCounter,
kfn.alif_firingTargetFrequency,
kfn.alif_arrayProjection4d,
kfn.alif_error,
modelError,
outputError,
kfn.inputSize,
kfn.bk,
label,
kfn.alif_epsilonRecA,
kfn.alif_beta,
kfn.alif_epsilonRecA,
kfn.alif_beta,
)
onComputeParamsChange!(kfn.on_phi,
kfn.on_epsilonRec,
kfn.on_eta,
@@ -58,7 +64,7 @@ function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
)
# error("DEBUG -> kfn compute_paramsChange! $(Dates.now())")
end
#WORKING implement +w for the 90% most active conn, -w for the 10% least active conn
function lifComputeParamsChange!( timeStep::CuArray,
phi::CuArray,
epsilonRec::CuArray,
@@ -73,45 +79,59 @@ function lifComputeParamsChange!( timeStep::CuArray,
arrayProjection4d::CuArray,
nError::CuArray,
modelError::CuArray,
outputError::CuArray,
inputSize::CuArray,
bk::CuArray,
label,
)
# Bₖⱼ in paper, sum() to get each neuron's total wOut weight,
# use absolute because only magnitude is needed
wOutSum_all = reshape( abs.(sum(wOut, dims=3)), (1,1,:, size(wOut, 4)) ) # (1,1,allNeuron,batch)
# get only each lif neuron's wOut, leaving out other neurons' wOut
eRec .= phi .* epsilonRec
# the 2D wRec matrix contains input, lif, and alif neurons; only the lif neurons are needed here
startIndex = prod(inputSize) +1
stopIndex = startIndex + size(wRec, 3) -1
wOutSum = @view(wOutSum_all[1,1, startIndex:stopIndex, :])
wOutSum = reshape(wOutSum, (1, 1, size(wOutSum, 1), size(wOutSum, 2))) # (1,1,n,batch)
# nError, a.k.a. the learning signal, uses the dopamine concept:
# the neuron receives the summed error signal (modelError)
nError .= (modelError .* wOutSum) .* arrayProjection4d
eRec .= phi .* epsilonRec
wRecChange .+= (-eta .* nError .* eRec)
startCol = CartesianIndices(wRec)[startIndex][2]
stopCol = CartesianIndices(wRec)[stopIndex][2]
_bk = @view(bk[:, startCol:stopCol, 1])
nError = _bk .* modelError
nError = reshape(nError, (1,1,:,1))
# _,_,i3,_ = size(wOut)
# for i in 1:i3
# # nError a.k.a. learning signal use dopamine concept,
# # this neuron receive summed error signal (modelError)
# onW = @view(wOut[:, startCol:stopCol, i, 1])
# _bk = @view(bk[:, startCol:stopCol, i, 1])
# mask = (iszero).(onW)
# bk_ = mask .* _bk
# bkComposed = onW .+ bk_
# nError = bkComposed .* modelError
# nError = reshape(nError, (1,1,:,1))
# # compute wRecChange of all neurons wrt to iᵗʰ output neuron
# wRecChange .+= (eta .* nError .* eRec)
# end
# compute wRecChange of all neurons wrt to iᵗʰ output neuron
wRecChange .+= (eta .* nError .* eRec)
# frequency regulator
wRecChange .+= 0.001 .* ((firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep) .*
eta .* eRec
# if sum(timeStep) == 785
# epsilonRec_cpu = epsilonRec |> cpu
# println("modelError ", modelError)
# println("modelError $modelError $(size(modelError))", modelError)
# println("")
# wchange = (-eta .* nError .* eRec) |> cpu
# println("wOutSum $(size(wOutSum))")
# wchange = (eta .* nError .* eRec) |> cpu
# println("wchange 5 1 ", wchange[:,:,5,1])
# println("")
# println("wchange 5 2 ", wchange[:,:,5,2])
# println("")
# println("epsilonRec 5 1 ", epsilonRec_cpu[:,:,5,1])
# println("")
# println("epsilonRec 5 2 ", epsilonRec_cpu[:,:,5,2])
# println("")
# error("DEBUG lifComputeParamsChange!")
# end
# reset epsilonRec
epsilonRec .= 0
end
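
For orientation, a minimal CPU sketch of the new learning-signal path in lifComputeParamsChange! above: plain Arrays stand in for the CuArrays, the sizes (nIn, nLif, batch) are illustrative, and modelError is a length-1 array as in the kfn_1 constructor. The ALIF update follows the same pattern with the adapted eligibility trace eRec = phi .* (epsilonRec .- beta .* epsilonRecA).

# Minimal CPU sketch (illustrative sizes; plain Arrays stand in for CuArrays).
nIn, nLif, batch = 4, 3, 2
wRec       = randn(1, nIn + nLif, nLif, batch)   # recurrent weights; columns = presynaptic neurons
bk         = rand(1, nIn + nLif, 1)              # random feedback matrix Bₖⱼ
modelError = randn(1)                            # summed model error, length-1 like kfn.modelError
eRec       = randn(1, nIn + nLif, nLif, batch)   # eligibility trace, eRec = phi .* epsilonRec
eta        = 0.01
wRecChange = zeros(size(wRec))

# pick only the lif block's columns out of the full (input + lif + alif) matrix
startIndex = nIn + 1
stopIndex  = startIndex + size(wRec, 3) - 1
startCol   = CartesianIndices(wRec)[startIndex][2]
stopCol    = CartesianIndices(wRec)[stopIndex][2]

# learning signal: fixed random feedback weights broadcast against the model error
_bk    = @view bk[:, startCol:stopCol, 1]
nError = reshape(_bk .* modelError, (1, 1, :, 1))

wRecChange .+= eta .* nError .* eRec             # eligibility-trace weight update

# frequency regulator: nudge weights toward the target firing rate
firingCounter         = zeros(size(wRec))
firingTargetFrequency = fill(0.1, size(wRec))
timeStep              = fill(100.0, size(wRec))
wRecChange .+= 0.001 .* ((firingTargetFrequency .- firingCounter ./ timeStep) ./ timeStep) .* eta .* eRec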
@@ -130,24 +150,28 @@ function alifComputeParamsChange!( timeStep::CuArray,
arrayProjection4d::CuArray,
nError::CuArray,
modelError::CuArray,
outputError::CuArray,
inputSize::CuArray,
bk::CuArray,
label,
epsilonRecA::CuArray,
beta::CuArray
beta::CuArray,
)
# Bₖⱼ in paper, sum() to get each neuron's total wOut weight,
# use absolute because only magnitude is needed
wOutSum_all = reshape( abs.(sum(wOut, dims=3)), (1,1,:, size(wOut, 4)) ) # (1,1,allNeuron,batch)
# get only each alif neuron's wOut, leaving out other neurons' wOut
wOutSum = @view(wOutSum_all[1,1, end-size(wRec, 3)+1:end, :])
wOutSum = reshape(wOutSum, (1, 1, size(wOutSum, 1), size(wOutSum, 2))) # (1,1,n,batch)
# nError, a.k.a. the learning signal, uses the dopamine concept:
# the neuron receives the summed error signal (modelError)
nError .= (modelError .* wOutSum) .* arrayProjection4d
eRec .= phi .* (epsilonRec .- (beta .* epsilonRecA)) # use eq. 25
wRecChange .+= (-eta .* nError .* eRec)
# the 2D wRec matrix contains input, lif, and alif neurons; only the lif neurons are needed here
startIndex = prod(inputSize) +1
stopIndex = startIndex + size(wRec, 3) -1
startCol = CartesianIndices(wRec)[startIndex][2]
stopCol = CartesianIndices(wRec)[stopIndex][2]
_bk = @view(bk[:, startCol:stopCol, 1])
nError = _bk .* modelError
nError = reshape(nError, (1,1,:,1))
wRecChange .+= (eta .* nError .* eRec)
# frequency regulator
wRecChange .+= 0.001 .* ((firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep) .*
@@ -395,8 +419,8 @@ function lifLearn(wRec,
zitCumulative_cpu = zitCumulative_cpu[:,:,1]
#TODO neuroplasticity, runs on the CPU side
wRec_cpu, neuronInactivityCounter_cpu, synapseReconnectDelayCounter_cpu,
= neuroplasticity(synapseConnectionNumber,
wRec_cpu, neuronInactivityCounter_cpu, synapseReconnectDelayCounter_cpu =
neuroplasticity(synapseConnectionNumber,
zitCumulative_cpu,
wRec_cpu,
wRecChange_cpu,

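Side note on the lifLearn hunk above: the multiple assignment had been split so that the `=` started a new line, which does not parse as a single assignment; keeping the `=` at the end of the left-hand line lets Julia continue the expression onto the next line. A minimal illustration with a hypothetical stand-in for neuroplasticity:

# Hypothetical stand-in returning (wRec, inactivityCounter, reconnectDelayCounter);
# only the line wrapping of the multiple assignment matters here.
neuroplasticity_stub(n) = (rand(n, n), zeros(Int, n), zeros(Int, n))

# A line ending in `=` is incomplete, so parsing continues on the next line and
# the whole thing stays a single destructuring assignment.
wRec_cpu, neuronInactivityCounter_cpu, synapseReconnectDelayCounter_cpu =
    neuroplasticity_stub(5)
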
View File

@@ -28,6 +28,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
exInType::Union{AbstractArray, Nothing} = nothing
modelError::Union{AbstractArray, Nothing} = nothing # store RSNN error
outputError::Union{AbstractArray, Nothing} = nothing # store output neurons error
bk::Union{AbstractArray, Nothing} = nothing # Bⱼₖ
# ---------------------------------------------------------------------------- #
# LIF Neurons #
@@ -190,6 +191,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.zit = zeros(row, col, batch) |> device
kfn.zitCumulative = (similar(kfn.zit) .= 0)
kfn.modelError = zeros(1) |> device
kfn.bk = rand(size(kfn.zit)...) |> device
# ---------------------------------------------------------------------------- #
# LIF config #
@@ -222,7 +224,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.lif_eta = (similar(kfn.lif_wRec) .= 0.01)
kfn.lif_gammaPd = (similar(kfn.lif_wRec) .= 0.3)
kfn.lif_wRecChange = (similar(kfn.lif_wRec) .= 0)
kfn.lif_error = (similar(kfn.lif_wRec) .= 0)
kfn.lif_error = (similar(kfn.lif_wRec) .= 0)
kfn.lif_firingCounter = (similar(kfn.lif_wRec) .= 0)
kfn.lif_firingTargetFrequency = (similar(kfn.lif_wRec) .= 0.1)
@@ -280,7 +282,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.alif_eta = (similar(kfn.alif_wRec) .= 0.01)
kfn.alif_gammaPd = (similar(kfn.alif_wRec) .= 0.3)
kfn.alif_wRecChange = (similar(kfn.alif_wRec) .= 0)
kfn.alif_error = (similar(kfn.alif_wRec) .= 0)
kfn.alif_error = (similar(kfn.alif_wRec) .= 0)
kfn.alif_firingCounter = (similar(kfn.alif_wRec) .= 0)
kfn.alif_firingTargetFrequency = (similar(kfn.alif_wRec) .= 0.1)
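
For context, a stripped-down sketch of the container pattern kfn_1 uses: Base.@kwdef fields that default to nothing and are filled in by the constructor. Field names and sizes here are illustrative, and identity stands in for the cpu/gpu device-transfer function.

# `identity` stands in for the `cpu`/`gpu` device-transfer function used above.
const to_device = identity

Base.@kwdef mutable struct TinyKfn
    zit::Union{AbstractArray, Nothing}        = nothing
    modelError::Union{AbstractArray, Nothing} = nothing
    bk::Union{AbstractArray, Nothing}         = nothing   # random feedback matrix Bⱼₖ
    eta::Union{AbstractArray, Nothing}        = nothing
end

function build_kfn(row, col, batch; device=to_device)
    kfn = TinyKfn()
    kfn.zit        = zeros(row, col, batch) |> device
    kfn.modelError = zeros(1) |> device
    kfn.bk         = rand(size(kfn.zit)...) |> device   # bk matches zit's shape
    # `(similar(x) .= v)` allocates an array shaped like `x` and fills it with `v`
    kfn.eta        = (similar(kfn.zit) .= 0.01)
    return kfn
end

kfn = build_kfn(2, 3, 4)   # CPU example; a real run passes a GPU transfer function as `device`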