add learn()

This commit is contained in:
ton
2023-07-26 15:21:34 +07:00
parent ff9909fd8d
commit a94354efb3
6 changed files with 224 additions and 87 deletions

View File

@@ -1,16 +1,131 @@
module learn
# export
export learn!, compute_paramsChange!
# using
using Statistics, Random, LinearAlgebra, JSON3, Flux, Dates
using GeneralUtils
using ..type, ..snnUtil
#------------------------------------------------------------------------------------------------100
"""
    compute_paramsChange!(kfn::kfn_1, modelError, outputError)

Accumulate one training step's recurrent-weight change into `kfn`'s
`lif_wRecChange` / `alif_wRecChange` buffers, for both the plain LIF
population and the adaptive (ALIF) population.

# Arguments
- `kfn::kfn_1`: network container; the per-population tensors (`lif_*`,
  `alif_*`) and the output weights `on_wOut` are read, and the
  `*_wRecChange` buffers are mutated in place by the helpers.
- `modelError`: per-batch learning signal broadcast to every recurrent
  neuron by the helpers.
- `outputError`: per-output-neuron error. Currently unused here — it was
  only consumed by the retired per-neuron implementation — but kept so the
  caller-facing interface stays stable.

Fix: removed the leftover debug `error("debug end -> ...")` that
unconditionally threw after the helper calls, and the superseded
commented-out per-neuron loop it guarded.
"""
function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
    # LIF population: eligibility is phi .* epsilonRec, scaled by eta and
    # the neuron's total outgoing weight derived from on_wOut.
    lifComputeParamsChange!(kfn.lif_phi,
                            kfn.lif_epsilonRec,
                            kfn.lif_eta,
                            kfn.lif_wRec,
                            kfn.lif_wRecChange,
                            kfn.on_wOut,
                            modelError)
    # ALIF population: as above, with the adaptation trace epsilonRecA
    # (weighted by beta) added into the eligibility.
    alifComputeParamsChange!(kfn.alif_phi,
                            kfn.alif_epsilonRec,
                            kfn.alif_epsilonRecA,
                            kfn.alif_eta,
                            kfn.alif_wRec,
                            kfn.alif_wRecChange,
                            kfn.alif_beta,
                            kfn.on_wOut,
                            modelError)
    return nothing
end
"""
    lifComputeParamsChange!(phi, epsilonRec, eta, wRec, wRecChange, wOut, modelError)

Accumulate the LIF population's weight-change term into `wRecChange` in
place. For every (neuron, batch) slice, the update is
`-eta * (phi .* epsilonRec) * (modelError * outWeight)`, where `outWeight`
is the neuron's total outgoing weight restricted to its active synapses.

All array arguments are indexed as 4-D `(d1, d2, neuron, batch)` tensors
except `modelError` (indexed `(:, batch)`) and `wOut` (summed over dim 3).
"""
function lifComputeParamsChange!( phi,
                                    epsilonRec,
                                    eta,
                                    wRec,
                                    wRecChange,
                                    wOut,
                                    modelError)
    nSyn, _, nNeuron, nBatch = size(epsilonRec)
    # Bₖⱼ in the paper: per-neuron total output weight, collapsed over dim 3.
    wOutSum = reshape(sum(wOut, dims=3), (nSyn, :, nBatch))
    # Walk the neuron axis of every batch.
    for batch in 1:nBatch, neuron in 1:nNeuron
        etaVal = view(eta, :, :, neuron, batch)[1]
        phiVal = view(phi, :, :, neuron, batch)[1]
        errVal = view(modelError, :, batch)[1]
        # Total outgoing weight reachable through this neuron's nonzero
        # recurrent synapses (synaptic subscription mask .* wOutSum).
        outWeight = sum(GeneralUtils.isNotEqual.(view(wRec, :, :, neuron, batch), 0) .*
                        view(wOutSum, :, :, batch))
        # How much a single spike of this neuron contributes to the output
        # error, folded into the accumulated weight change.
        view(wRecChange, :, :, neuron, batch) .+= (-1 * etaVal) .*
            ((phiVal .* view(epsilonRec, :, :, neuron, batch)) .*
             (errVal .* outWeight))
    end
    return nothing
end
"""
    alifComputeParamsChange!(phi, epsilonRec, epsilonRecA, eta, wRec,
                             wRecChange, beta, wOut, modelError)

Accumulate the adaptive-LIF population's weight-change term into
`wRecChange` in place. Identical to `lifComputeParamsChange!` except the
eligibility adds the adaptation trace:
`phi .* epsilonRec .+ (phi * beta) .* epsilonRecA`.

All array arguments are indexed as 4-D `(d1, d2, neuron, batch)` tensors
except `modelError` (indexed `(:, batch)`) and `wOut` (summed over dim 3).
"""
function alifComputeParamsChange!( phi,
                                    epsilonRec,
                                    epsilonRecA,
                                    eta,
                                    wRec,
                                    wRecChange,
                                    beta,
                                    wOut,
                                    modelError)
    nSyn, _, nNeuron, nBatch = size(epsilonRec)
    # Bₖⱼ in the paper: per-neuron total output weight, collapsed over dim 3.
    wOutSum = reshape(sum(wOut, dims=3), (nSyn, :, nBatch))
    # Walk the neuron axis of every batch.
    for batch in 1:nBatch, neuron in 1:nNeuron
        etaVal = view(eta, :, :, neuron, batch)[1]
        phiVal = view(phi, :, :, neuron, batch)[1]
        betaVal = view(beta, :, :, neuron, batch)[1]
        errVal = view(modelError, :, batch)[1]
        # Total outgoing weight reachable through this neuron's nonzero
        # recurrent synapses (synaptic subscription mask .* wOutSum).
        outWeight = sum(GeneralUtils.isNotEqual.(view(wRec, :, :, neuron, batch), 0) .*
                        view(wOutSum, :, :, batch))
        # Eligibility: voltage component plus beta-weighted adaptation component.
        eligibility = (phiVal .* view(epsilonRec, :, :, neuron, batch)) .+
                      ((phiVal * betaVal) .* view(epsilonRecA, :, :, neuron, batch))
        # Fold this neuron's contribution to the output error into the
        # accumulated weight change.
        view(wRecChange, :, :, neuron, batch) .+= (-1 * etaVal) .*
            eligibility .* (errVal .* outWeight)
    end
    return nothing
end