version 0.0.5-alpha

This commit is contained in:
ton
2023-08-25 09:40:43 +07:00
parent 0af39ee09e
commit c74eea9cdf
14 changed files with 4155 additions and 150 deletions


@@ -171,7 +171,7 @@ function onComputeParamsChange!(phi::CuArray,
eRec .= phi .* epsilonRec
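# reshape outputError (presumably (out, batch)) to (1, 1, out, batch) so it broadcasts against arrayProjection4d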
nError .= reshape(outputError, (1, 1, :, size(outputError, 2))) .* arrayProjection4d
wOutChange .+= (-eta .* nError .* eRec) #BUG why doesn't wOutChange increase at every timestep where the model gets a wrong answer?
wOutChange .+= (-eta .* nError .* eRec)
# reset epsilonRec
epsilonRec .= 0
@@ -264,16 +264,24 @@ function onComputeParamsChange!(phi::AbstractArray,
end
end
function learn!(kfn::kfn_1)
function learn!(kfn::kfn_1, device=cpu)
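# device: Flux-style device mover (cpu / gpu), presumably used to move the rewired arrays back to the GPU after CPU-side neuroplasticity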
# lif learn
lifLearn!(kfn.lif_wRec,
kfn.lif_wRecChange,
kfn.lif_arrayProjection4d)
kfn.lif_arrayProjection4d,
kfn.lif_neuronInactivityCounter,
kfn.lif_synapticConnectionNumber,
kfn.zit_cumulative,
device)
# alif learn
alifLearn!(kfn.alif_wRec,
kfn.alif_wRecChange,
kfn.alif_arrayProjection4d)
kfn.alif_arrayProjection4d,
kfn.alif_neuronInactivityCounter,
kfn.alif_synapticConnectionNumber,
kfn.zit_cumulative,
device)
# on learn
onLearn!(kfn.on_wOut,
@@ -289,23 +297,52 @@ end
function lifLearn!(wRec,
wRecChange,
arrayProjection4d)
# merge learning weight with average learning weight
arrayProjection4d,
inactivityCounter,
synapticConnectionNumber,
zit_cumulative,
device)
# merge learning weight with average learning weight of all batch
wRec .+= (sum(wRecChange, dims=4) ./ (size(wRec, 4))) .* arrayProjection4d
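# note: the divisor size(wRec, 4) is assumed to equal the batch dimension of wRecChange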
#TODO synaptic strength
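# the arrays are staged on the CPU below, presumably because the elementwise rewiring relies on scalar indexing that CuArray disallows by default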
wRec_cpu = wRec |> cpu
wRec_cpu = wRec_cpu[:,:,:,1] # every batch shares the same neuron wRec, so keep one slice -> (row, col, n)
inactivityCounter_cpu = inactivityCounter |> cpu
inactivityCounter_cpu = inactivityCounter_cpu[:,:,:,1] # (row, col, n)
zit_cumulative_cpu = zit_cumulative |> cpu
zit_cumulative_cpu = zit_cumulative_cpu[:,:,1] # (row, col)
#TODO neuroplasticity
# error("DEBUG -> lifLearn! $(Dates.now())")
# weak / negative synaptic connections will be re-randomized in neuroplasticity()
wRec_cpu = GeneralUtils.replaceBetween.(wRec_cpu, 0.0, 0.1, -1.0) # mark with -1.0
# synaptic connections whose inactivity counter is at zero will also be re-randomized in neuroplasticity()
GeneralUtils.replace_elements!(inactivityCounter_cpu, 0.0, wRec_cpu, -1.0)
# reset those exhausted inactivity-counter elements to the base value
GeneralUtils.replace_elements!(inactivityCounter_cpu, 0.0, -9.0) # -9.0 is base value
#WORKING neuroplasticity
wRec_cpu = neuroplasticity(synapticConnectionNumber, zit_cumulative_cpu, wRec_cpu,
inactivityCounter_cpu)
error("DEBUG -> lifLearn! $(Dates.now())")
# #TODO send back to the device with the correct dimensions
# wRec = wRec |> device
# inactivityCounter = inactivityCounter_cpu |> device
end
function alifLearn!(wRec,
wRecChange,
arrayProjection4d)
arrayProjection4d,
inactivityCounter,
synapticConnectionNumber,
zit_cumulative,
device)
# merge learning weight with average learning weight
wRec .+= (sum(wRecChange, dims=4) ./ (size(wRec, 4))) .* arrayProjection4d
# weak / negative synaptic connections are zeroed for now; re-randomizing them in neuroplasticity() is still a TODO for alif (see below)
wRec .= GeneralUtils.replaceLessThan.(wRec, 0.01, 0.0)
#TODO synaptic strength
#TODO neuroplasticity
@@ -327,8 +364,56 @@ function onLearn!(wOut,
end
function neuroplasticity(synapticConnectionNumber,
zit_cumulative, # (row, col)
wRec, # (row, col, n)
inactivityCounter_cpu) # (row, col, n)
i1, i2, i3 = size(wRec) # (row, col, n)
# for each neuron, decide how many synaptic connections should be drawn
# from the firing and the non-firing neuron pools
subToFireNeuron_toBe = Int(floor(0.7 * synapticConnectionNumber))
subToNonFiringNeuron_toBe = synapticConnectionNumber - subToFireNeuron_toBe
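# e.g. synapticConnectionNumber = 10 -> subToFireNeuron_toBe = 7, subToNonFiringNeuron_toBe = 3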
#WORKING for each neuron, count how many synapses are already subscribed to firing neurons
subToFireNeuron_current = sum((!iszero).(zit_cumulative .* wRec), dims=(1,2)) # (1, 1, n)
subToNonFiringNeuron_current = synapticConnectionNumber .- subToFireNeuron_current # (1, 1, n)
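# i.e. a synapse counts as subscribed to a firing neuron when both its weight and that source neuron's cumulative spike count are nonzero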
mask = (!iszero).(zit_cumulative) # mask of firing neurons = 1, non-firing = 0
projection = ones(i1,i2,i3)
mask = mask .* projection # (row, col, n)
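# the 2-D firing mask is now broadcast across the neuron dimension -> (row, col, n)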
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("mask ", size(mask))
println("wRec ", size(wRec))
println("inactivityCounter_cpu ", size(inactivityCounter_cpu))
println("totalNeurons ", totalNewConn, size(totalNewConn))
error("DEBUG -> neuroplasticity $(Dates.now())")
for i in 1:i3
# add new conn to firing neurons pool
remaining = GeneralUtils.replace_elements(mask[:,:,i],
1,
wRec[:,:,i],
inactivityCounter_cpu[:,:,i],
totalNewConn[:,:,i])
#TODO add new conn to non-firing neurons pool
end
newFiringConn = max.(subToFireNeuron_toBe .- subToFireNeuron_current, 0) # elementwise over (1, 1, n), clamped at zero
newNonFiringConn = subToNonFiringNeuron_toBe .- subToNonFiringNeuron_current
return wRec
end
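
The GeneralUtils helpers called in this diff (replaceBetween, replaceLessThan, and the 3- and 4-argument replace_elements!) are defined elsewhere in the repo. Below is a minimal sketch of what their call sites suggest, assuming replaceBetween / replaceLessThan are scalar functions meant for broadcasting and replace_elements! does in-place masked assignment; the repo's actual definitions may differ, and the 5-argument replace_elements used inside the neuroplasticity loop is omitted because its semantics are not clear from this diff alone.

module GeneralUtils

# scalar helper for broadcasting: values inside [lo, hi] are replaced by v
replaceBetween(x, lo, hi, v) = (lo <= x <= hi) ? v : x

# scalar helper for broadcasting: values below the threshold are replaced by v
replaceLessThan(x, threshold, v) = (x < threshold) ? v : x

# 3-argument form: in place, where arr == cond_val, overwrite arr with new_val
function replace_elements!(arr::AbstractArray, cond_val, new_val)
    arr[arr .== cond_val] .= new_val
    return arr
end

# 4-argument form: in place, where cond_arr == cond_val, overwrite target with new_val
function replace_elements!(cond_arr::AbstractArray, cond_val,
                           target::AbstractArray, new_val)
    target[cond_arr .== cond_val] .= new_val
    return target
end

end # module

With these semantics, replaceBetween.(wRec_cpu, 0.0, 0.1, -1.0) only marks weights inside [0.0, 0.1]; strictly negative weights would pass through untouched, which is worth checking against the "weak / negative" comment in lifLearn!.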