version 0.0.7

This commit is contained in:
ton
2023-08-29 19:52:52 +07:00
parent 2f89905dc9
commit 619e8d7f83
14 changed files with 4013 additions and 35 deletions

View File

@@ -25,14 +25,13 @@ using .interface
#------------------------------------------------------------------------------------------------100
""" version 0.0.6
""" version 0.0.7
Todo:
[] synaptic liquidity range 0 to 100,000 -> 1.0 to 0.99
[] add weight liquidity
[DONE] add excitatory/inhibitory matrix
[-] add temporal summation in addition to already used spatial summation.
        CANCELLED, spatial summation every second until the membrane potential reaches a threshold
is in itself a temporal summation.
[DONE] add neuroplasticity
[4] implement dormant connection and pruning mechanism. the longer the training the longer
0 weight stay 0.
[] using RL to control learning signal
@@ -40,11 +39,12 @@ using .interface
[] Liquid time constant. training should include adjusting α, neuron membrane potential decay factor
which defined by neuron.tau_m formula in type.jl
Change from version: 0.0.4
Change from version: 0.0.6
-
All features
- excitatory/inhibitory matrix
- neuroplasticity
"""

View File

@@ -28,6 +28,7 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.lif_zt .= 0
kfn.alif_vt .= 0
kfn.alif_a .= 0
kfn.alif_epsilonRec .= 0
kfn.alif_epsilonRecA .= 0
kfn.alif_wRecChange .= 0

View File

@@ -9,9 +9,6 @@ using ..type, ..snnUtil
#------------------------------------------------------------------------------------------------100
function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
# modelError = reshape(modelError, (1,1,1,:)) # (1,1,1,batch)
modelError = reshape(modelError, (1,1,:, size(modelError, 2)))
modelError = sum(modelError, dims=3)
lifComputeParamsChange!(kfn.timeStep,
kfn.lif_phi,
@@ -19,6 +16,7 @@ function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
kfn.lif_eta,
kfn.lif_eRec,
kfn.lif_wRec,
kfn.lif_exInType,
kfn.lif_wRecChange,
kfn.on_wOut,
kfn.lif_firingCounter,
@@ -36,6 +34,7 @@ function compute_paramsChange!(kfn::kfn_1, modelError, outputError)
kfn.alif_eta,
kfn.alif_eRec,
kfn.alif_wRec,
kfn.alif_exInType,
kfn.alif_wRecChange,
kfn.on_wOut,
kfn.alif_firingCounter,
@@ -66,6 +65,7 @@ function lifComputeParamsChange!( timeStep::CuArray,
eta::CuArray,
eRec::CuArray,
wRec::CuArray,
exInType::CuArray,
wRecChange::CuArray,
wOut::CuArray,
firingCounter::CuArray,
@@ -122,6 +122,7 @@ function alifComputeParamsChange!( timeStep::CuArray,
eta::CuArray,
eRec::CuArray,
wRec::CuArray,
exInType::CuArray,
wRecChange::CuArray,
wOut::CuArray,
firingCounter::CuArray,
@@ -268,22 +269,26 @@ function learn!(kfn::kfn_1, device=cpu)
# lif learn
kfn.lif_wRec, kfn.lif_neuronInactivityCounter, kfn.lif_synapticInactivityCounter =
lifLearn(kfn.lif_wRec,
kfn.lif_exInType,
kfn.lif_wRecChange,
kfn.lif_arrayProjection4d,
kfn.lif_neuronInactivityCounter,
kfn.lif_synapticInactivityCounter,
kfn.lif_synapticConnectionNumber,
kfn.lif_synapticWChangeCounter,
kfn.zitCumulative,
device)
# alif learn
kfn.alif_wRec, kfn.alif_neuronInactivityCounter, kfn.alif_synapticInactivityCounter =
alifLearn(kfn.alif_wRec,
kfn.alif_exInType,
kfn.alif_wRecChange,
kfn.alif_arrayProjection4d,
kfn.alif_neuronInactivityCounter,
kfn.alif_synapticInactivityCounter,
kfn.alif_synapticConnectionNumber,
kfn.alif_synapticWChangeCounter,
kfn.zitCumulative,
device)
@@ -300,18 +305,19 @@ function learn!(kfn::kfn_1, device=cpu)
end
function lifLearn(wRec,
exInType,
wRecChange,
arrayProjection4d,
neuronInactivityCounter,
synapticInactivityCounter,
synapticConnectionNumber,
synapticWChangeCounter, #WORKING
zitCumulative,
device)
#WORKING - synapticInactivityCounter -10000 to 10000, weight change liquidity range from 1.0 to 0.1 respectively
# merge learning weight with average learning weight of all batch
wch = sum(wRecChange, dims=4) ./ (size(wRec, 4)) .* arrayProjection4d
wRec .+= wch
wRec .= (exInType .* wRec) .+ wch
arrayProjection4d_cpu = arrayProjection4d |> cpu
wRec_cpu = wRec |> cpu
@@ -327,7 +333,7 @@ function lifLearn(wRec,
wRec_cpu = GeneralUtils.replaceBetween.(wRec_cpu, 0.0, 0.01, -1.0) # mark with -1.0
    # synaptic connection that has no activity will get randomized in neuroplasticity()
mask = isless.(synapticInactivityCounter_cpu, -10_000)
mask = isless.(synapticInactivityCounter_cpu, -100000)
GeneralUtils.replace_elements!(mask, 1, wRec_cpu, -1.0)
# reset lif_inactivity elements to base value
GeneralUtils.replace_elements!(mask, 1, synapticInactivityCounter_cpu, 0.0)
@@ -347,24 +353,25 @@ function lifLearn(wRec,
synapticInactivityCounter_cpu = synapticInactivityCounter_cpu .* arrayProjection4d_cpu
synapticInactivityCounter = synapticInactivityCounter_cpu |> device
# error("DEBUG -> lifLearn! $(Dates.now())")
return wRec, neuronInactivityCounter, synapticInactivityCounter
end
function alifLearn(wRec,
exInType,
wRecChange,
arrayProjection4d,
neuronInactivityCounter,
synapticInactivityCounter,
synapticConnectionNumber,
synapticWChangeCounter,
zitCumulative,
device)
#WORKING - synapticInactivityCounter -10000 to 10000, weight change liquidity range from 1.0 to 0.1 respectively
# merge learning weight with average learning weight of all batch
wch = sum(wRecChange, dims=4) ./ (size(wRec, 4)) .* arrayProjection4d
wRec .+= wch
wRec .= (exInType .* wRec) .+ wch
arrayProjection4d_cpu = arrayProjection4d |> cpu
wRec_cpu = wRec |> cpu
@@ -380,7 +387,7 @@ function alifLearn(wRec,
wRec_cpu = GeneralUtils.replaceBetween.(wRec_cpu, 0.0, 0.01, -1.0) # mark with -1.0
    # synaptic connection that has no activity will get randomized in neuroplasticity()
mask = isless.(synapticInactivityCounter_cpu, -10_000)
mask = isless.(synapticInactivityCounter_cpu, -100000)
GeneralUtils.replace_elements!(mask, 1, wRec_cpu, -1.0)
# reset alif_inactivity elements to base value
GeneralUtils.replace_elements!(mask, 1, synapticInactivityCounter_cpu, 0.0)
@@ -439,17 +446,18 @@ function neuroplasticity(synapticConnectionNumber,
projection = ones(i1,i2,i3)
zitMask = zitMask .* projection # (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
println("neuroplasticity, from $synapticConnectionNumber, $totalNewConn are replaced")
# println("neuroplasticity, from $synapticConnectionNumber, $totalNewConn are replaced")
# clear -1.0 marker
GeneralUtils.replace_elements!(wRec, -1.0, synapticInactivityCounter, -0.99)
GeneralUtils.replace_elements!(wRec, -1.0, 0.0) # -1.0 marker is no longer required
for i in 1:i3
if neuronInactivityCounter[1:1:i][1] < -10_000 # neuron die i.e. reset all weight
if neuronInactivityCounter[1:1:i][1] < -10000 # neuron die i.e. reset all weight
println("neuron die")
neuronInactivityCounter[:,:,i] .= 0 # reset
w = wRec(i1,i2,1,synapticConnectionNumber)
wRec[:,:,i] = w
w = random_wRec(i1,i2,1,synapticConnectionNumber)
wRec[:,:,i] .= w
a = similar(w) .= -0.99 # synapticConnectionNumber of this neuron
mask = (!iszero).(w)

View File

@@ -52,13 +52,6 @@ function addNewSynapticConn!(mask::AbstractArray{<:Any}, x::Number, wRec::Abstra
# println("n ", n, size(n))
# println("")
total_x_tobeReplced = sum(isequal.(mask, x))
remaining = 0
if n == 0 || n > total_x_tobeReplced
remaining = n - total_x_tobeReplced
n = total_x_tobeReplced
end
# check if mask and wRec have the same size
if size(mask) != size(wRec)
error("mask and wRec must have the same size")
@@ -68,15 +61,22 @@ function addNewSynapticConn!(mask::AbstractArray{<:Any}, x::Number, wRec::Abstra
alreadySub = findall(x -> x != 0, wRec) # get already subscribe
setdiff!(indices, alreadySub) # remove already sub conn from pool
remaining = 0
if n == 0 || n > length(indices)
remaining = n - length(indices)
n = length(indices)
end
# shuffle the indices using the rng function
shuffle!(rng, indices)
# select the first n indices
n > length(indices) ? println(">>> ", total_x_tobeReplced) : nothing
selected = indices[1:n]
# replace the elements in wRec at the selected positions with a
for i in selected
wRec[i] = rand(0.01:0.01:0.1)
if counter !== nothing
counter[i] = 0 # reset
counter[i] = 0 # counting start from 0
end
end
# error("DEBUG addNewSynapticConn!")

View File

@@ -2,9 +2,10 @@ module type
export
# struct
kfn_1
kfn_1,
# function
random_wRec
using Random, GeneralUtils
@@ -59,6 +60,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
lif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
lif_synapticInactivityCounter::Union{AbstractArray, Nothing} = nothing
lif_synapticConnectionNumber::Union{Int, Nothing} = nothing
lif_synapticWChangeCounter::Union{AbstractArray, Nothing} = nothing
# pre-allocation array
lif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
@@ -99,6 +101,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
alif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
alif_synapticInactivityCounter::Union{AbstractArray, Nothing} = nothing
alif_synapticConnectionNumber::Union{Int, Nothing} = nothing
alif_synapticWChangeCounter::Union{AbstractArray, Nothing} = nothing
# pre-allocation array
alif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
@@ -198,7 +201,7 @@ function kfn_1(params::Dict; device=cpu)
# subscription
synapticConnectionPercent = kfn.params[:computeNeuron][:lif][:params][:synapticConnectionPercent]
kfn.lif_synapticConnectionNumber = Int(floor(row*col * synapticConnectionPercent/100))
w = wRec(row, col, lif_n, kfn.lif_synapticConnectionNumber)
w = random_wRec(row, col, lif_n, kfn.lif_synapticConnectionNumber)
# project 3D w into 4D kfn.lif_wRec (row, col, n, batch)
kfn.lif_wRec = reshape(w, (row, col, lif_n, 1)) .* ones(row, col, lif_n, batch) |> device
@@ -216,7 +219,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.lif_phi = (similar(kfn.lif_wRec) .= 0)
kfn.lif_epsilonRec = (similar(kfn.lif_wRec) .= 0)
kfn.lif_eRec = (similar(kfn.lif_wRec) .= 0)
kfn.lif_eta = (similar(kfn.lif_wRec) .= 0.001)
kfn.lif_eta = (similar(kfn.lif_wRec) .= 0.01)
kfn.lif_gammaPd = (similar(kfn.lif_wRec) .= 0.3)
kfn.lif_wRecChange = (similar(kfn.lif_wRec) .= 0)
kfn.lif_error = (similar(kfn.lif_wRec) .= 0)
@@ -228,7 +231,12 @@ function kfn_1(params::Dict; device=cpu)
mask = Array((!iszero).(kfn.lif_wRec))
# initial value subscribed conn, synapticInactivityCounter range -10000 to +10000
GeneralUtils.replace_elements!(mask, 1, kfn.lif_synapticInactivityCounter, 0)
kfn.lif_synapticInactivityCounter = kfn.lif_synapticInactivityCounter |> device
kfn.lif_synapticInactivityCounter = kfn.lif_synapticInactivityCounter |> device
kfn.lif_synapticWChangeCounter = Array(similar(kfn.lif_wRec) .= -0.99) # -0.99 marks non-subscribed conn
mask = Array((!iszero).(kfn.lif_wRec))
# initial value for subscribed conn: synapticWChangeCounter starts at 1.0 (full weight-change liquidity)
GeneralUtils.replace_elements!(mask, 1, kfn.lif_synapticWChangeCounter, 1.0)
kfn.lif_synapticWChangeCounter = kfn.lif_synapticWChangeCounter |> device
kfn.lif_arrayProjection4d = (similar(kfn.lif_wRec) .= 1)
kfn.lif_recSignal = (similar(kfn.lif_wRec) .= 0)
@@ -247,7 +255,7 @@ function kfn_1(params::Dict; device=cpu)
# subscription
synapticConnectionPercent = kfn.params[:computeNeuron][:alif][:params][:synapticConnectionPercent]
kfn.alif_synapticConnectionNumber = Int(floor(row*col * synapticConnectionPercent/100))
w = wRec(row, col, alif_n, kfn.alif_synapticConnectionNumber)
w = random_wRec(row, col, alif_n, kfn.alif_synapticConnectionNumber)
# project 3D w into 4D kfn.alif_wRec
kfn.alif_wRec = reshape(w, (row, col, alif_n, 1)) .* ones(row, col, alif_n, batch) |> device
@@ -265,7 +273,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.alif_phi = (similar(kfn.alif_wRec) .= 0)
kfn.alif_epsilonRec = (similar(kfn.alif_wRec) .= 0)
kfn.alif_eRec = (similar(kfn.alif_wRec) .= 0)
kfn.alif_eta = (similar(kfn.alif_wRec) .= 0.001)
kfn.alif_eta = (similar(kfn.alif_wRec) .= 0.01)
kfn.alif_gammaPd = (similar(kfn.alif_wRec) .= 0.3)
kfn.alif_wRecChange = (similar(kfn.alif_wRec) .= 0)
kfn.alif_error = (similar(kfn.alif_wRec) .= 0)
@@ -278,6 +286,11 @@ function kfn_1(params::Dict; device=cpu)
# initial value subscribed conn, synapticInactivityCounter range -10000 to +10000
GeneralUtils.replace_elements!(mask, 1, kfn.alif_synapticInactivityCounter, 0)
kfn.alif_synapticInactivityCounter = kfn.alif_synapticInactivityCounter |> device
kfn.alif_synapticWChangeCounter = Array(similar(kfn.alif_wRec) .= -0.99) # -0.99 marks non-subscribed conn
mask = Array((!iszero).(kfn.alif_wRec))
# initial value for subscribed conn: synapticWChangeCounter starts at 1.0 (full weight-change liquidity)
GeneralUtils.replace_elements!(mask, 1, kfn.alif_synapticWChangeCounter, 1.0)
kfn.alif_synapticWChangeCounter = kfn.alif_synapticWChangeCounter |> device
kfn.alif_arrayProjection4d = (similar(kfn.alif_wRec) .= 1)
kfn.alif_recSignal = (similar(kfn.alif_wRec) .= 0)
@@ -341,7 +354,7 @@ function kfn_1(params::Dict; device=cpu)
kfn.on_phi = (similar(kfn.on_wOut) .= 0)
kfn.on_epsilonRec = (similar(kfn.on_wOut) .= 0)
kfn.on_eRec = (similar(kfn.on_wOut) .= 0)
kfn.on_eta = (similar(kfn.on_wOut) .= 0.001)
kfn.on_eta = (similar(kfn.on_wOut) .= 0.01)
kfn.on_gammaPd = (similar(kfn.on_wOut) .= 0.3)
kfn.on_wOutChange = (similar(kfn.on_wOut) .= 0)
kfn.on_error = (similar(kfn.on_wOut) .= 0)
@@ -363,7 +376,7 @@ function kfn_1(params::Dict; device=cpu)
return kfn
end
function wRec(row, col, n, synapticConnectionNumber)
function random_wRec(row, col, n, synapticConnectionNumber)
# subscription
w = zeros(row, col, n)