version 0.0.5-alpha

This commit is contained in:
ton
2023-08-25 09:40:43 +07:00
parent 0af39ee09e
commit c74eea9cdf
14 changed files with 4155 additions and 150 deletions

View File

@@ -25,19 +25,22 @@ using .interface
#------------------------------------------------------------------------------------------------100
""" version 0.0.3
""" version 0.0.5
Todo:
[2] implement dormant connection and pruning mechanism. the longer the training the longer
[DONE] add excitatory/inhibitory matrix
[-] add temporal summation in addition to already used spatial summation.
CANCELLED, spatial summation every second until membrane potential reaches a threshold
is in itself a temporal summation.
[x] add neuroplasticity
[4] implement dormant connection and pruning mechanism. the longer the training the longer
0 weight stay 0.
[] using RL to control learning signal
[] consider using Dates.now() instead of timestamp because time_stamp may overflow
[] Liquid time constant. training should include adjusting α, neuron membrane potential decay factor
which defined by neuron.tau_m formula in type.jl
Change from version: 0.0.2
- knowledgeFn in GPU format
- use partial error update for computeNeuron
- frequency regulator
Change from version: 0.0.4
-
All features

View File

@@ -55,6 +55,7 @@ function (kfn::kfn_1)(input::AbstractArray)
# project 3D kfn zit into 4D lif zit
i1, i2, i3, i4 = size(kfn.lif_zit)
kfn.lif_zit .= reshape(kfn.zit, (i1, i2, 1, i4)) .* kfn.lif_arrayProjection4d
kfn.lif_exInType .= kfn.exInType .* kfn.lif_arrayProjection4d
lifForward( kfn.lif_zit,
kfn.lif_wRec,
@@ -70,13 +71,16 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.lif_gammaPd,
kfn.lif_firingCounter,
kfn.lif_recSignal,
kfn.lif_subscription,
kfn.lif_exInType,
kfn.lif_neuronInactivityCounter,
kfn.lif_synapticInactivityCounter,
)
end
@async begin
# project 3D kfn zit into 4D alif zit
i1, i2, i3, i4 = size(kfn.alif_zit)
kfn.alif_zit .= reshape(kfn.zit, (i1, i2, 1, i4)) .* kfn.alif_arrayProjection4d
kfn.alif_exInType .= kfn.exInType .* kfn.alif_arrayProjection4d
alifForward(kfn.alif_zit,
kfn.alif_wRec,
@@ -92,12 +96,15 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.alif_gammaPd,
kfn.alif_firingCounter,
kfn.alif_recSignal,
kfn.alif_subscription,
kfn.alif_exInType,
kfn.alif_neuronInactivityCounter,
kfn.alif_synapticInactivityCounter,
kfn.alif_epsilonRecA,
kfn.alif_a,
kfn.alif_avth,
kfn.alif_beta,
kfn.alif_rho,)
kfn.alif_rho,
)
end
end
@@ -132,13 +139,11 @@ function (kfn::kfn_1)(input::AbstractArray)
kfn.on_gammaPd,
kfn.on_firingCounter,
kfn.on_recSignal,
kfn.on_subscription,
)
# get on_zt4d to on_zt
kfn.on_zt .= reduce(max, kfn.on_zt4d, dims=(1,2))
logit = reshape(kfn.on_zt, (size(input, 1), :))
# error("DEBUG -> kfn forward")
return logit,
kfn.zit
end
@@ -158,7 +163,9 @@ function lifForward( zit::CuArray,
gammaPd::CuArray,
firingCounter::CuArray,
recSignal::CuArray,
subscription::CuArray,
exInType::CuArray,
neuronInactivityCounter::CuArray,
synapticInactivityCounter::CuArray,
)
kernel = @cuda launch=false lifForward( zit,
@@ -175,7 +182,9 @@ function lifForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
GeneralUtils.linear_to_cartesian,
)
config = launch_configuration(kernel.fun)
@@ -206,7 +215,9 @@ function lifForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
GeneralUtils.linear_to_cartesian; threads, blocks)
end
end
@@ -226,7 +237,9 @@ function lifForward( zit,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
linear_to_cartesian,
)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x # gpu threads index
@@ -247,7 +260,8 @@ function lifForward( zit,
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4])
else # refractory period is inactive
recSignal[i1,i2,i3,i4] = wRec[i1,i2,i3,i4] * zit[i1,i2,i3,i4]
recSignal[i1,i2,i3,i4] = wRec[i1,i2,i3,i4] * zit[i1,i2,i3,i4] *
exInType[i1,i2,i3,i4]
vt[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * vt[i1,i2,i3,i4]) +
sum(@view(recSignal[:,:,i3,i4]))
@@ -257,8 +271,12 @@ function lifForward( zit,
refractoryCounter[i1,i2,i3,i4] = refractoryDuration[i1,i2,i3,i4]
firingCounter[i1,i2,i3,i4] += 1
vt[i1,i2,i3,i4] = vRest[i1,i2,i3,i4]
# reset counter if neuron fires
neuronInactivityCounter[i1,i2,i3,i4] = 10000
else
zt[i1,i2,i3,i4] = 0
neuronInactivityCounter[i1,i2,i3,i4] -= 1
end
# compute phi, there is a difference from lif formula
@@ -267,7 +285,18 @@ function lifForward( zit,
# compute epsilonRec
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4]) +
(zit[i1,i2,i3,i4] * subscription[i1,i2,i3,i4])
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
# !iszero indicates synaptic subscription
# count synaptic inactivity
if !iszero(wRec[i1,i2,i3,i4]) # check if this is wRec subscription
if !iszero(zit[i1,i2,i3,i4]) # synapse is active, reset counter
synapticInactivityCounter[i1,i2,i3,i4] = 10000
else # synapse is inactive, counting
synapticInactivityCounter[i1,i2,i3,i4] -= 1
end
end
end
end
return nothing
@@ -288,7 +317,9 @@ function alifForward( zit::CuArray,
gammaPd::CuArray,
firingCounter::CuArray,
recSignal::CuArray,
subscription::CuArray,
exInType::CuArray,
neuronInactivityCounter::CuArray,
synapticInactivityCounter::CuArray,
epsilonRecA::CuArray,
a::CuArray,
avth::CuArray,
@@ -310,7 +341,9 @@ function alifForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
epsilonRecA,
a,
avth,
@@ -345,7 +378,9 @@ function alifForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
epsilonRecA,
a,
avth,
@@ -370,7 +405,9 @@ function alifForward( zit,
gammaPd,
firingCounter,
recSignal,
subscription,
exInType,
neuronInactivityCounter,
synapticInactivityCounter,
epsilonRecA,
a,
avth,
@@ -404,7 +441,8 @@ function alifForward( zit,
avth[i1,i2,i3,i4] = vth[i1,i2,i3,i4] + (beta[i1,i2,i3,i4] * a[i1,i2,i3,i4])
else # refractory period is inactive
recSignal[i1,i2,i3,i4] = zit[i1,i2,i3,i4] * wRec[i1,i2,i3,i4]
recSignal[i1,i2,i3,i4] = wRec[i1,i2,i3,i4] * zit[i1,i2,i3,i4] *
exInType[i1,i2,i3,i4]
vt[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * vt[i1,i2,i3,i4]) +
sum(@view(recSignal[:,:,i3,i4]))
@@ -418,9 +456,11 @@ function alifForward( zit,
firingCounter[i1,i2,i3,i4] += 1
vt[i1,i2,i3,i4] = vRest[i1,i2,i3,i4]
a[i1,i2,i3,i4] = (rho[i1,i2,i3,i4] * a[i1,i2,i3,i4]) + 1
neuronInactivityCounter[i1,i2,i3,i4] = 10000
else
zt[i1,i2,i3,i4] = 0
a[i1,i2,i3,i4] = (rho[i1,i2,i3,i4] * a[i1,i2,i3,i4])
neuronInactivityCounter[i1,i2,i3,i4] -= 1
end
# compute phi, there is a difference from alif formula
@@ -429,11 +469,20 @@ function alifForward( zit,
# compute epsilonRec
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4]) +
(zit[i1,i2,i3,i4] * subscription[i1,i2,i3,i4])
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
# compute epsilonRecA use eq.26
epsilonRecA[i1,i2,i3,i4] = (rho[i1,i2,i3,i4] *
(phi[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4])) +
(zit[i1,i2,i3,i4] * subscription[i1,i2,i3,i4])
(phi[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4])) +
(zit[i1,i2,i3,i4] * !iszero(wRec[i1,i2,i3,i4]))
# count synaptic inactivity
if !iszero(wRec[i1,i2,i3,i4]) # check if this is wRec subscription
if !iszero(zit[i1,i2,i3,i4]) # synapse is active, reset counter
synapticInactivityCounter[i1,i2,i3,i4] = 10000
else # synapse is inactive, counting
synapticInactivityCounter[i1,i2,i3,i4] -= 1
end
end
end
end
return nothing
@@ -454,7 +503,6 @@ function onForward( zit::CuArray,
gammaPd::CuArray,
firingCounter::CuArray,
recSignal::CuArray,
subscription::CuArray,
)
kernel = @cuda launch=false onForward( zit,
@@ -471,7 +519,6 @@ function onForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
GeneralUtils.linear_to_cartesian,
)
config = launch_configuration(kernel.fun)
@@ -501,7 +548,6 @@ function onForward( zit::CuArray,
gammaPd,
firingCounter,
recSignal,
subscription,
GeneralUtils.linear_to_cartesian; threads, blocks)
end
end
@@ -521,7 +567,6 @@ function onForward( zit,
gammaPd,
firingCounter,
recSignal,
subscription,
linear_to_cartesian,
)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x # gpu threads index
@@ -556,11 +601,12 @@ function onForward( zit,
end
# compute phi, there is a difference from on formula
phi[i1,i2,i3,i4] = (gammaPd[i1,i2,i3,i4] / vth[i1,i2,i3,i4]) * max(0, 1 - ((vt[i1,i2,i3,i4] - vth[i1,i2,i3,i4]) / vth[i1,i2,i3,i4]))
phi[i1,i2,i3,i4] = (gammaPd[i1,i2,i3,i4] / vth[i1,i2,i3,i4]) *
max(0, 1 - ((vt[i1,i2,i3,i4] - vth[i1,i2,i3,i4]) / vth[i1,i2,i3,i4]))
# compute epsilonRec
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4]) +
(zit[i1,i2,i3,i4] * subscription[i1,i2,i3,i4])
(zit[i1,i2,i3,i4] * !iszero(wOut[i1,i2,i3,i4]))
end
end
return nothing

View File

@@ -171,7 +171,7 @@ function onComputeParamsChange!(phi::CuArray,
eRec .= phi .* epsilonRec
nError .= reshape(outputError, (1, 1, :, size(outputError, 2))) .* arrayProjection4d
wOutChange .+= (-eta .* nError .* eRec) #BUG why wOutChange not increase every timestep that madel get wrong answer?
wOutChange .+= (-eta .* nError .* eRec)
# reset epsilonRec
epsilonRec .= 0
@@ -264,16 +264,24 @@ function onComputeParamsChange!(phi::AbstractArray,
end
end
function learn!(kfn::kfn_1)
function learn!(kfn::kfn_1, device=cpu)
# lif learn
lifLearn!(kfn.lif_wRec,
kfn.lif_wRecChange,
kfn.lif_arrayProjection4d)
kfn.lif_arrayProjection4d,
kfn.lif_neuronInactivityCounter,
kfn.lif_synapticConnectionNumber,
kfn.zit_cumulative,
device)
# alif learn
alifLearn!(kfn.alif_wRec,
kfn.alif_wRecChange,
kfn.alif_arrayProjection4d)
kfn.alif_arrayProjection4d,
kfn.alif_neuronInactivityCounter,
kfn.alif_synapticConnectionNumber,
kfn.zit_cumulative,
device)
# on learn
onLearn!(kfn.on_wOut,
@@ -289,23 +297,52 @@ end
function lifLearn!(wRec,
wRecChange,
arrayProjection4d)
# merge learning weight with average learning weight
arrayProjection4d,
inactivityCounter,
synapticConnectionNumber,
zit_cumulative,
device)
# merge learning weight with average learning weight of all batch
wRec .+= (sum(wRecChange, dims=4) ./ (size(wRec, 4))) .* arrayProjection4d
#TODO synaptic strength
wRec_cpu = wRec |> cpu
wRec_cpu = wRec_cpu[:,:,:,1] # since every batch has the same neuron wRec, (row, col, n)
inactivityCounter_cpu = inactivityCounter |> cpu
inactivityCounter_cpu = inactivityCounter_cpu[:,:,:,1] # (row, col, n)
zit_cumulative_cpu = zit_cumulative |> cpu
zit_cumulative_cpu = zit_cumulative_cpu[:,:,1] # (row, col)
#TODO neuroplasticity
# error("DEBUG -> lifLearn! $(Dates.now())")
# weak / negative synaptic connection will get randomed in neuroplasticity()
wRec_cpu = GeneralUtils.replaceBetween.(wRec_cpu, 0.0, 0.1, -1.0) # mark with -1.0
# synaptic connection that has no inactivity will get randomed in neuroplasticity()
GeneralUtils.replace_elements!(inactivityCounter_cpu, 0.0, wRec_cpu, -1.0)
# reset lif_inactivity elements to 10000
GeneralUtils.replace_elements!(inactivityCounter_cpu, 0.0, -9.0) # -9.0 is base value
#WORKING neuroplasticity
wRec_cpu = neuroplasticity(synapticConnectionNumber, zit_cumulative_cpu, wRec_cpu,
inactivityCounter_cpu)
error("DEBUG -> lifLearn! $(Dates.now())")
# #TODO send to device with correct dimension
# wRec = wRec |> device
# inactivityCounter = inactivityCounter_cpu |> device
end
function alifLearn!(wRec,
wRecChange,
arrayProjection4d)
arrayProjection4d,
inactivityCounter,
synapticConnectionNumber,
zit_cumulative,
device)
# merge learning weight with average learning weight
wRec .+= (sum(wRecChange, dims=4) ./ (size(wRec, 4))) .* arrayProjection4d
# weak / negative synaptic connection will get randomed in neuroplasticity()
wRec .= GeneralUtils.replaceLessThan.(wRec, 0.01, 0.0)
#TODO synaptic strength
#TODO neuroplasticity
@@ -327,8 +364,56 @@ function onLearn!(wOut,
end
"""
    neuroplasticity(synapticConnectionNumber, zit_cumulative, wRec, inactivityCounter_cpu)

Rewire per-neuron synaptic connections: compute how many subscriptions each neuron
should hold to firing vs non-firing neuron pools (70/30 split of
`synapticConnectionNumber`), count the current firing-pool subscriptions, and mark
new-connection slots (entries equal to -1.0 in `wRec`). Returns `wRec`.

NOTE(review): work in progress — the unconditional `error("DEBUG -> …")` below
aborts every call, so the rewiring loop and the count computations after it are
currently dead code.
"""
function neuroplasticity(synapticConnectionNumber,
zit_cumulative, # (row, col)
wRec, # (row, col, n)
inactivityCounter_cpu) # (row, col, n)
i1,i2,i3 = size(wRec)
# for each neuron, find total number of synaptic conn that should draw
# new connection to firing and non-firing neurons pool
# 70% of the budget goes to the firing pool; the remainder to the non-firing pool.
subToFireNeuron_toBe = Int(floor(0.7 * synapticConnectionNumber))
subToNonFiringNeuron_toBe = synapticConnectionNumber - subToFireNeuron_toBe
#WORKING for each neuron, count how many synap already subscribed to firing-neurons
# nonzero(zit_cumulative .* wRec) means: synapse has weight AND its source neuron fired.
subToFireNeuron_current = sum((!iszero).(zit_cumulative .* wRec), dims=(1,2)) # (1, 1, n)
subToNonFiringNeuron_current = synapticConnectionNumber .- subToFireNeuron_current # (1, 1, n)
mask = (!iszero).(zit_cumulative) # mask of firing neurons = 1, non-firing = 0
# broadcast the 2-D firing mask across all n neuron slices
projection = ones(i1,i2,i3)
mask = mask .* projection # (row, col, n)
totalNewConn = sum(isequal.(wRec, -1.0), dims=(1,2)) # count new conn mark (-1.0), (1, 1, n)
# DEBUG output of array shapes only — prefer @debug over println once verified
println("mask ", size(mask))
println("wRec ", size(wRec))
println("inactivityCounter_cpu ", size(inactivityCounter_cpu))
println("totalNeurons ", totalNewConn, size(totalNewConn))
# NOTE(review): unconditional abort — everything below this line never executes.
error("DEBUG -> neuroplasticity $(Dates.now())")
for i in 1:i3
# add new conn to firing neurons pool
# NOTE(review): `wRecmask` and `inactivityCounter_cpumask` are not defined in this
# scope — presumably meant `wRec[:,:,i]` / `inactivityCounter_cpu[:,:,i]`; confirm
# before removing the DEBUG error above.
remaining = GeneralUtils.replace_elements(mask[:,:,i],
1,
wRecmask[:,:,i],
inactivityCounter_cpumask[:,:,i],
totalNewConn[:,:,i])
#TODO add new conn to non-firing neurons pool
end
# clamp the firing-pool deficit at zero (never remove existing connections here)
newFiringConn = subToFireNeuron_toBe - subToFireNeuron_current
newFiringConn = newFiringConn > 0 ? newFiringConn : 0
newNonFiringConn = subToNonFiringNeuron_toBe - subToNonFiringNeuron_current
return wRec
end

View File

@@ -21,6 +21,23 @@ function refractoryStatus!(refractoryCounter, refractoryActive, refractoryInacti
end
end
"""
    frobenius_distance(A, B)

Compute the Frobenius distance between `A` and `B`: the square root of the
sum of squared element-wise differences.

# Arguments
- `A`, `B`: arrays of identical size (any shape, not just 2-D matrices).

# Throws
- `ErrorException` when `A` and `B` differ in size.
"""
function frobenius_distance(A, B)
    # Validate at the boundary; keep the original error message for callers.
    if size(A) != size(B)
        error("The matrices must have the same size")
    end
    # sum(abs2, …) replaces the hand-rolled double loop (and generalizes from
    # 2-D to any array shape); `init = 0.0` keeps the empty-input case
    # returning 0.0 exactly like the original `distance = 0.0` accumulator.
    return sqrt(sum(abs2, A .- B; init = 0.0))
end
@@ -59,13 +76,6 @@ end

View File

@@ -24,6 +24,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
inputSize::Union{AbstractArray, Nothing} = nothing
zit::Union{AbstractArray, Nothing} = nothing # 3D activation matrix
zit_cumulative::Union{AbstractArray, Nothing} = nothing
exInType::Union{AbstractArray, Nothing} = nothing
modelError::Union{AbstractArray, Nothing} = nothing # store RSNN error
outputError::Union{AbstractArray, Nothing} = nothing # store output neurons error
@@ -52,14 +53,17 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
lif_gammaPd::Union{AbstractArray, Nothing} = nothing
lif_wRecChange::Union{AbstractArray, Nothing} = nothing
lif_error::Union{AbstractArray, Nothing} = nothing
lif_subscription::Union{AbstractArray, Nothing} = nothing
lif_firingCounter::Union{AbstractArray, Nothing} = nothing
lif_firingTargetFrequency::Union{AbstractArray, Nothing} = nothing
lif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
lif_synapticInactivityCounter::Union{AbstractArray, Nothing} = nothing
lif_synapticConnectionNumber::Union{Int, Nothing} = nothing
# pre-allocation array
lif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
lif_recSignal::Union{AbstractArray, Nothing} = nothing
lif_exInType::Union{AbstractArray, Nothing} = nothing
# lif_decayed_epsilonRec::Union{AbstractArray, Nothing} = nothing
# lif_vt_diff_vth::Union{AbstractArray, Nothing} = nothing
# lif_vt_diff_vth_div_vth::Union{AbstractArray, Nothing} = nothing
@@ -89,14 +93,17 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
alif_gammaPd::Union{AbstractArray, Nothing} = nothing
alif_wRecChange::Union{AbstractArray, Nothing} = nothing
alif_error::Union{AbstractArray, Nothing} = nothing
alif_subscription::Union{AbstractArray, Nothing} = nothing
alif_firingCounter::Union{AbstractArray, Nothing} = nothing
alif_firingTargetFrequency::Union{AbstractArray, Nothing} = nothing
alif_neuronInactivityCounter::Union{AbstractArray, Nothing} = nothing
alif_synapticInactivityCounter::Union{AbstractArray, Nothing} = nothing
alif_synapticConnectionNumber::Union{Int, Nothing} = nothing
# pre-allocation array
alif_arrayProjection4d::Union{AbstractArray, Nothing} = nothing # use to project 3d array to 4d
alif_recSignal::Union{AbstractArray, Nothing} = nothing
alif_exInType::Union{AbstractArray, Nothing} = nothing
# alif_decayed_epsilonRec::Union{AbstractArray, Nothing} = nothing
# alif_vt_diff_vth::Union{AbstractArray, Nothing} = nothing
# alif_vt_diff_vth_div_vth::Union{AbstractArray, Nothing} = nothing
@@ -168,15 +175,17 @@ function kfn_1(params::Dict; device=cpu)
# initialize activation matrix #
# ---------------------------------------------------------------------------- #
# row*col is a 2D matrix represent all RSNN activation
row, col, batch = kfn.params[:inputPort][:signal][:numbers] # z-axis represent signal batch number
row, signal_col, batch = kfn.params[:inputPort][:signal][:numbers] # z-axis represent signal batch number
kfn.inputSize = [row, col] |> device
col += kfn.params[:computeNeuron][:lif][:numbers][2]
col += kfn.params[:computeNeuron][:alif][:numbers][2]
kfn.inputSize = [row, signal_col] |> device
lif_col = kfn.params[:computeNeuron][:lif][:numbers][2]
alif_col = kfn.params[:computeNeuron][:alif][:numbers][2]
col = signal_col + lif_col + alif_col
# activation matrix
kfn.zit = zeros(row, col, batch) |> device
kfn.zit_cumulative = (similar(kfn.zit) .= 0) |> device
kfn.zit_cumulative = (similar(kfn.zit) .= 0)
kfn.modelError = zeros(1) |> device
# ---------------------------------------------------------------------------- #
@@ -184,115 +193,133 @@ function kfn_1(params::Dict; device=cpu)
# ---------------------------------------------------------------------------- #
# In 3D LIF matrix, z-axis represent each neuron while each 2D slice represent that neuron's
# synaptic subscription to other neurons (via activation matrix)
n = kfn.params[:computeNeuron][:lif][:numbers][1] * kfn.params[:computeNeuron][:lif][:numbers][2]
lif_n = kfn.params[:computeNeuron][:lif][:numbers][1] * kfn.params[:computeNeuron][:lif][:numbers][2]
# subscription
w = zeros(row, col, n)
w = zeros(row, col, lif_n)
synapticConnectionPercent = kfn.params[:computeNeuron][:lif][:params][:synapticConnectionPercent]
synapticConnection = Int(floor(row*col * synapticConnectionPercent/100))
kfn.lif_synapticConnectionNumber = Int(floor(row*col * synapticConnectionPercent/100))
for slice in eachslice(w, dims=3)
pool = shuffle!([1:row*col...])[1:synapticConnection]
pool = shuffle!([1:row*col...])[1:kfn.lif_synapticConnectionNumber]
for i in pool
slice[i] = randn()/10 # assign weight to synaptic connection. /10 to start small,
slice[i] = rand() # assign weight to synaptic connection. /10 to start small,
# otherwise RSNN's vt Usually stay negative (-)
end
end
# 10% of neuron connection should be enough to start to make neuron fires
should_be_avg_weight = 1 / (0.1 * lif_n)
w = w .* (should_be_avg_weight / maximum(w)) # adjust overall weight
# project 3D w into 4D kfn.lif_wRec (row, col, n, batch)
kfn.lif_wRec = reshape(w, (row, col, n, 1)) .* ones(row, col, n, batch) |> device
kfn.lif_zit = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_vt = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_vth = (similar(kfn.lif_wRec) .= 1) |> device
kfn.lif_vRest = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_zt = zeros(1, 1, n, batch) |> device
kfn.lif_zt4d = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_refractoryCounter = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_refractoryDuration = (similar(kfn.lif_wRec) .= 3) |> device
kfn.lif_wRec = reshape(w, (row, col, lif_n, 1)) .* ones(row, col, lif_n, batch) |> device
kfn.lif_zit = (similar(kfn.lif_wRec) .= 0)
kfn.lif_vt = (similar(kfn.lif_wRec) .= 0)
kfn.lif_vth = (similar(kfn.lif_wRec) .= 1)
kfn.lif_vRest = (similar(kfn.lif_wRec) .= 0)
kfn.lif_zt = zeros(1, 1, lif_n, batch) |> device
kfn.lif_zt4d = (similar(kfn.lif_wRec) .= 0)
kfn.lif_refractoryCounter = (similar(kfn.lif_wRec) .= 0)
kfn.lif_refractoryDuration = (similar(kfn.lif_wRec) .= 3)
kfn.lif_delta = 1.0
kfn.lif_tau_m = 20.0
kfn.lif_alpha = (similar(kfn.lif_wRec) .= (exp(-kfn.lif_delta / kfn.lif_tau_m))) |> device
kfn.lif_phi = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_epsilonRec = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_eRec = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_eta = (similar(kfn.lif_wRec) .= 0.001) |> device
kfn.lif_gammaPd = (similar(kfn.lif_wRec) .= 0.3) |> device
kfn.lif_wRecChange = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_error = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_subscription = (GeneralUtils.isNotEqual.(kfn.lif_wRec, 0)) |> device
kfn.lif_alpha = (similar(kfn.lif_wRec) .= (exp(-kfn.lif_delta / kfn.lif_tau_m)))
kfn.lif_phi = (similar(kfn.lif_wRec) .= 0)
kfn.lif_epsilonRec = (similar(kfn.lif_wRec) .= 0)
kfn.lif_eRec = (similar(kfn.lif_wRec) .= 0)
kfn.lif_eta = (similar(kfn.lif_wRec) .= 0.001)
kfn.lif_gammaPd = (similar(kfn.lif_wRec) .= 0.3)
kfn.lif_wRecChange = (similar(kfn.lif_wRec) .= 0)
kfn.lif_error = (similar(kfn.lif_wRec) .= 0)
kfn.lif_firingCounter = (similar(kfn.lif_wRec) .= 0) |> device
# firingTargetFrequency = desired count / total sequence length
kfn.lif_firingTargetFrequency = (similar(kfn.lif_wRec) .= 0.1) |> device
kfn.lif_firingCounter = (similar(kfn.lif_wRec) .= 0)
kfn.lif_firingTargetFrequency = (similar(kfn.lif_wRec) .= 0.1)
kfn.lif_neuronInactivityCounter = (similar(kfn.lif_wRec) .= 10000)
kfn.lif_synapticInactivityCounter = Array(similar(kfn.lif_wRec) .= -9) # -9 for non-sub conn
mask = Array((!iszero).(kfn.lif_wRec))
GeneralUtils.replace_elements!(mask, 1, kfn.lif_synapticInactivityCounter, 10000)
kfn.lif_synapticInactivityCounter = kfn.lif_synapticInactivityCounter |> device
kfn.lif_arrayProjection4d = (similar(kfn.lif_wRec) .= 1) |> device
kfn.lif_recSignal = (similar(kfn.lif_wRec) .= 0) |> device
# kfn.lif_decayed_epsilonRec = (similar(kfn.lif_wRec) .= 0) |> device
# kfn.lif_vt_diff_vth = (similar(kfn.lif_wRec) .= 0) |> device
# kfn.lif_vt_diff_vth_div_vth = (similar(kfn.lif_wRec) .= 0) |> device
# kfn.lif_gammaPd_div_vth = (similar(kfn.lif_wRec) .= 0) |> device
# kfn.lif_phiActivation = (similar(kfn.lif_wRec) .= 0) |> device
kfn.lif_arrayProjection4d = (similar(kfn.lif_wRec) .= 1)
kfn.lif_recSignal = (similar(kfn.lif_wRec) .= 0)
kfn.lif_exInType = (similar(kfn.lif_wRec) .= 0)
# kfn.lif_decayed_epsilonRec = (similar(kfn.lif_wRec) .= 0)
# kfn.lif_vt_diff_vth = (similar(kfn.lif_wRec) .= 0)
# kfn.lif_vt_diff_vth_div_vth = (similar(kfn.lif_wRec) .= 0)
# kfn.lif_gammaPd_div_vth = (similar(kfn.lif_wRec) .= 0)
# kfn.lif_phiActivation = (similar(kfn.lif_wRec) .= 0)
# ---------------------------------------------------------------------------- #
# ALIF config #
# ---------------------------------------------------------------------------- #
n = kfn.params[:computeNeuron][:alif][:numbers][1] * kfn.params[:computeNeuron][:alif][:numbers][2]
alif_n = kfn.params[:computeNeuron][:alif][:numbers][1] * kfn.params[:computeNeuron][:alif][:numbers][2]
# subscription
w = zeros(row, col, n)
w = zeros(row, col, alif_n)
synapticConnectionPercent = kfn.params[:computeNeuron][:alif][:params][:synapticConnectionPercent]
synapticConnection = Int(floor(row*col * synapticConnectionPercent/100))
kfn.alif_synapticConnectionNumber = Int(floor(row*col * synapticConnectionPercent/100))
for slice in eachslice(w, dims=3)
pool = shuffle!([1:row*col...])[1:synapticConnection]
pool = shuffle!([1:row*col...])[1:kfn.alif_synapticConnectionNumber]
for i in pool
slice[i] = randn()/10 # assign weight to synaptic connection. /10 to start small,
slice[i] = rand() # assign weight to synaptic connection. /10 to start small,
# otherwise RSNN's vt Usually stay negative (-)
end
end
# 10% of neuron connection should be enough to start to make neuron fires
should_be_avg_weight = 1 / (0.1 * alif_n)
w = w .* (should_be_avg_weight / maximum(w)) # adjust overall weight
# project 3D w into 4D kfn.alif_wRec
kfn.alif_wRec = reshape(w, (row, col, n, 1)) .* ones(row, col, n, batch) |> device
kfn.alif_zit = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_vt = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_vth = (similar(kfn.alif_wRec) .= 1) |> device
kfn.alif_vRest = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_zt = zeros(1, 1, n, batch) |> device
kfn.alif_zt4d = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_refractoryCounter = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_refractoryDuration = (similar(kfn.alif_wRec) .= 3) |> device
kfn.alif_wRec = reshape(w, (row, col, alif_n, 1)) .* ones(row, col, alif_n, batch) |> device
kfn.alif_zit = (similar(kfn.alif_wRec) .= 0)
kfn.alif_vt = (similar(kfn.alif_wRec) .= 0)
kfn.alif_vth = (similar(kfn.alif_wRec) .= 1)
kfn.alif_vRest = (similar(kfn.alif_wRec) .= 0)
kfn.alif_zt = zeros(1, 1, alif_n, batch) |> device
kfn.alif_zt4d = (similar(kfn.alif_wRec) .= 0)
kfn.alif_refractoryCounter = (similar(kfn.alif_wRec) .= 0)
kfn.alif_refractoryDuration = (similar(kfn.alif_wRec) .= 3)
kfn.alif_delta = 1.0
kfn.alif_tau_m = 20.0
kfn.alif_alpha = (similar(kfn.alif_wRec) .= (exp(-kfn.alif_delta / kfn.alif_tau_m))) |> device
kfn.alif_phi = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_epsilonRec = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_eRec = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_eta = (similar(kfn.alif_wRec) .= 0.001) |> device
kfn.alif_gammaPd = (similar(kfn.alif_wRec) .= 0.3) |> device
kfn.alif_wRecChange = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_error = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_subscription = (GeneralUtils.isNotEqual.(kfn.alif_wRec, 0)) |> device
kfn.alif_alpha = (similar(kfn.alif_wRec) .= (exp(-kfn.alif_delta / kfn.alif_tau_m)))
kfn.alif_phi = (similar(kfn.alif_wRec) .= 0)
kfn.alif_epsilonRec = (similar(kfn.alif_wRec) .= 0)
kfn.alif_eRec = (similar(kfn.alif_wRec) .= 0)
kfn.alif_eta = (similar(kfn.alif_wRec) .= 0.001)
kfn.alif_gammaPd = (similar(kfn.alif_wRec) .= 0.3)
kfn.alif_wRecChange = (similar(kfn.alif_wRec) .= 0)
kfn.alif_error = (similar(kfn.alif_wRec) .= 0)
kfn.alif_firingCounter = (similar(kfn.alif_wRec) .= 0) |> device
# firingTargetFrequency = desired count / total sequence length
kfn.alif_firingTargetFrequency = (similar(kfn.alif_wRec) .= 0.1) |> device
kfn.alif_firingCounter = (similar(kfn.alif_wRec) .= 0)
kfn.alif_firingTargetFrequency = (similar(kfn.alif_wRec) .= 0.1)
kfn.alif_neuronInactivityCounter = (similar(kfn.alif_wRec) .= 10000)
kfn.alif_synapticInactivityCounter = Array(similar(kfn.alif_wRec) .= -9) # -9 for non-sub conn
mask = Array((!iszero).(kfn.alif_wRec))
GeneralUtils.replace_elements!(mask, 1, kfn.alif_synapticInactivityCounter, 10000)
kfn.alif_synapticInactivityCounter = kfn.alif_synapticInactivityCounter |> device
kfn.alif_arrayProjection4d = (similar(kfn.alif_wRec) .= 1) |> device
kfn.alif_recSignal = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_decayed_epsilonRec = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_vt_diff_vth = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_vt_diff_vth_div_vth = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_gammaPd_div_vth = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_phiActivation = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_arrayProjection4d = (similar(kfn.alif_wRec) .= 1)
kfn.alif_recSignal = (similar(kfn.alif_wRec) .= 0)
kfn.alif_exInType = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_decayed_epsilonRec = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_vt_diff_vth = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_vt_diff_vth_div_vth = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_gammaPd_div_vth = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_phiActivation = (similar(kfn.alif_wRec) .= 0)
# alif specific variables
kfn.alif_epsilonRecA = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_avth = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_a = (similar(kfn.alif_wRec) .= 0) |> device
kfn.alif_beta = (similar(kfn.alif_wRec) .= 0.07) |> device
kfn.alif_epsilonRecA = (similar(kfn.alif_wRec) .= 0)
kfn.alif_avth = (similar(kfn.alif_wRec) .= 0)
kfn.alif_a = (similar(kfn.alif_wRec) .= 0)
kfn.alif_beta = (similar(kfn.alif_wRec) .= 0.07)
kfn.alif_tau_a = 800.0
kfn.alif_rho = (similar(kfn.alif_wRec) .= (exp(-kfn.alif_delta / kfn.alif_tau_a))) |> device
# kfn.alif_phi_x_epsilonRec = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_phi_x_beta = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_rho_diff_phi_x_beta = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_rho_div_phi_x_beta_x_epsilonRecA = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_beta_x_a = (similar(kfn.alif_wRec) .= 0) |> device
# kfn.alif_phi_x_epsilonRec = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_phi_x_beta = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_rho_diff_phi_x_beta = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_rho_div_phi_x_beta_x_epsilonRecA = (similar(kfn.alif_wRec) .= 0)
# kfn.alif_beta_x_a = (similar(kfn.alif_wRec) .= 0)
# ---------------------------------------------------------------------------- #
# output config #
@@ -310,38 +337,49 @@ function kfn_1(params::Dict; device=cpu)
# pool must contain only lif, alif neurons
pool = shuffle!([startInd:row*col...])[1:synapticConnection]
for i in pool
slice[i] = randn()/10 # assign weight to synaptic connection. /10 to start small,
slice[i] = rand() # assign weight to synaptic connection. /10 to start small,
# otherwise RSNN's vt Usually stay negative (-)
end
end
# # 10% of neuron connection should be enough to start to make neuron fires
# should_be_avg_weight = 1 / (0.2 * n)
# w = w .* (should_be_avg_weight / maximum(w)) # adjust overall weight
# project 3D w into 4D kfn.lif_wOut (row, col, n, batch)
kfn.on_wOut = reshape(w, (row, col, n, 1)) .* ones(row, col, n, batch) |> device
kfn.on_zit = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_vt = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_vth = (similar(kfn.on_wOut) .= 1) |> device
kfn.on_vRest = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_zit = (similar(kfn.on_wOut) .= 0)
kfn.on_vt = (similar(kfn.on_wOut) .= 0)
kfn.on_vth = (similar(kfn.on_wOut) .= 1)
kfn.on_vRest = (similar(kfn.on_wOut) .= 0)
kfn.on_zt = zeros(1, 1, n, batch) |> device
kfn.on_zt4d = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_refractoryCounter = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_refractoryDuration = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_zt4d = (similar(kfn.on_wOut) .= 0)
kfn.on_refractoryCounter = (similar(kfn.on_wOut) .= 0)
kfn.on_refractoryDuration = (similar(kfn.on_wOut) .= 0)
kfn.on_delta = 1.0
kfn.on_tau_m = 20.0
kfn.on_alpha = (similar(kfn.on_wOut) .= (exp(-kfn.on_delta / kfn.on_tau_m))) |> device
kfn.on_phi = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_epsilonRec = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_eRec = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_eta = (similar(kfn.on_wOut) .= 0.001) |> device
kfn.on_gammaPd = (similar(kfn.on_wOut) .= 0.3) |> device
kfn.on_wOutChange = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_error = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_alpha = (similar(kfn.on_wOut) .= (exp(-kfn.on_delta / kfn.on_tau_m)))
kfn.on_phi = (similar(kfn.on_wOut) .= 0)
kfn.on_epsilonRec = (similar(kfn.on_wOut) .= 0)
kfn.on_eRec = (similar(kfn.on_wOut) .= 0)
kfn.on_eta = (similar(kfn.on_wOut) .= 0.001)
kfn.on_gammaPd = (similar(kfn.on_wOut) .= 0.3)
kfn.on_wOutChange = (similar(kfn.on_wOut) .= 0)
kfn.on_error = (similar(kfn.on_wOut) .= 0)
kfn.on_subscription = (GeneralUtils.isNotEqual.(kfn.on_wOut, 0)) |> device
kfn.on_firingCounter = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_firingCounter = (similar(kfn.on_wOut) .= 0)
kfn.on_arrayProjection4d = (similar(kfn.on_wOut) .= 1) |> device
kfn.on_recSignal = (similar(kfn.on_wOut) .= 0) |> device
kfn.on_arrayProjection4d = (similar(kfn.on_wOut) .= 1)
kfn.on_recSignal = (similar(kfn.on_wOut) .= 0)
kfn.outputError = zeros(n, batch) |> device
totalComputeNeurons = lif_n + alif_n
inhabitoryNeurons = Int(floor(totalComputeNeurons * 30/100))
mask1 = ones(row, signal_col)
mask2 = GeneralUtils.multiply_random_elements(ones(row, lif_col + alif_col),
-1, inhabitoryNeurons, MersenneTwister(1234))
kfn.exInType = cat(mask1, mask2, dims=2) |> device
return kfn
end