dev
This commit is contained in:
@@ -25,29 +25,24 @@ using .interface
|
||||
|
||||
#------------------------------------------------------------------------------------------------100
|
||||
|
||||
""" version 0.0.9
|
||||
""" version 0.0.11
|
||||
Todo:
|
||||
[DONE] change model error calculation in user script, (progress based)
|
||||
[DONE] +W 90% of most active conn
|
||||
[2] -W 10% of less active conn
|
||||
[3] synapse reconnect delay counter
|
||||
[] growRepeatedPath!(), instead of synapse with 20% less activity count gets -w, may be I
|
||||
should rank synapse based on activity count from highest performing synapse to lowest
|
||||
and the last 20% of the rank get -w
|
||||
[-] add temporal summation in addition to already used spatial summation.
|
||||
CANCELLED, spatial summation every second until membrane potential reach a threshold
|
||||
is in itself a temporal summation.
|
||||
[4] implement dormant connection and pruning mechanism. the longer the training the longer
|
||||
[4] implement variable dormant connection and pruning mechanism. the longer the training the longer
|
||||
0 weight stay 0.
|
||||
[] using RL to control learning signal
|
||||
[] consider using Dates.now() instead of timestamp because time_stamp may overflow
|
||||
[] Liquid time constant. training should include adjusting α, neuron membrane potential decay factor
|
||||
which defined by neuron.tau_m formula in type.jl
|
||||
|
||||
Change from version: 0.0.6
|
||||
Change from version: 0.0.10
|
||||
-
|
||||
|
||||
All features
|
||||
- excitatory/inhibitory matrix
|
||||
- neuroplasticity
|
||||
- voltage regulator
|
||||
"""
|
||||
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ function (kfn::kfn_1)(input::AbstractArray)
|
||||
kfn.timeStep .= 1
|
||||
|
||||
# reset learning params
|
||||
kfn.zitCumulative = kfn.zitCumulative[:,:,1,:]
|
||||
kfn.zitCumulative = (kfn.zitCumulative[:,:,1] .= 0)
|
||||
|
||||
kfn.lif_vt .= 0
|
||||
kfn.lif_wRecChange .= 0
|
||||
@@ -44,6 +44,7 @@ function (kfn::kfn_1)(input::AbstractArray)
|
||||
kfn.on_epsilonRec .= 0
|
||||
kfn.on_wOutChange .= 0
|
||||
kfn.on_refractoryCounter .= 0
|
||||
kfn.on_synapticActivityCounter .= 0
|
||||
|
||||
kfn.learningStage = [2]
|
||||
end
|
||||
@@ -153,6 +154,7 @@ function (kfn::kfn_1)(input::AbstractArray)
|
||||
kfn.on_gammaPd,
|
||||
kfn.on_firingCounter,
|
||||
kfn.on_recSignal,
|
||||
kfn.on_synapticActivityCounter,
|
||||
)
|
||||
# get on_zt4d to on_zt
|
||||
kfn.on_zt .= reduce(max, kfn.on_zt4d, dims=(1,2))
|
||||
@@ -557,6 +559,7 @@ function onForward( zit::CuArray,
|
||||
gammaPd::CuArray,
|
||||
firingCounter::CuArray,
|
||||
recSignal::CuArray,
|
||||
synapticActivityCounter::CuArray,
|
||||
)
|
||||
|
||||
kernel = @cuda launch=false onForward( zit,
|
||||
@@ -573,6 +576,7 @@ function onForward( zit::CuArray,
|
||||
gammaPd,
|
||||
firingCounter,
|
||||
recSignal,
|
||||
synapticActivityCounter,
|
||||
GeneralUtils.linear_to_cartesian,
|
||||
)
|
||||
config = launch_configuration(kernel.fun)
|
||||
@@ -602,6 +606,7 @@ function onForward( zit::CuArray,
|
||||
gammaPd,
|
||||
firingCounter,
|
||||
recSignal,
|
||||
synapticActivityCounter,
|
||||
GeneralUtils.linear_to_cartesian; threads, blocks)
|
||||
end
|
||||
end
|
||||
@@ -621,6 +626,7 @@ function onForward( zit,
|
||||
gammaPd,
|
||||
firingCounter,
|
||||
recSignal,
|
||||
synapticActivityCounter,
|
||||
linear_to_cartesian,
|
||||
)
|
||||
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x # gpu threads index
|
||||
@@ -661,6 +667,8 @@ function onForward( zit,
|
||||
# compute epsilonRec
|
||||
epsilonRec[i1,i2,i3,i4] = (alpha[i1,i2,i3,i4] * epsilonRec[i1,i2,i3,i4]) +
|
||||
(zit[i1,i2,i3,i4] * !iszero(wOut[i1,i2,i3,i4]))
|
||||
|
||||
synapticActivityCounter[i1,i2,i3,i4] += zit[i1,i2,i3,i4] * !iszero(wOut[i1,i2,i3,i4])
|
||||
end
|
||||
end
|
||||
return nothing
|
||||
|
||||
22
src/learn.jl
22
src/learn.jl
@@ -106,8 +106,10 @@ function lifComputeParamsChange!( timeStep::CuArray,
|
||||
wRecChange .+= (eta .* nError .* eRec)
|
||||
|
||||
# frequency regulator
|
||||
wRecChange .+= 0.001 .* ((firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep) .*
|
||||
eta .* eRec
|
||||
freqError = (firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep
|
||||
freqWRecChange = -0.1 .* freqError .* eta .* eRec
|
||||
wRecChange .+= freqWRecChange
|
||||
|
||||
# reset epsilonRec
|
||||
epsilonRec .= 0
|
||||
end
|
||||
@@ -156,8 +158,12 @@ function alifComputeParamsChange!( timeStep::CuArray,
|
||||
wRecChange .+= (eta .* nError .* eRec)
|
||||
|
||||
# frequency regulator
|
||||
wRecChange .+= 0.001 .* ((firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep) .*
|
||||
eta .* eRec
|
||||
freqError = (firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep
|
||||
freqWRecChange = -0.1 .* freqError .* eta .* eRec
|
||||
wRecChange .+= freqWRecChange
|
||||
# wRecChange .+= 0.01 .* ((firingTargetFrequency - (firingCounter./timeStep)) ./ timeStep) .*
|
||||
# eta .* eRec
|
||||
|
||||
|
||||
# reset epsilonRec
|
||||
epsilonRec .= 0
|
||||
@@ -272,6 +278,14 @@ function onComputeParamsChange!(phi::AbstractArray,
|
||||
end
|
||||
|
||||
function learn!(kfn::kfn_1, progress, device=cpu)
|
||||
if sum(kfn.timeStep) == 800
|
||||
println("zitCumulative ", sum(kfn.zitCumulative[:,:,784:size(kfn.zitCumulative, 3)], dims=3))
|
||||
# println("on_synapticActivityCounter ", kfn.on_synapticActivityCounter[:,:,1,:])
|
||||
end
|
||||
|
||||
#WORKING compare output neuron 0 synapse activity when input are label 0 and 5, (!isequal).(wOut)
|
||||
|
||||
|
||||
# lif learn
|
||||
kfn.lif_wRec, kfn.lif_neuronInactivityCounter, kfn.lif_synapticActivityCounter, kfn.lif_synapseReconnectDelay =
|
||||
lifLearn(kfn.lif_wRec,
|
||||
|
||||
104
src/snnUtil.jl
104
src/snnUtil.jl
@@ -88,37 +88,57 @@ function mergeLearnWeight!(wRec::AbstractArray, exInType, wRecChange::AbstractAr
|
||||
# error("DEBUG -> mergeLearnWeight!")
|
||||
end
|
||||
|
||||
# function growRepeatedPath!(wRec, synapticActivityCounter, eta)
|
||||
# # seperate active synapse out of inactive in this signal
|
||||
# mask_activeSynapse = (!isequal).(synapticActivityCounter, 0)
|
||||
|
||||
# # adjust weight based on vt progress and repeatition (80% +w, 20% -w) depend on epsilonRec
|
||||
# avgActivity = sum(synapticActivityCounter) / sum(mask_activeSynapse)
|
||||
# lowerlimit = 0.2 * avgActivity # boundary at 20%
|
||||
|
||||
# # +w, synapse with more than 10% of avg activity get increase weight by eta
|
||||
# mask_more = (!isless).(synapticActivityCounter, lowerlimit)
|
||||
# mask_2 = GeneralUtils.allTrue.(mask_activeSynapse, mask_more)
|
||||
# mask_3 = mask_2 .* (1 .+ eta) # minor activity synapse weight will be reduced by eta
|
||||
# GeneralUtils.replaceElements!(mask_3, 0, 1) # replace 0 with 1 so mask * Wrec will not get 0 weight
|
||||
# wRec .*= mask_3
|
||||
|
||||
# # -w, synapse with less than 10% of avg activity get reduced weight by eta
|
||||
# mask_less = GeneralUtils.isBetween.(synapticActivityCounter, 0, lowerlimit) # 1st criteria
|
||||
# mask_3 = GeneralUtils.allTrue.(mask_activeSynapse, mask_less)
|
||||
# mask_4 = mask_3 .* (1 .- eta) # minor activity synapse weight will be reduced by eta
|
||||
# # replace 0 with 1 so mask * wRec will not get 0 weight i.e. non-effected weight remain the same
|
||||
# GeneralUtils.replaceElements!(mask_4, 0, 1)
|
||||
# wRec .*= mask_4
|
||||
# # error("DEBUG -> growRepeatedPath!")
|
||||
# end
|
||||
|
||||
|
||||
"""
    growRepeatedPath!(wRec, synapticActivityCounter, eta)

Reinforce frequently used synapses and weaken rarely used ones, in place.

Active synapses (nonzero entry in `synapticActivityCounter`) whose activity is
at least 20% of the average activity over active synapses get their weight in
`wRec` scaled up by `(1 + eta)`; active synapses below that 20% boundary are
scaled down by `(1 - eta)`. Inactive synapses (zero activity) are untouched.

Returns the mutated `wRec`.
"""
function growRepeatedPath!(wRec, synapticActivityCounter, eta)
    # separate synapses active during this signal from inactive ones
    mask_activeSynapse = (!isequal).(synapticActivityCounter, 0)

    # no active synapse at all: nothing to reinforce or weaken, and the average
    # below would be 0/0 = NaN — bail out early with weights unchanged
    if sum(mask_activeSynapse) == 0
        return wRec
    end

    # average activity over active synapses; 20% of it is the +w/-w boundary
    avgActivity = sum(synapticActivityCounter) / sum(mask_activeSynapse)
    lowerlimit = 0.2 * avgActivity

    # +w: active synapses at or above the 20% boundary get weight increased by eta
    mask_more = (!isless).(synapticActivityCounter, lowerlimit)
    mask_2 = GeneralUtils.allTrue.(mask_activeSynapse, mask_more)
    mask_3 = mask_2 .* (1 .+ eta)
    # replace 0 with 1 so mask .* wRec leaves unaffected weights unchanged
    GeneralUtils.replaceElements!(mask_3, 0, 1)
    wRec .*= mask_3

    # -w: active synapses strictly below the 20% boundary get weight reduced by eta
    mask_less = GeneralUtils.isBetween.(synapticActivityCounter, 0, lowerlimit)
    mask_3 = GeneralUtils.allTrue.(mask_activeSynapse, mask_less)
    mask_4 = mask_3 .* (1 .- eta)
    # replace 0 with 1 so mask .* wRec leaves unaffected weights unchanged
    GeneralUtils.replaceElements!(mask_4, 0, 1)
    wRec .*= mask_4

    return wRec
end
|
||||
|
||||
|
||||
function weakenNotMatureSynapse!(wRec, synapticActivityCounter, eta) # TODO not fully tested, there is no connection YET where there is 0 synapse activity but wRec is not 0 (subscribed)
|
||||
mask_inactiveSynapse = isequal.(synapticActivityCounter, 0)
|
||||
mask_notmature = GeneralUtils.isBetween.(wRec, 0.0, 0.1) # 2nd criteria, not mature synapse has weight < 0.1
|
||||
@@ -164,8 +184,12 @@ function rewireSynapse!(wRec::AbstractArray, neuronInactivityCounter::AbstractAr
|
||||
if timemark > 0 #TODO not fully tested. mark timeStep available
|
||||
timemark = Int(timemark)
|
||||
# get neuron pool at 10 timeStep earlier
|
||||
earlier = timemark - 10 > 0 ? timemark - 10 : timemark
|
||||
pool = sum(zitCumulative[:,:,earlier:timemark], dims=3) #BUG BoundsError: attempt to access 10×25×801 Array{Float32, 3} at index [1:10, 1:25, 1340.0f0:1.0f0:1350.0f0]
|
||||
earlier = size(zitCumulative, 3) - 10 > 0 ? size(zitCumulative, 3) - 10 : size(zitCumulative, 3)
|
||||
current = size(zitCumulative, 3)
|
||||
pool = sum(zitCumulative[:,:,earlier:current], dims=3)
|
||||
# earlier = timemark - 10 > 0 ? timemark - 10 : timemark
|
||||
# timemark = timemark == 800 ? 799 : timemark
|
||||
# pool = sum(zitCumulative[:,:,earlier:timemark], dims=3) #BUG BoundsError: attempt to access 10×25×801 Array{Float32, 3} at index [1:10, 1:25, 1340.0f0:1.0f0:1350.0f0]
|
||||
if sum(pool) != 0
|
||||
indices = findall(x -> x != 0, pool)
|
||||
pick = rand(indices) # cartesian indice
|
||||
@@ -188,6 +212,62 @@ end
|
||||
|
||||
|
||||
|
||||
"""
    rankMatrix(X, percent)

Rank the nonzero elements of `X` from highest to lowest value and split them
at `percent` (a fraction in `[0.0, 1.0]`).

Return a tuple `(first, second, threshold)`:

- `first`: `BitArray` of the same shape as `X`, `true` for elements strictly
  above `threshold` — approximately the top `1 - percent` fraction of the
  ranked nonzero values.
- `second`: `BitArray` of the same shape as `X`, `true` for elements `<=`
  `threshold` (the complement of `first`).
- `threshold`: the value separating the two groups. Zero entries of `X` are
  ignored when computing it.

Edge cases preserve the original convention: `percent == 0.0` yields
(all-false, all-true, `0.0`) and `percent == 1.0` yields
(all-true, all-false, `1.0`).

Throws `ArgumentError` when `percent` is outside `[0.0, 1.0]`.
"""
function rankMatrix(X, percent)
    (0.0 <= percent <= 1.0) ||
        throw(ArgumentError("percent must be 0.0 <= percent <= 1.0, got $percent"))

    dims = size(X)
    if percent == 0.0
        # nothing is assigned to the top group (original convention)
        return falses(dims), trues(dims), 0.0
    elseif percent == 1.0
        # everything is assigned to the top group (original convention)
        return trues(dims), falses(dims), 1.0
    end

    # rank only the nonzero values, from highest to lowest
    ranked = sort(filter(!iszero, vec(X)), rev=true)
    if isempty(ranked)
        # all-zero input: no element exceeds any threshold
        return falses(dims), trues(dims), 0.0
    end

    # the top (1 - percent) fraction of the ranked values sits above threshold
    keep = 1.0 - percent
    threshold = ranked[ceil(Int, keep * length(ranked))]

    first_bitmatrix = X .> threshold
    second_bitmatrix = X .<= threshold
    return first_bitmatrix, second_bitmatrix, threshold
end
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
18
src/type.jl
18
src/type.jl
@@ -154,7 +154,7 @@ Base.@kwdef mutable struct kfn_1 <: knowledgeFn
|
||||
on_gammaPd::Union{AbstractArray, Nothing} = nothing
|
||||
on_wOutChange::Union{AbstractArray, Nothing} = nothing
|
||||
on_error::Union{AbstractArray, Nothing} = nothing
|
||||
on_subscription::Union{AbstractArray, Nothing} = nothing
|
||||
on_synapticActivityCounter::Union{AbstractArray, Nothing} = nothing
|
||||
|
||||
on_firingCounter::Union{AbstractArray, Nothing} = nothing
|
||||
|
||||
@@ -227,7 +227,7 @@ function kfn_1(params::Dict; device=cpu)
|
||||
kfn.lif_error = (similar(kfn.lif_wRec) .= 0)
|
||||
|
||||
kfn.lif_firingCounter = (similar(kfn.lif_wRec) .= 0)
|
||||
kfn.lif_firingTargetFrequency = (similar(kfn.lif_wRec) .= 0.1)
|
||||
kfn.lif_firingTargetFrequency = (similar(kfn.lif_wRec) .= 10)
|
||||
kfn.lif_neuronInactivityCounter = (similar(kfn.lif_wRec) .= 0)
|
||||
|
||||
# count subscribed synapse activity, just like epsilonRec but without decay.
|
||||
@@ -276,7 +276,7 @@ function kfn_1(params::Dict; device=cpu)
|
||||
kfn.alif_error = (similar(kfn.alif_wRec) .= 0)
|
||||
|
||||
kfn.alif_firingCounter = (similar(kfn.alif_wRec) .= 0)
|
||||
kfn.alif_firingTargetFrequency = (similar(kfn.alif_wRec) .= 0.1)
|
||||
kfn.alif_firingTargetFrequency = (similar(kfn.alif_wRec) .= 10)
|
||||
kfn.alif_neuronInactivityCounter = (similar(kfn.alif_wRec) .= 0)
|
||||
kfn.alif_synapseReconnectDelay = (similar(kfn.alif_wRec) .= -0.1) # -0.1 for non-sub conn
|
||||
kfn.alif_synapticActivityCounter = (similar(kfn.alif_wRec) .= 0)
|
||||
@@ -347,7 +347,7 @@ function kfn_1(params::Dict; device=cpu)
|
||||
kfn.on_gammaPd = (similar(kfn.on_wOut) .= 0.3)
|
||||
kfn.on_wOutChange = (similar(kfn.on_wOut) .= 0)
|
||||
kfn.on_error = (similar(kfn.on_wOut) .= 0)
|
||||
kfn.on_subscription = (GeneralUtils.isNotEqual.(kfn.on_wOut, 0)) |> device
|
||||
kfn.on_synapticActivityCounter = (similar(kfn.on_wOut) .= 0)
|
||||
|
||||
kfn.on_firingCounter = (similar(kfn.on_wOut) .= 0)
|
||||
|
||||
@@ -372,15 +372,15 @@ function random_wRec(row, col, n, synapseConnectionNumber)
|
||||
for slice in eachslice(w, dims=3)
|
||||
pool = shuffle!([1:row*col...])[1:synapseConnectionNumber]
|
||||
for i in pool
|
||||
slice[i] = rand(0.01:0.01:0.5) # assign weight to synaptic connection. /10 to start small,
|
||||
slice[i] = rand(0.01:0.01:0.05) # assign weight to synaptic connection. /10 to start small,
|
||||
# otherwise RSNN's vt Usually stay negative (-)
|
||||
end
|
||||
end
|
||||
|
||||
# adjust weight so that RSNN fires small amount of neurons at the beginning to avoid overwhelming
|
||||
# all-fire situation. it also better than not-fire-at-all situation.
|
||||
avgWeight = sum(w)/length(w)
|
||||
w = w .* (0.01 / avgWeight) # adjust overall weight
|
||||
# # adjust weight so that RSNN fires small amount of neurons at the beginning to avoid overwhelming
|
||||
# # all-fire situation. it also better than not-fire-at-all situation.
|
||||
# avgWeight = sum(w)/length(w)
|
||||
# w = w .* (0.01 / avgWeight) # adjust overall weight
|
||||
|
||||
return w #(row, col, n)
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user