synaptic connection strength concept

This commit is contained in:
2023-05-17 07:33:03 +07:00
parent b9a140372d
commit dee756adac
4 changed files with 19 additions and 26 deletions

View File

@@ -1,6 +1,6 @@
module Ironpen
export kfn_1
export kfn_1, synapticConnStrength!
""" Order by dependencies of each file. The 1st included file must not depend on any other
@@ -35,7 +35,6 @@ using .interface
"""
Todo:
[3] verify that model can complete learning cycle with no error
[*1] synaptic connection strength concept. use sigmoid, turn connection offline
[2] neuroplasticity() i.e. change connection
[] using RL to control learning signal
[] consider using Dates.now() instead of timestamp because time_stamp may overflow
@@ -57,6 +56,7 @@ using .interface
(vt*100)/vth as error
[DONE] use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge
[DONE] reset_epsilonRec after ΔwRecChange is calculated
[DONE] synaptic connection strength concept. use sigmoid, turn connection offline
Change from version: v06_36a
-

View File

@@ -65,7 +65,7 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
outs = [n.z_t1 for n in kfn.outputNeuronsArray]
for (i, out) in enumerate(outs)
if out != correctAnswer[i] # need to adjust weight
kfnError = (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].v_t) *
kfnError = (kfn.outputNeuronsArray[i].v_th - kfn.outputNeuronsArray[i].v_t1) *
100 / kfn.outputNeuronsArray[i].v_th
# Threads.@threads for n in kfn.neuronsArray
@@ -87,15 +87,9 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
LinearAlgebra.normalize!(n.wRec, 1)
n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
# Threads.@threads for n in kfn.neuronsArray
for n in kfn.neuronsArray
#WORKING synapticConnStrength
synapticConnStrength!(n)
#TODO neuroplasticity
end
println("")
end
for n in kfn.outputNeuronsArray # merge wRecChange into wRec
@@ -105,7 +99,7 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
LinearAlgebra.normalize!(n.wRec, 1)
n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
#TODO synapticConnStrength
synapticConnStrength!(n)
#TODO neuroplasticity
end

View File

@@ -4,13 +4,13 @@ using Flux.Optimise: apply!
export calculate_α, calculate_ρ, calculate_k, timestep_forward!, init_neuron, no_negative!,
precision, calculate_w_change!, store_knowledgefn_error!, interneurons_adjustment!,
reset_z_t!, resetLearningParams!, reset_learning_history_params!, reset_epsilonRec!,
reset_epsilonRecA!,
reset_epsilonRecA!, synapticConnStrength!,
firing_rate_error!, firing_rate_regulator!, update_Bn!, cal_firing_reg!,
neuroplasticity!, shakeup!, reset_learning_no_wchange!, adjust_internal_learning_rate!,
gradient_withloss
using Statistics, Random, LinearAlgebra, Distributions, Zygote, Flux
using GeneralUtils
using ..types
#------------------------------------------------------------------------------------------------100
@@ -298,22 +298,21 @@ function synapticConnStrength(currentStrength::AbstractFloat, updown::String, bi
return updatedStrength
end
function synapticConnStrength(n::compute_neuron)
"""
    synapticConnStrength!(n::Union{compute_neuron, output_neuron})

Adapt the per-connection synaptic strengths of neuron `n` in place.

For each connection `i`, the strength is stepped up when the connection saw
recent activity (`n.epsilonRec[i] != 0.0`) and stepped down otherwise, then
clamped to `n.synapticStrengthLimit`. A connection whose strength reaches the
lower limit is taken offline: its recurrent weight `n.wRec[i]` is zeroed so a
new random synaptic connection can replace it.
"""
function synapticConnStrength!(n::Union{compute_neuron, output_neuron})
    for (i, connStrength) in enumerate(n.synapticStrength)
        # Usage-driven direction: nonzero epsilonRec means the connection was
        # used this cycle, so its strength grows; unused connections decay.
        updown = n.epsilonRec[i] == 0.0 ? "down" : "up"
        updatedConnStrength = synapticConnStrength(connStrength, updown)
        updatedConnStrength = GeneralUtils.limitvalue(updatedConnStrength,
            n.synapticStrengthLimit.lowerlimit, n.synapticStrengthLimit.upperlimit)
        # BUG FIX: persist the updated strength. Previously the computed value
        # was discarded, so n.synapticStrength never changed across calls and
        # unused connections could never decay toward the lower limit.
        n.synapticStrength[i] = updatedConnStrength
        # At the lower limit, mark wRec at this position 0.0 so a new random
        # synaptic connection can be formed here.
        if updatedConnStrength == n.synapticStrengthLimit.lowerlimit[1]
            n.wRec[i] = 0.0
        end
    end
    return n
end
function synapticConnStrength!(n::input_neuron) end

View File

@@ -3,7 +3,7 @@ module types
export
# struct
IronpenStruct, model, knowledgeFn, lif_neuron, alif_neuron, linear_neuron,
kfn_1, compute_neuron, neuron, output_neuron, passthrough_neuron,
kfn_1, input_neuron, compute_neuron, neuron, output_neuron, passthrough_neuron,
# function
instantiate_custom_types, init_neuron, populate_neuron,