diff --git a/src/Ironpen.jl b/src/Ironpen.jl
index 6edf4ea..df824eb 100644
--- a/src/Ironpen.jl
+++ b/src/Ironpen.jl
@@ -34,9 +34,9 @@ using .interface
 
 """
 Todo:
-    [9] verify that model can complete learning cycle with no error
-    [*5] synaptic connection strength concept. use sigmoid, turn connection offline
-    [8] neuroplasticity() i.e. change connection
+    [3] verify that model can complete learning cycle with no error
+    [*1] synaptic connection strength concept. use sigmoid, turn connection offline
+    [2] neuroplasticity() i.e. change connection
     [] using RL to control learning signal
     [] consider using Dates.now() instead of timestamp because time_stamp may overflow
     [] training should include adjusting α, neuron membrane potential decay factor
diff --git a/src/learn.jl b/src/learn.jl
index f1ab14d..38b9d22 100644
--- a/src/learn.jl
+++ b/src/learn.jl
@@ -87,11 +87,15 @@ function learn!(kfn::kfn_1, correctAnswer::AbstractVector)
         LinearAlgebra.normalize!(n.wRec, 1)
         n.wRec .*= nonFlipedSign # set weight that fliped sign to 0 for random new connection
-        #WORKING synapticConnStrength
+        # Threads.@threads for n in kfn.neuronsArray
+        for n in kfn.neuronsArray
+            #WORKING synapticConnStrength
+
+
+
+            #TODO neuroplasticity
+        end
-
-
-        #TODO neuroplasticity
     end
 
     for n in kfn.outputNeuronsArray
         # merge wRecChange into wRec
 
diff --git a/src/snn_utils.jl b/src/snn_utils.jl
index 3104ea6..0eed47e 100644
--- a/src/snn_utils.jl
+++ b/src/snn_utils.jl
@@ -291,9 +291,9 @@ end
 """
 function synapticConnStrength(currentStrength::AbstractFloat, bias::Number=0)
     currentStrength += bias
-    currentStrength - (1.0 - sigmoid(currentStrength))
-    currentStrength -= bias
-    return currentStrength
+    updatedStrength = currentStrength - (1.0 - sigmoid(currentStrength))
+    updatedStrength -= bias
+    return updatedStrength
 end
 
 