use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge

This commit is contained in:
2023-05-16 21:01:11 +07:00
parent 70d2521c5e
commit 0ac5a703ea
2 changed files with 12 additions and 7 deletions

View File

@@ -34,13 +34,8 @@ using .interface
""" """
Todo: Todo:
[*6] time-based learning method based on new error formula
(use output vt compared to vth instead of late time)
if the output neuron does not activate when it should, use the output neuron's
(vth - vt)*100/vth as error
if output neuron activates when it should NOT, use output neuron's
(vt*100)/vth as error
[7] use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge
[9] verify that model can complete learning cycle with no error [9] verify that model can complete learning cycle with no error
[*5] synaptic connection strength concept. use sigmoid, turn connection offline [*5] synaptic connection strength concept. use sigmoid, turn connection offline
[8] neuroplasticity() i.e. change connection [8] neuroplasticity() i.e. change connection
@@ -56,6 +51,13 @@ using .interface
"inference" "inference"
[DONE] output neuron connect to random multiple compute neurons and overall have [DONE] output neuron connect to random multiple compute neurons and overall have
the same structure as lif the same structure as lif
[DONE] time-based learning method based on new error formula
(use output vt compared to vth instead of late time)
if the output neuron does not activate when it should, use the output neuron's
(vth - vt)*100/vth as error
if output neuron activates when it should NOT, use output neuron's
(vt*100)/vth as error
[DONE] use LinearAlgebra.normalize!(vector, 1) to adjust weight after weight merge
Change from version: v06_36a Change from version: v06_36a
- -

View File

@@ -162,6 +162,7 @@ function learn!(n::lif_neuron, error::Number)
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1)
# check for flipped sign, 1 indicates non-flipped sign # check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange) wSign = sign.(n.wRecChange)
@@ -182,6 +183,7 @@ function learn!(n::alif_neuron, error::Number)
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1)
# check for flipped sign, 1 indicates non-flipped sign # check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange) wSign = sign.(n.wRecChange)
@@ -198,6 +200,7 @@ function learn!(n::linear_neuron, error::Number)
ΔwRecChange = n.eta * error ΔwRecChange = n.eta * error
n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange n.wRecChange = (n.subExInType * n.wRecChange) + ΔwRecChange
LinearAlgebra.normalize!(n.wRecChange, 1)
# check for flipped sign, 1 indicates non-flipped sign # check for flipped sign, 1 indicates non-flipped sign
wSign = sign.(n.wRecChange) wSign = sign.(n.wRecChange)