diff --git a/src/snn_utils.jl b/src/snn_utils.jl
index 0eed47e..333ed96 100644
--- a/src/snn_utils.jl
+++ b/src/snn_utils.jl
@@ -289,13 +289,32 @@ end
 one may use bias = -5 to transform synaptic strength into range -5 to 5
 the return value is shifted back to original scale
 """
-function synapticConnStrength(currentStrength::AbstractFloat, bias::Number=0)
+function synapticConnStrength(currentStrength::AbstractFloat, updown::String, bias::Number=0)
     currentStrength += bias
-    updatedStrength - (1.0 - sigmoid(currentStrength))
+    if currentStrength > 0
+        Δstrength = (1.0 - sigmoid(currentStrength))
+    else
+        Δstrength = sigmoid(currentStrength)
+    end
+
+    if updown == "up"
+        updatedStrength = currentStrength + Δstrength
+    else
+        updatedStrength = currentStrength - Δstrength
+    end
     updatedStrength -= bias
     return updatedStrength
 end
+
+function synapticConnStrength(n::compute_neuron, updown::String="up")
+    # Apply the scalar update rule to every synaptic strength of `n`, in place.
+    # NOTE(review): direction defaults to "up" — confirm against intended callers.
+    for i in eachindex(n.synapticStrength)
+        n.synapticStrength[i] = synapticConnStrength(n.synapticStrength[i], updown)
+    end
+end
+
 
 
 
 
@@ -313,8 +332,6 @@
 
 
 
 
 
-
-
 end # end module
\ No newline at end of file
diff --git a/src/types.jl b/src/types.jl
index 2885d6a..c3747ea 100644
--- a/src/types.jl
+++ b/src/types.jl
@@ -529,7 +529,7 @@ Base.@kwdef mutable struct linear_neuron <: output_neuron
     # previous timestep)
     z_i_t::Union{Array{Bool},Nothing} = nothing
     synapticStrength::Union{Array{Float64},Nothing} = nothing
-    synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>0), upperlimit=(5=>5))
+    synapticStrengthLimit::Union{NamedTuple,Nothing} = (lowerlimit=(-5=>-5), upperlimit=(5=>5))
     gammaPd::Union{Float64,Nothing} = 0.3 # γ_pd, discount factor, value from paper
     alpha::Union{Float64,Nothing} = nothing # α, neuron membrane potential decay factor
 
@@ -630,10 +630,10 @@ function init_neuron!(id::Int64, n::lif_neuron, n_params::Dict, kfnParams::Dict)
 
     # prevent subscription to itself by removing this neuron id
     filter!(x -> x != n.id, n.subscriptionList)
-    n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
+    n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList))
     n.epsilonRec = zeros(length(n.subscriptionList))
-    n.wRec = Random.rand(length(n.subscriptionList))
+    n.wRec = LinearAlgebra.normalize!(rand(length(n.subscriptionList)), 1)
     n.wRecChange = zeros(length(n.subscriptionList))
 
     n.alpha = calculate_α(n)
 end
@@ -649,10 +649,10 @@ function init_neuron!(id::Int64, n::alif_neuron, n_params::Dict,
 
     # prevent subscription to itself by removing this neuron id
     filter!(x -> x != n.id, n.subscriptionList)
-    n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
+    n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList))
     n.epsilonRec = zeros(length(n.subscriptionList))
-    n.wRec = Random.rand(length(n.subscriptionList))
+    n.wRec = LinearAlgebra.normalize!(rand(length(n.subscriptionList)), 1)
     n.wRecChange = zeros(length(n.subscriptionList))
 
 
     # the more time has passed from the last time neuron was activated, the more
@@ -671,9 +671,10 @@ function init_neuron!(id::Int64, n::linear_neuron, n_params::Dict, kfnParams::Dict)
 
     subscription_numbers = Int(floor(n_params[:synaptic_connection_number] * kfnParams[:total_compute_neuron] / 100.0))
     n.subscriptionList = [pop!(subscription_options) for i = 1:subscription_numbers]
-    n.synapticStrength = normalize!(rand(length(n.subscriptionList)), 1)
+    n.synapticStrength = rand(-5:0.1:-3, length(n.subscriptionList))
+
     n.epsilonRec = zeros(length(n.subscriptionList))
-    n.wRec = Random.rand(length(n.subscriptionList))
+    n.wRec = LinearAlgebra.normalize!(rand(length(n.subscriptionList)), 1)
     n.wRecChange = zeros(length(n.subscriptionList))
     n.alpha = calculate_k(n)
 end