building forward()

This commit is contained in:
ton
2023-07-10 21:02:12 +07:00
parent d427875679
commit 3482e87892
4 changed files with 194 additions and 17 deletions

View File

@@ -2,44 +2,122 @@ module type
# exported API
export kfn_1

using Random, GeneralUtils
#------------------------------------------------------------------------------------------------100
# Type hierarchy: `knowledgeFn` is the abstract parent of concrete kfn structs.
abstract type Ironpen end
abstract type knowledgeFn <: Ironpen end
# Module-wide RNG with a fixed seed for reproducible initialization.
# `const` so the global is concretely typed (non-const globals are Any-typed).
const rng = MersenneTwister(1234)
#------------------------------------------------------------------------------------------------100
# State container for one knowledge function: hyper-parameters, time
# bookkeeping, activation tensors and synaptic weight tensors.
# Fields default to zeros/`nothing` and are filled by the outer
# constructor and the forward pass.
Base.@kwdef mutable struct kfn_1 <: knowledgeFn
    params::Dict = Dict()                                  # store params of knowledgeFn itself for later use
    timeStep::AbstractArray = [0]                          # forward-pass counter; 1-element array so it mutates in place
    refractory::Union{AbstractArray, Nothing} = nothing    # NOTE(review): never touched in this chunk — presumably per-neuron refractory state; confirm
    learningStage::AbstractArray = [0]                     # 0 inference, 1 start, 2 during, 3 end learning
    z_i_t1::Union{AbstractArray, Nothing} = nothing        # 2D activation matrix (allocated row×col×1 by the constructor)
    z_i_t::Union{AbstractArray, Nothing} = nothing
    z_t::Union{AbstractArray, Nothing} = nothing
    z_t1::Union{AbstractArray, Nothing} = nothing
    z_i_t0::Union{AbstractArray, Nothing} = nothing        # allocated row×col×1 by the constructor
    lif_w::Union{AbstractArray, Nothing} = nothing         # lif synaptic weights, row×col×(row*col)
    alif_w::Union{AbstractArray, Nothing} = nothing        # alif synaptic weights, row×col×(row*col)
end
# Sparsely initialize the 3D weight tensor `w` in place: for every depth
# slice (one slice per neuron; z == row*col), pick `percent`% of the
# slice's row*col positions at random and seed them with small Gaussian
# weights (randn()/10); all other entries stay zero. Returns `w`.
function _subscribeSynapses!(w::AbstractArray, percent)
    _, _, z = size(w)                       # z == row*col: synaptic subscribe weight for each neuron
    nconn = Int(floor(z * percent / 100))
    for slice in eachslice(w, dims=3)       # slices are views, so writes mutate `w`
        pool = shuffle!(collect(1:z))[1:nconn]
        for i in pool
            slice[i] = randn() / 10
        end
    end
    return w
end

# Outer constructor: build a `kfn_1` from a parameter Dict, size the
# activation matrices from the input-port / compute-neuron counts, and
# sparsely initialize the lif/alif synaptic weight tensors.
function kfn_1(params::Dict)
    kfn = kfn_1()
    kfn.params = params
    # Activation-matrix dimensions: noise-port rows/cols, plus signal-port
    # rows/cols, plus one column group per compute-neuron kind.
    row, col = kfn.params[:inputPort][:noise][:numbers]
    row += kfn.params[:inputPort][:signal][:numbers][1]
    col += kfn.params[:inputPort][:signal][:numbers][2]
    col += kfn.params[:computeNeuron][:lif][:numbers][2]
    col += kfn.params[:computeNeuron][:alif][:numbers][2]
    kfn.z_i_t1 = zeros(row, col, 1)
    kfn.z_i_t0 = zeros(row, col, 1)
    kfn.lif_w = zeros(row, col, row*col)
    kfn.alif_w = zeros(row, col, row*col)
    # lif / alif synaptic subscription — identical procedure, separate params.
    _subscribeSynapses!(kfn.lif_w,
        kfn.params[:computeNeuron][:lif][:params][:synapticConnectionPercent])
    _subscribeSynapses!(kfn.alif_w,
        kfn.params[:computeNeuron][:alif][:params][:synapticConnectionPercent])
    return kfn
end
# kfn forward (functor): advance the time step and begin propagating `input`
# through the activation matrix.
# NOTE(review): work in progress — prints numbered debug output and ends in a
# deliberate `error("debug end kfn forward")`; not usable as-is.
function (kfn::kfn_1)(input::AbstractArray)
# bump the in-place time-step counter (1-element array)
kfn.timeStep .+= 1
# row, col = size(input) # if input is a 2D matrix
println(">>> 5 ", size(input))
println(">>> 6 ", size(kfn.z_i_t1))
#WORKING multiply input with kfn.z_i_t1 may be using cartesian coordinates
println(">>> 7 ", view(kfn.z_i_t1, :, 1, :))
# write `input` into the first column of z_i_t1 — assumes length(input) == row; TODO confirm
view(kfn.z_i_t1, :, 1) .= input
println(">>> 8 ", kfn.z_i_t1[:, 1])
# multiply kfn.z_i_t1 with kfn.lif_w
# NOTE(review): batchMatEleMul is a GeneralUtils helper not visible in this
# file — presumably a batched elementwise multiply; verify against GeneralUtils
r = GeneralUtils.batchMatEleMul(kfn.z_i_t1, kfn.lif_w)
println(size(r))
error("debug end kfn forward")
end