'FreeBASIC neural-network demo (forum-paste artifact "Code: Select all" replaced with this comment)
'string.bi provides format(), used by nn_type.visualise
#include "string.bi"
'short fixed-width aliases for the built-in numeric types
#define i8 byte
#define u8 ubyte
#define i16 short
#define u16 ushort
#define i32 long
#define u32 ulong
#define i64 longint
#define u64 ulongint
#define f32 single
#define f64 double
'random f32 in [low, high) -- parameters are parenthesized so that
'expression arguments (e.g. rnd_range(a - b, c)) expand correctly
#define rnd_range(low, high) _
	(rnd(1) * ((high) - (low)) + (low))
'NOTE: as textual macros, max/min evaluate their arguments twice;
'avoid passing expressions with side effects
#define max(v1, v2) _
	(iif((v1) > (v2), (v1), (v2)))
#define min(v1, v2) _
	(iif((v1) < (v2), (v1), (v2)))
'value range: -1...+1
'map a value in [-1, +1] to a colour: positive leans green,
'negative leans red, zero is yellow (255, 255, 0)
function smart_color(value as f32) as u32
	if value > 0 then
		'reduce the red channel, capped at 255 (full green near value = 1)
		dim as f32 r_cut = value * 256
		return rgb(255 - iif(r_cut < 255, r_cut, 255), 255, 0)
	else
		'reduce the green channel, capped at 255 (full red near value = -1)
		dim as f32 g_cut = -value * 256
		return rgb(255, 255 - iif(g_cut < 255, g_cut, 255), 0)
	end if
end function
'-------------------------------------------------------------------------------
'soft limit to 0...+1
'logistic function: returns 0.5 at x = 0, approaches 0 / 1 for large |x|
function sigmoid(x as f32) as f32
	dim as f32 denom = 1 + exp(-x)
	return 1 / denom
end function
'soft limit to -1...+1
'rescaled logistic: returns 0 at x = 0, approaches -1 / +1 for large |x|
function sigmoid_symetric(x as f32) as f32
	dim as f32 denom = 1 + exp(-x)
	return 2 / denom - 1
end function
'derivative / slope of sigmoid: s * (1 - s) with s = sigmoid(x)
'https://kawahara.ca/how-to-compute-the-derivative-of-a-sigmoid-function-fully-worked-example/
'https://beckernick.github.io/sigmoid-derivative-neural-network/
function dsigmoid(x as f32) as f32
	'sigmoid inlined here so no helper call is needed
	dim as f32 s = 1 / (1 + exp(-x))
	return s * (1 - s)
end function
'derivative / slope of sigmoid_symetric: 2 * s * (1 - s) with s = sigmoid(x)
function dsigmoid_symetric(x as f32) as f32
	'sigmoid inlined here so no helper call is needed
	dim as f32 s = 1 / (1 + exp(-x))
	return 2 * s * (1 - s)
end function
'-------------------------------------------------------------------------------
const as single LRT = 0.01 'learning rate: scales the weight updates in back_propagation
'one layer of the network: cell values, per-cell error terms, and the
'weight matrix linking this layer's cells to the next layer's cells
type layer_type
dim as i32 num_cells 'number of cells in this layer
dim as f32 cell(any) 'cell values; dynamic, sized by the nn_type constructor
dim as f32 err0r(any) 'per-cell error term (spelled err0r: "error" is a FreeBASIC keyword)
dim as f32 link(any, any) 'N input <-> M output
end type
'the whole network: an array of layers plus the training interface
type nn_type
dim as i32 num_layers 'number of layers, set by the constructor
dim as layer_type layer(any) 'index 0 = input layer, last index = output layer
declare constructor(num_cells() as i32) 'num_cells(): one entry per layer = cell count
declare destructor()
declare function set_input(source() as f32) as i32 'returns -1 on size mismatch
declare sub forward_propagation()
declare function back_propagation(target() as f32) as i32 'returns -1 on size mismatch
declare sub show_summary()
declare sub visualise(w as i32, h as i32) 'needs an open graphics screen
end type
'build and initialise the neural network
'num_cells(): one entry per layer giving that layer's cell count
constructor nn_type(num_cells() as i32)
	dim as i32 last = ubound(num_cells)
	num_layers = last + 1
	redim layer(0 to last)
	for l as i32 = 0 to last
		dim as i32 n = num_cells(l)
		layer(l).num_cells = n
		'dynamic member arrays require the parenthesised redim form
		redim (layer(l).cell)(0 to n - 1)
		redim (layer(l).err0r)(0 to n - 1)
		'start with neutral cell values and cleared error terms
		for c as i32 = 0 to n - 1
			layer(l).cell(c) = 0
			layer(l).err0r(c) = 0
		next
		'links connect layer l to layer l + 1, so the last layer has none
		if l < last then
			dim as i32 m = num_cells(l + 1)
			redim (layer(l).link)(0 to n - 1, 0 to m - 1)
			'random initial weights in [-1, +1)
			for c as i32 = 0 to n - 1
				for o as i32 = 0 to m - 1
					layer(l).link(c, o) = rnd_range(-1, +1)
				next
			next
		end if
	next
	print "constructor nn_type() - done"
end constructor
'release the dynamically allocated member arrays
destructor nn_type()
	for l as i32 = ubound(layer) to 0 step -1
		erase layer(l).cell
		erase layer(l).err0r
		erase layer(l).link
	next
	erase layer
	print "destructor nn_type() - done"
end destructor
'copy source() into the input layer (layer 0)
'returns 0 on success, -1 if source() size <> input layer size
function nn_type.set_input(source() as f32) as i32
	dim as i32 ub_cells = ubound(layer(0).cell)
	'check num sources == num input cells
	if ubound(source) <> ub_cells then return -1
	for i as i32 = 0 to ub_cells
		layer(0).cell(i) = source(i)
	next
	return 0 'explicit success result (was the implicit default 0)
end function
'propagate values from the input layer towards the output layer
'NOTE(review): the weighted sum is stored raw -- no activation function
'is applied here, although sigmoid()/dsigmoid() are defined above and
'back_propagation() uses dsigmoid(); confirm against the TODO list at
'the end of the file ("store sigmoid at cell at forward propagation")
sub nn_type.forward_propagation()
'loop layer lo: 1 to N-1
'for each cell(co):
'value = Σci(prev_layercell(ci) * link_to_this_cell(ci,co))
for lo as i32 = 1 to num_layers - 1
dim as i32 li = lo - 1 'index of the feeding (previous) layer
for co as i32 = 0 to layer(lo).num_cells - 1 'loop output layer
dim as f32 sum = 0.0
for ci as i32 = 0 to layer(li).num_cells - 1 'loop input layer
'weights live on the input-side layer: link(in_cell, out_cell)
sum += layer(li).cell(ci) * layer(li).link(ci, co)
next
layer(lo).cell(co) = sum 'adjust output cell
next
next
end sub
'one training step: compute output errors against target(), push error
'terms backwards through the layers, then nudge every link weight
'returns -1 if target() size <> output layer size, otherwise 0 (implicit)
function nn_type.back_propagation(target() as f32) as i32
'steps:
'determine output error(s), compare with target(s)
'- for each input layer (backwards), each cell:
'determine output error(s) for input layer, using link weight
'for each input layer (forwards), each link:
'- adjust link weight: + learning_rate * error * dsigmoid(cell_value) * input_cell
dim as i32 ubLayer = ubound(layer)
dim as i32 ubCell = ubound(layer(ubLayer).cell)
if ubound(target) <> ubCell then return -1 'num targets <> num output cells
dim as i32 ubLink
'Determine output error(s)
for i as i32 = 0 to ubCell 'loop calls output layer
layer(ubLayer).err0r(i) = target(i) - layer(ubLayer).cell(i)
next
'Determine mid layer errors
for l as i32 = ubLayer - 1 to 1 step -1 'no errors needed for input layer 0
ubCell = ubound(layer(l).cell)
ubLink = ubound(layer(l + 1).cell)
for i as i32 = 0 to ubCell 'loop cells
layer(l).err0r(i) = 0
for j as i32 = 0 to ubLink 'loop links / output cells
'a cell's error is the weighted sum of the next layer's error terms
layer(l).err0r(i) += layer(l).link(i, j) * layer(l + 1).err0r(j)
next
next
next
'Update nerves / weights in forward direction
for l as i32 = 0 to ubLayer - 1 'skip last layer, no further links
ubCell = ubound(layer(l).cell)
ubLink = ubound(layer(l + 1).cell)
for i as i32 = 0 to ubCell 'loop cells
for j as i32 = 0 to ubLink 'loop links / output cells
'here is where the magic happens, I hope
'NOTE(review): dsigmoid() is fed the stored cell value; because
'forward_propagation() stores the raw weighted sum (no sigmoid is
'applied there), this acts only as a gradient scaling factor --
'confirm against the intended activation scheme (see file TODOs)
layer(l).link(i, j) += LRT * layer(l + 1).err0r(j) _
* dsigmoid(layer(l+1).cell(j)) * layer(l).cell(i)
next
next
next
end function
'print the layer count and each layer's cell count to the console
sub nn_type.show_summary()
	print "num_layers: "; num_layers
	dim as i32 l = 0
	while l < num_layers
		print "layer(" & l & ").num_cells: "; layer(l).num_cells
		l += 1
	wend
end sub
'graphics screen needed, w = width, h = height
'draw the network: one column of cells per layer, lines for the links,
'everything coloured by smart_color() and labelled with its value
sub nn_type.visualise (w as i32, h as i32)
	dim as i32 cell_radius = 10
	dim as i32 w_dist = w / num_layers 'horizontal spacing between layer columns
	for i as i32 = 0 to num_layers - 1
		dim as i32 ub_cells = ubound(layer(i).cell)
		dim as i32 h_dist = h / (ub_cells + 1) 'vertical spacing within this layer
		dim as i32 x = w_dist * (i + 0.5) 'column x, invariant per layer (hoisted)
		for j as i32 = 0 to ub_cells
			dim as u32 cell_color = smart_color(layer(i).cell(j))
			dim as i32 y = h_dist * (j + 0.5)
			circle (x, y), cell_radius, cell_color
			dim as string text = format(layer(i).cell(j), "0.000")
			'8x8 font: offset by 4 px per character to centre the label
			draw string(x - len(text) * 4, y + 8), text
		next
		if i <> num_layers - 1 then 'skip last layer, it has no outgoing links
			dim as i32 ub_cells_next = ubound(layer(i + 1).cell)
			dim as i32 h_dist_next = h / (ub_cells_next + 1)
			dim as i32 x2 = w_dist * (i + 1.5) 'next column x (hoisted out of both loops)
			for j as i32 = 0 to ub_cells
				dim as i32 y1 = h_dist * (j + 0.5) 'hoisted out of the k loop
				for k as i32 = 0 to ub_cells_next
					dim as u32 link_color = smart_color(layer(i).link(j, k))
					dim as i32 y2 = h_dist_next * (k + 0.5)
					line(x, y1)-(x2, y2), link_color
					dim as string text = format(layer(i).link(j, k), "0.000")
					'mark and label the link weight at the line's midpoint
					circle ((x + x2)\2, (y1 + y2)\2), cell_radius \ 3, link_color
					draw string((x + x2)\2 - len(text) * 4, (y1 + y2)\2 + 8), text
				next
			next
		end if
	next
end sub
'-------------------------------------------------------------------------------
'demo: train a tiny fully connected network to map source() -> target()
const as i32 SW = 800, SH = 600
screenres SW, SH, 32
width SW \ 8, SH \ 16
randomize 1234 'fixed seed -> reproducible initial weights
'the size of the neural network is defined by the user/caller
dim as f32 source(0 to 1) = {-1, +1}
dim as f32 target(0 to 1) = {+1, -1}
dim as i32 nn_size_def(...) = {ubound(source) + 1, 2, 3, 2, ubound(target) + 1}
dim as nn_type nn = nn_type(nn_size_def())
nn.set_input(source())
nn.forward_propagation()
nn.show_summary()
'output layer index, derived instead of hard-coded (was: nn.layer(4),
'which silently breaks if nn_size_def changes)
dim as i32 out_layer = nn.num_layers - 1
while not multikey(1) 'until ESC is pressed
	screenlock()
	cls()
	nn.visualise(SW, SH)
	screenunlock()
	nn.back_propagation(target())
	nn.forward_propagation()
	'end condition: every output cell close enough to its target
	dim as i32 trained = -1
	for i as i32 = 0 to ubound(target)
		if abs(target(i) - nn.layer(out_layer).cell(i)) >= 0.0002 then trained = 0
	next
	if trained then exit while
	sleep(10)
wend
print "End, network trained!"
print "Press any key to exit."
getkey()
screen 0 'close graphics screen, for destructor printout
'TODO:
'backward propagation
'decrease learnrate exponentially?
'remove num_links, num_cells, use ubound, use ub_cells not ubCells
'store sigmoid at cell at forward propagation, for use during backward propagation
'clone network
'random mutation
'merge networks
'save/load network