Another neural network experiment

User projects written in or related to FreeBASIC.
badidea
Posts: 2586
Joined: May 24, 2007 22:10
Location: The Netherlands

Another neural network experiment

Post by badidea »

Still work in progress. For now, the network is trained on a single input set {-1, +1} only, with target output set {+1, -1}, starting from a randomly initialized network.
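The weight update applied in back_propagation below is, per link: link(i, j) += LRT * err0r(j) * dsigmoid(cell(j)) * cell(i), i.e. the learning rate times the error attributed to the output cell, times the sigmoid slope at that cell, times the value of the input cell.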

Code: Select all

#include "string.bi"

#define i8 byte
#define u8 ubyte
#define i16 short
#define u16 ushort
#define i32 long
#define u32 ulong
#define i64 longint
#define u64 ulongint
#define f32 single
#define f64 double

#define rnd_range(low, high)_
	(rnd(1) * (high - low) + low)

#define max(v1, v2)_
	(iif(v1 > v2, v1, v2))

#define min(v1, v2)_
	(iif(v1 < v2, v1, v2))
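'note: v1 and v2 each appear twice in these macros, so arguments with
'side effects (e.g. max(rnd, 0.5)) are evaluated more than once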

'value range: -1...+1
function smart_color(value as f32) as u32
	if value > 0 then
		return rgb(255 - min(value * 256, 255), 255, 0)
	else
		return rgb(255, 255 - min(-value * 256, 255), 0)
	end if
end function

'-------------------------------------------------------------------------------

'soft limit to 0...+1
function sigmoid(x as f32) as f32
	return 1 / (1 + exp(-x))
end function

'soft limit to -1...+1
function sigmoid_symetric(x as f32) as f32
	return 2 / (1 + exp(-x)) - 1
end function

'derivative / slope of sigmoid
'https://kawahara.ca/how-to-compute-the-derivative-of-a-sigmoid-function-fully-worked-example/
'https://beckernick.github.io/sigmoid-derivative-neural-network/
function dsigmoid(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return s * (1 - s)
end function

function dsigmoid_symetric(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return 2 * s * (1 - s)
end function
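'hypothetical sanity check (not part of the original program): the analytic
'slope should match a central finite difference, e.g.
'dim as f32 x = 0.5, h = 0.001
'print dsigmoid(x), (sigmoid(x + h) - sigmoid(x - h)) / (2 * h)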

'-------------------------------------------------------------------------------

const as single LRT = 0.01 'learning rate

type layer_type
	dim as i32 num_cells
	dim as f32 cell(any)
	dim as f32 err0r(any)
	dim as f32 link(any, any) 'N input <-> M output
end type

type nn_type
	dim as i32 num_layers
	dim as layer_type layer(any)
	declare constructor(num_cells() as i32)
	declare destructor()
	declare function set_input(source() as f32) as i32
	declare sub forward_propagation()
	declare function back_propagation(target() as f32) as i32
	declare sub show_summary()
	declare sub visualise(w as i32, h as i32)
end type

'build and initialise the neural network
constructor nn_type(num_cells() as i32)
	dim as i32 ub = ubound(num_cells)
	num_layers = ub + 1
	redim layer(0 to ub)
	for i as i32 = 0 to ub
		layer(i).num_cells = num_cells(i)
		 'crazy syntax, who needs pointers anyway?
		redim (layer(i).cell)(0 to num_cells(i) - 1)
		redim (layer(i).err0r)(0 to num_cells(i) - 1)
		'fill cells with initial values
		for j as i32 = 0 to num_cells(i) - 1
			layer(i).cell(j) = 0 'rnd_range(-1, +1)
			layer(i).err0r(j) = 0
		next
		'allocate memory for the links
		if i <> ub then 'skip final layer, no further links
			'more crazy business!
			redim (layer(i).link)(0 to num_cells(i) - 1, 0 to num_cells(i + 1) - 1)
			'fill links with initial values
			for j as i32 = 0 to num_cells(i) - 1
				for k as i32 = 0 to num_cells(i + 1) - 1
					layer(i).link(j, k) = rnd_range(-1, +1)
				next
			next
		end if
	next
	print "constructor nn_type() - done"
end constructor

'let's do some clean-up
destructor nn_type()
	for i as i32 = 0 to ubound(layer)
		erase layer(i).link
		erase layer(i).cell
		erase layer(i).err0r
	next
	erase layer
	print "destructor nn_type() - done"
end destructor

function nn_type.set_input(source() as f32) as i32
	dim as i32 ub_cells = ubound(layer(0).cell)
	'check num sources == num input cells
	if ubound(source) <> ub_cells then return -1
	for i as i32 = 0 to ub_cells
		layer(0).cell(i) = source(i)
	next
end function

sub nn_type.forward_propagation()
	'loop layers lo: 1 to N-1
	'for each output cell co:
	'value = Σ_ci(prev_layer.cell(ci) * link_to_this_cell(ci, co))
	for lo as i32 = 1 to num_layers - 1
		dim as i32 li = lo - 1
		for co as i32 = 0 to layer(lo).num_cells - 1 'loop output layer
			dim as f32 sum = 0.0
			for ci as i32 = 0 to layer(li).num_cells - 1 'loop input layer
				sum += layer(li).cell(ci) * layer(li).link(ci, co)
			next
			layer(lo).cell(co) = sum 'adjust output cell
		next
	next
end sub
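'note: no activation function is applied in the forward pass above; cells hold
'the raw weighted sums, which back_propagation below passes to dsigmoid()
'(hence the TODO "store sigmoid at cell at forward propagation")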

function nn_type.back_propagation(target() as f32) as i32
	'steps:
	'- determine the output error(s): compare output(s) with target(s)
	'- for each mid layer (backwards), each cell:
	'  determine the cell error from the next layer's errors, via the link weights
	'- for each layer (forwards), each link:
	'  adjust link weight: += learning_rate * error * dsigmoid(cell_value) * input_cell
	dim as i32 ubLayer = ubound(layer)
	dim as i32 ubCell = ubound(layer(ubLayer).cell)
	if ubound(target) <> ubCell then return -1 'num targets <> num output cells
	dim as i32 ubLink
	'Determine output error(s)
	for i as i32 = 0 to ubCell 'loop cells of output layer
		layer(ubLayer).err0r(i) = target(i) - layer(ubLayer).cell(i)
	next
	'Determine mid layer errors
	for l as i32 = ubLayer - 1 to 1 step -1 'no errors needed for input layer 0
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			layer(l).err0r(i) = 0
			for j as i32 = 0 to ubLink 'loop links / output cells
				layer(l).err0r(i) += layer(l).link(i, j) * layer(l + 1).err0r(j)
			next
		next
	next
	'Update nerves / weights in forward direction
	for l as i32 = 0 to ubLayer - 1 'skip last layer, no further links 
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			for j as i32 = 0 to ubLink 'loop links / output cells
				'here is where the magic happens, I hope
				layer(l).link(i, j) += LRT * layer(l + 1).err0r(j) _
					* dsigmoid(layer(l+1).cell(j)) * layer(l).cell(i)
			next
		next
	next
end function

sub nn_type.show_summary()
	print "num_layers: "; num_layers
	for i as i32 = 0 to num_layers - 1
		print "layer(" & i & ").num_cells: "; layer(i).num_cells
	next
end sub

'graphics screen needed, w = width, h = height
sub nn_type.visualise (w as i32, h as i32)
	dim as i32 cell_radius = 10
	dim as u32 cell_color = &hff00ff00 'green
	dim as u32 link_color = &hff0077ff 'blueish
	dim as i32 w_dist = w / num_layers
	'line(0, 0)-(w-1, h-1), &hff777777, bf
	for i as i32 = 0 to num_layers - 1
		dim as i32 ub_cells = ubound(layer(i).cell)
		dim as i32 h_dist = h / (ub_cells + 1)
		for j as i32 = 0 to ub_cells
			cell_color = smart_color(layer(i).cell(j))
			dim as i32 x = w_dist * (i + 0.5)
			dim as i32 y = h_dist * (j + 0.5)
			circle (x, y), cell_radius, cell_color
			dim as string text = format(layer(i).cell(j), "0.000")
			draw string(x - len(text) * 4, y + 8), text', cell_color
			'draw string(x + 16, y - 8), text
		next
		if i <> num_layers - 1 then 'skip last layer
			dim as i32 ub_cells_next = ubound(layer(i + 1).cell)
			dim as i32 h_dist_next = h / (ub_cells_next + 1)
			for j as i32 = 0 to ub_cells
				for k as i32 = 0 to ub_cells_next
					link_color = smart_color(layer(i).link(j, k))
					dim as i32 x1 = w_dist * (i + 0.5)
					dim as i32 y1 = h_dist * (j + 0.5)
					dim as i32 x2 = w_dist * (i + 1.5)
					dim as i32 y2 = h_dist_next * (k + 0.5)
					line(x1, y1)-(x2, y2), link_color
					dim as string text = format(layer(i).link(j, k), "0.000")
					circle ((x1 + x2)\2, (y1 + y2)\2), cell_radius \ 3, link_color
					draw string((x1 + x2)\2 - len(text) * 4, (y1 + y2)\2 + 8), text', link_color
				next
			next
		end if
	next
end sub

'-------------------------------------------------------------------------------

const as i32 SW = 800, SH = 600
screenres SW, SH, 32
width SW \ 8, SH \ 16

randomize 1234

'the size of the neural network is defined by the user/caller
dim as f32 source(0 to 1) = {-1, +1}
dim as f32 target(0 to 1) = {+1, -1}
dim as i32 nn_size_def(...) = {ubound(source) + 1, 2, 3, 2, ubound(target) + 1}
dim as nn_type nn = nn_type(nn_size_def())
nn.set_input(source())
nn.forward_propagation()
nn.show_summary()
while not multikey(1)
	screenlock()
	cls()
	nn.visualise(SW, SH)
	screenunlock()
	nn.back_propagation(target())
	nn.forward_propagation()
	'ugly check for end condition (layer(4) is the output layer)
	if abs(target(0) - nn.layer(4).cell(0)) < 0.0002 then
		if abs(target(1) - nn.layer(4).cell(1)) < 0.0002 then
			exit while
		end if
	end if
	sleep(10)
wend
print "End, network trained!"
print "Press any key to exit."
getkey()

screen 0 'close graphics screen, for destructor printout

'TODO:
'backward propagation
'decrease learnrate exponentially?
'remove num_links, num_cells, use ubound, use ub_cells not ubCells
'store sigmoid at cell at forward propagation, for use during backward propagation
'clone network
'random mutation
'merge networks
'save/load network
badidea
Posts: 2586
Joined: May 24, 2007 22:10
Location: The Netherlands

Re: Another neural network experiment

Post by badidea »

Some blurry digit/character images to train the network with:

Code: Select all

#include "fbgfx.bi"

#define i32 long
#define f32 single

sub panic(text as string)
	print text
	getkey()
	end -1
end sub

sub imageKill(byref p_img as any ptr)
	imageDestroy(p_img)
	p_img = 0 'byref, so the caller's pointer is cleared as well
end sub

'-------------------------------------------------------------------------------

const as i32 N_IMG = 10 'number of character images
const as i32 W_IMG = 12, H_IMG = 16 'width & height in pixels

'grey scale image, float 0...1 per pixel
type char_img_type
	dim as f32 intensity(0 to H_IMG - 1, 0 to W_IMG - 1) 'y,x!
	declare sub erode(count as i32)
	declare sub blur(runs as i32)
	declare sub stretch_contrast()
end type

'set <count> random pixels to random intensities, skipping the outer border
sub char_img_type.erode(count as i32)
	for i as i32 = 0 to count - 1
		dim as i32 y = int(rnd * (H_IMG - 2)) + 1
		dim as i32 x = int(rnd * (W_IMG - 2)) + 1
		intensity(y, x) = rnd
	next
end sub

sub char_img_type.blur(runs as i32)
	dim as char_img_type char_img_blur
	dim as i32 weight(-1 to +1, -1 to +1) = _
		{{1,2,1},{2,4,2},{1,2,1}} 'blur matrix
	for i as i32 = 0 to runs - 1
		for y_tar as i32 = 0 to H_IMG - 1
			for x_tar as i32 = 0 to W_IMG - 1
				dim as i32 divider = 0
				dim as f32 integrator = 0
				for yi as i32 = -1 to +1
					dim as i32 y_src = y_tar + yi
					if y_src < 0 then continue for
					if y_src >= H_IMG then exit for
					for xi as i32 = -1 to +1
						dim as i32 x_src = x_tar + xi
						if x_src < 0 then continue for
						if x_src >= W_IMG then exit for
						divider += weight(xi, yi)
						integrator += intensity(y_src, x_src)
					next
				next
				integrator /= divider
				if integrator > 1 then panic("Error in 'blur': intensity > 1")
				if integrator < 0 then panic("Error in 'blur': intensity < 0")
				char_img_blur.intensity(y_tar, x_tar) = integrator
			next
		next
		'copy blurred image to original image
		this = char_img_blur
	next
end sub

'compensate for the contrast compression caused by blur
sub char_img_type.stretch_contrast()
	dim as f32 min_int = 1.0, max_int = 0.0
	'find min & max intensities
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			dim as f32 now_int = intensity(y, x)
			if now_int > max_int then max_int = now_int
			if now_int < min_int then min_int = now_int
		next
	next
	'do the stretch (skip uniform images to avoid division by zero)
	if max_int = min_int then exit sub
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			intensity(y, x) = (intensity(y, x) - min_int) _
				* (1.0 - 0.0) / (max_int - min_int)
		next
	next
end sub

'-------------------------------------------------------------------------------

const as i32 SW = 800, SH = 600
screenres SW, SH, 32
width SW \ 8, SH \ 16

dim as char_img_type char_img(0 to N_IMG - 1)
dim as fb.image ptr p_img_temp = imageCreate(W_IMG, H_IMG, &hff000000)

randomize timer

'create grey scale character images 0 to 9
for i as i32 = 0 to ubound(char_img)
	line p_img_temp, (0, 0)-(W_IMG - 1, H_IMG - 1), &hff000000, bf 'clear
	draw string p_img_temp, (2, 0), str(i), &hffffffff 'draw char
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			char_img(i).intensity(y, x) = (point(x, y, p_img_temp) and &h000000ff) / 255
		next
	next
next

'erode & blur character images
for i as i32 = 0 to ubound(char_img)
	char_img(i).erode(20)
	char_img(i).blur(1)
	char_img(i).stretch_contrast()
next

line(0, 0)-(SW - 1, SH - 1), &hff404040, bf

'show all character images
for i as i32 = 0 to ubound(char_img)
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			dim as i32 intensity = char_img(i).intensity(y, x) * 255
			if intensity > 255 then intensity = 255
			if intensity < 0 then intensity = 0
			pset(100 + i * 20 + x, 100 + y), rgb(intensity, intensity, intensity)
		next
	next
next

imageKill(p_img_temp)
getkey()
To be combined with previous code...
badidea
Posts: 2586
Joined: May 24, 2007 22:10
Location: The Netherlands

Re: Another neural network experiment

Post by badidea »

Demonstration that the back-propagation (the learning) works. 10 fixed grey-scale 16 x 12 pixel images of the numbers 0 to 9 are the input to a neural network with 43 middle-layer cells and 10 output cells. If, for example, image number 3 is fed into the network, output 3 should be +1 and all other outputs -1.
The network is randomly initialized, so learning speed varies, but usually by loop counter 1000 it is doing quite well. For performance reasons, only the output of the network is visualized (not all the links). At the far right, the input image is displayed at the height where the single +1 (green) output should be. The simulation can be paused/resumed by pressing <spacebar>.
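For reference, the 43 middle-layer cells follow from the my_mean() macro in the code below: int(sqr(192 / 10) * 10) = int(sqr(1920)) = 43, with 192 = 12 x 16 input pixels and 10 outputs.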

Code: Select all

#include "fbgfx.bi"
#include "string.bi"

#define i8 byte
#define u8 ubyte
#define i16 short
#define u16 ushort
#define i32 long
#define u32 ulong
#define i64 longint
#define u64 ulongint
#define f32 single
#define f64 double

#define rnd_range(low, high)_
	(rnd(1) * (high - low) + low)

#define max(v1, v2)_
	(iif(v1 > v2, v1, v2))

#define min(v1, v2)_
	(iif(v1 < v2, v1, v2))

sub panic(text as string)
	print text
	getkey()
	end -1
end sub

'value range: -1...+1
function smart_color(value as f32) as u32
	if value > 0 then
		return rgb(255 - min(value * 256, 255), 255, 0)
	else
		return rgb(255, 255 - min(-value * 256, 255), 0)
	end if
end function

sub imageKill(byref p_img as any ptr)
	imageDestroy(p_img)
	p_img = 0 'byref, so the caller's pointer is cleared as well
end sub

'-------------------------------------------------------------------------------

const as i32 N_IMG = 10 'number of character images
const as i32 W_IMG = 12, H_IMG = 16 'width & height in pixels

'grey scale image, float 0...1 per pixel
type char_img_type
	dim as f32 intensity(0 to H_IMG - 1, 0 to W_IMG - 1) 'y,x!
	declare sub erode(count as i32)
	declare sub from_image(p_img as fb.image ptr)
	declare sub blur(runs as i32)
	declare sub stretch_contrast()
	declare sub put_(x as i32, y as i32)
end type

sub char_img_type.from_image(p_img as fb.image ptr)
	if p_img->width <> W_IMG then panic("Error: char_img_type.from_image: Width")
	if p_img->height <> H_IMG then panic("Error: char_img_type.from_image: height")
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			intensity(y, x) = (point(x, y, p_img) and &h000000ff) / 255
		next
	next
end sub

'set <count> random pixels to random intensities, skipping the outer border
sub char_img_type.erode(count as i32)
	for i as i32 = 0 to count - 1
		dim as i32 y = int(rnd * (H_IMG - 2)) + 1
		dim as i32 x = int(rnd * (W_IMG - 2)) + 1
		intensity(y, x) = rnd
	next
end sub

sub char_img_type.blur(runs as i32)
	dim as char_img_type char_img_blur
	dim as i32 weight(-1 to +1, -1 to +1) = _
		{{1,2,1},{2,4,2},{1,2,1}} 'blur matrix
	for i as i32 = 0 to runs - 1
		for y_tar as i32 = 0 to H_IMG - 1
			for x_tar as i32 = 0 to W_IMG - 1
				dim as i32 divider = 0
				dim as f32 integrator = 0
				for yi as i32 = -1 to +1
					dim as i32 y_src = y_tar + yi
					if y_src < 0 then continue for
					if y_src >= H_IMG then exit for
					for xi as i32 = -1 to +1
						dim as i32 x_src = x_tar + xi
						if x_src < 0 then continue for
						if x_src >= W_IMG then exit for
						divider += weight(xi, yi)
						integrator += intensity(y_src, x_src)
					next
				next
				integrator /= divider
				if integrator > 1 then panic("Error in 'blur': intensity > 1")
				if integrator < 0 then panic("Error in 'blur': intensity < 0")
				char_img_blur.intensity(y_tar, x_tar) = integrator
			next
		next
		'copy blurred image to original image
		this = char_img_blur
	next
end sub

'compensate for the contrast compression caused by blur
sub char_img_type.stretch_contrast()
	dim as f32 min_int = 1.0, max_int = 0.0
	'find min & max intensities
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			dim as f32 now_int = intensity(y, x)
			if now_int > max_int then max_int = now_int
			if now_int < min_int then min_int = now_int
		next
	next
	'do the stretch (skip uniform images to avoid division by zero)
	if max_int = min_int then exit sub
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			intensity(y, x) = (intensity(y, x) - min_int) _
				* (1.0 - 0.0) / (max_int - min_int)
		next
	next
end sub

'draw on screen
sub char_img_type.put_(x as i32, y as i32)
	for yi as i32 = 0 to H_IMG - 1
		for xi as i32 = 0 to W_IMG - 1
			dim as i32 cur_int = intensity(yi, xi) * 255
			if cur_int > 255 then cur_int = 255
			if cur_int < 0 then cur_int = 0
			pset(x + xi, y + yi), rgb(cur_int, cur_int, cur_int)
		next
	next
end sub

'-------------------------------------------------------------------------------

'Logarithmic mean
#define log_mean(v1, v2)_
	((v1 - v2) / (log(v1) - log(v2)))

'a different mean
#define my_mean(v1, v2)_
	(sqr(v1 / v2) * v2)
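'note: sqr(v1 / v2) * v2 = sqr(v1 * v2), i.e. the geometric mean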

'soft limit to 0...+1
function sigmoid(x as f32) as f32
	return 1 / (1 + exp(-x))
end function

'soft limit to -1...+1
function sigmoid_symetric(x as f32) as f32
	return 2 / (1 + exp(-x)) - 1
end function

'derivative / slope of sigmoid
'https://kawahara.ca/how-to-compute-the-derivative-of-a-sigmoid-function-fully-worked-example/
'https://beckernick.github.io/sigmoid-derivative-neural-network/
function dsigmoid(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return s * (1 - s)
end function

function dsigmoid_symetric(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return 2 * s * (1 - s)
end function

'-------------------------------------------------------------------------------

const as single LRT = 0.01 'learning rate

type layer_type
	dim as i32 num_cells
	dim as f32 cell(any)
	dim as f32 err0r(any)
	dim as f32 link(any, any) 'N input <-> M output
end type

type nn_type
	dim as i32 num_layers
	dim as layer_type layer(any)
	declare constructor(num_cells() as i32)
	declare destructor()
	declare function set_input(source() as f32) as i32
	declare sub set_input2(p_src as f32 ptr)
	declare sub forward_propagation()
	declare function back_propagation(target() as f32) as i32
	declare sub show_summary()
	declare sub visualise(w as i32, h as i32, target_only as i32)
end type

'build and initialise the neural network
constructor nn_type(num_cells() as i32)
	dim as i32 ub = ubound(num_cells)
	num_layers = ub + 1
	redim layer(0 to ub)
	for i as i32 = 0 to ub
		layer(i).num_cells = num_cells(i)
		 'crazy syntax, who needs pointers anyway?
		redim (layer(i).cell)(0 to num_cells(i) - 1)
		redim (layer(i).err0r)(0 to num_cells(i) - 1)
		'fill cells with initial values
		for j as i32 = 0 to num_cells(i) - 1
			layer(i).cell(j) = 0 'rnd_range(-1, +1)
			layer(i).err0r(j) = 0
		next
		'allocate memory for the links
		if i <> ub then 'skip final layer, no further links
			'more crazy business!
			redim (layer(i).link)(0 to num_cells(i) - 1, 0 to num_cells(i + 1) - 1)
			'fill links with initial values
			for j as i32 = 0 to num_cells(i) - 1
				for k as i32 = 0 to num_cells(i + 1) - 1
					layer(i).link(j, k) = rnd_range(-1, +1)
				next
			next
		end if
	next
	print "constructor nn_type() - done"
end constructor

'let's do some clean-up
destructor nn_type()
	for i as i32 = 0 to ubound(layer)
		erase layer(i).link
		erase layer(i).cell
		erase layer(i).err0r
	next
	erase layer
	print "destructor nn_type() - done"
end destructor

'copy source array to input layer
function nn_type.set_input(source() as f32) as i32
	dim as i32 ub_cells = ubound(layer(0).cell)
	'check num sources == num input cells
	if ubound(source) <> ub_cells then return -1
	for i as i32 = 0 to ub_cells
		layer(0).cell(i) = source(i)
	next
end function

'alternative (more dangerous) way to set the input layer
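'(assumes p_src points to at least ubound(layer(0).cell) + 1 contiguous f32 values)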
sub nn_type.set_input2(p_src as f32 ptr)
	for i as i32 = 0 to ubound(layer(0).cell)
		layer(0).cell(i) = p_src[i]
	next
end sub

sub nn_type.forward_propagation()
	'loop layers lo: 1 to N-1
	'for each output cell co:
	'value = Σ_ci(prev_layer.cell(ci) * link_to_this_cell(ci, co))
	for lo as i32 = 1 to num_layers - 1
		dim as i32 li = lo - 1
		for co as i32 = 0 to layer(lo).num_cells - 1 'loop output layer
			dim as f32 sum = 0.0
			for ci as i32 = 0 to layer(li).num_cells - 1 'loop input layer
				sum += layer(li).cell(ci) * layer(li).link(ci, co)
			next
			layer(lo).cell(co) = sum 'adjust output cell
		next
	next
end sub

function nn_type.back_propagation(target() as f32) as i32
	'steps:
	'- determine the output error(s): compare output(s) with target(s)
	'- for each mid layer (backwards), each cell:
	'  determine the cell error from the next layer's errors, via the link weights
	'- for each layer (forwards), each link:
	'  adjust link weight: += learning_rate * error * dsigmoid(cell_value) * input_cell
	dim as i32 ubLayer = ubound(layer)
	dim as i32 ubCell = ubound(layer(ubLayer).cell)
	if ubound(target) <> ubCell then return -1 'num targets <> num output cells
	dim as i32 ubLink
	'Determine output error(s)
	for i as i32 = 0 to ubCell 'loop cells of output layer
		layer(ubLayer).err0r(i) = target(i) - layer(ubLayer).cell(i)
	next
	'Determine mid layer errors
	for l as i32 = ubLayer - 1 to 1 step -1 'no errors needed for input layer 0
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			layer(l).err0r(i) = 0
			for j as i32 = 0 to ubLink 'loop links / output cells
				layer(l).err0r(i) += layer(l).link(i, j) * layer(l + 1).err0r(j)
			next
		next
	next
	'Update nerves / weights in forward direction
	for l as i32 = 0 to ubLayer - 1 'skip last layer, no further links 
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			for j as i32 = 0 to ubLink 'loop links / output cells
				'here is where the magic happens, I hope
				layer(l).link(i, j) += LRT * layer(l + 1).err0r(j) _
					* dsigmoid(layer(l+1).cell(j)) * layer(l).cell(i)
			next
		next
	next
end function

sub nn_type.show_summary()
	print "num_layers: "; num_layers
	for i as i32 = 0 to num_layers - 1
		print "layer(" & i & ").num_cells: "; layer(i).num_cells
	next
end sub

'graphics screen needed, w = width, h = height
sub nn_type.visualise (w as i32, h as i32, target_only as i32)
	dim as i32 cell_radius = 10
	dim as u32 cell_color = &hff00ff00 'green
	dim as u32 link_color = &hff0077ff 'blueish
	dim as i32 w_dist = w / num_layers
	'line(0, 0)-(w-1, h-1), &hff777777, bf
	dim as i32 start_layer = iif(target_only = 0, 0, num_layers - 1)
	for i as i32 = start_layer to num_layers - 1
		dim as i32 ub_cells = ubound(layer(i).cell)
		dim as i32 h_dist = h / (ub_cells + 1)
		'draw cells in layer
		for j as i32 = 0 to ub_cells
			cell_color = smart_color(layer(i).cell(j))
			dim as i32 x = w_dist * (i + 0.5)
			dim as i32 y = h_dist * (j + 0.5)
			circle (x, y), cell_radius, cell_color
			dim as string text = format(layer(i).cell(j), "0.000")
			draw string(x - len(text) * 4, y + 8), text', cell_color
			'draw string(x + 16, y - 8), text
		next
		'draw links to next layer
		if i <> num_layers - 1 then 'skip last layer
			dim as i32 ub_cells_next = ubound(layer(i + 1).cell)
			dim as i32 h_dist_next = h / (ub_cells_next + 1)
			for j as i32 = 0 to ub_cells
				for k as i32 = 0 to ub_cells_next
					link_color = smart_color(layer(i).link(j, k))
					dim as i32 x1 = w_dist * (i + 0.5)
					dim as i32 y1 = h_dist * (j + 0.5)
					dim as i32 x2 = w_dist * (i + 1.5)
					dim as i32 y2 = h_dist_next * (k + 0.5)
					line(x1, y1)-(x2, y2), link_color
					dim as string text = format(layer(i).link(j, k), "0.000")
					circle ((x1 + x2)\2, (y1 + y2)\2), cell_radius \ 3, link_color
					draw string((x1 + x2)\2 - len(text) * 4, (y1 + y2)\2 + 8), text', link_color
				next
			next
		end if
	next
end sub

'-------------------------------------------------------------------------------

const as i32 SW = 800, SH = 600
screenres SW, SH, 32
width SW \ 8, SH \ 16

randomize timer '1234

const as i32 N_IMG = 10
const as i32 W_IMG = 12, H_IMG = 16 'width & height in pixels
dim as char_img_type char_img(0 to N_IMG - 1)

'create 10 images with bad quality numbers
dim as fb.image ptr p_img = imageCreate(W_IMG, H_IMG, &hff000000)
for i as i32 = 0 to ubound(char_img)
	line p_img, (0, 0)-(W_IMG - 1, H_IMG - 1), &hff000000, bf 'clear
	draw string p_img, (2, 0), str(i), &hffffffff 'draw char
	char_img(i).from_image(p_img)
	char_img(i).erode(20)
	char_img(i).blur(1)
	char_img(i).stretch_contrast()
	'char_img(i).put_(100 + i * 20, 100)
next
imageKill(p_img)

'define the neural network
dim as f32 source(0 to W_IMG * H_IMG - 1) 'inputs
dim as f32 target(0 to 9) 'outputs
dim as i32 source_size = ubound(source) + 1
dim as i32 target_size = ubound(target) + 1
dim as i32 mid_size = int(my_mean(source_size, target_size))
dim as i32 nn_size_def(...) = {source_size, mid_size, target_size}
'create the network, filled with random weight links
dim as nn_type nn = nn_type(nn_size_def())

'nn.show_summary()

dim as i32 loop_count = 0
while not multikey(1)
	dim as i32 digit = int(rnd * N_IMG) '10
	'set target array
	for i as i32 = 0 to N_IMG - 1
		target(i) = iif(i = digit, +1, -1)
	next
	'set input layer directly from char image, skip source array
	nn.set_input2(@char_img(digit).intensity(0, 0))
	nn.forward_propagation()
	nn.back_propagation(target())

	screenlock()
		cls()
		nn.visualise(SW, SH, 1)
		draw string (10, 10), "loop_count: " & loop_count
		'draw string (SW - 50, (SH / 10) * (digit + 0.5)), "<-- " & str(digit)
		char_img(digit).put_(SW - 50, (SH / 10) * (digit + 0.5))
	screenunlock()

	'pause on spacebar
	if inkey = " " then
		cls()
		nn.visualise(SW, SH, 1)
		draw string (10, 10), "loop_count: " & loop_count
		'draw string (SW - 50, (SH / 10) * (digit + 0.5)), "<-- " & str(digit)
		char_img(digit).put_(SW - 50, (SH / 10) * (digit + 0.5))
		while inkey = "": sleep 1 : wend
	end if

	sleep 10
	loop_count += 1
wend
print "End of program. Press any key to exit."
getkey()

screen 0 'close graphics screen, for destructor printout

'TODO:
'statistics error/fail (per output)
'training set + test set
'test with multiple mid layers
'decrease learnrate exponentially?
'remove num_links, num_cells, use ubound, use ub_cells not ubCells
'store sigmoid at cell at forward propagation, for use during backward propagation
'clone network
'random mutation
'merge networks
'save/load network
Next step is to create a proper training set and test set. Then play with the learning rate and with the number and size of intermediate layers to see what works best. Also some decent statistics graphs are needed to judge the performance. Further steps could include evolutionary simulation.
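One possible shape for that training/test split, reusing char_img_type from the code above (a sketch only; N_VAR and N_TRAIN are made-up constants):

Code: Select all

'sketch: several eroded variants per digit, split into training and test sets
const as i32 N_VAR = 20, N_TRAIN = 15 'variants per digit, training share
dim as char_img_type img_set(0 to N_IMG - 1, 0 to N_VAR - 1)
dim as fb.image ptr p_img = imageCreate(W_IMG, H_IMG, &hff000000)
for d as i32 = 0 to N_IMG - 1
	line p_img, (0, 0)-(W_IMG - 1, H_IMG - 1), &hff000000, bf 'clear
	draw string p_img, (2, 0), str(d), &hffffffff 'draw digit
	for v as i32 = 0 to N_VAR - 1
		img_set(d, v).from_image(p_img)
		img_set(d, v).erode(20) 'different random noise per variant
		img_set(d, v).blur(1)
		img_set(d, v).stretch_contrast()
	next
next
imageKill(p_img)
'train on variants 0 to N_TRAIN - 1, evaluate on variants N_TRAIN to N_VAR - 1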
dafhi
Posts: 1641
Joined: Jun 04, 2005 9:51

Re: Another neural network experiment

Post by dafhi »

this stuff's pretty interesting. I pick up things here and there. 2 things I have in mind as I work on my image thing:

1. weight randomization (in my NN i never even bothered with backprop) (mostly because i don't get it)
2. importance of a non-linear function. even something as simple as ReLU
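For reference, a ReLU pair in the same style as the sigmoid helpers above would only be (a sketch, not code from this thread):

Code: Select all

'rectified linear unit and its derivative / slope
function relu(x as single) as single
	return iif(x > 0, x, 0)
end function

function drelu(x as single) as single
	return iif(x > 0, 1, 0)
end function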
badidea
Posts: 2586
Joined: May 24, 2007 22:10
Location: The Netherlands

Re: Another neural network experiment

Post by badidea »

Not much has changed, but a statistics display was added.
Now the simulation stops after 2000 successive correct answers.
I had to reduce the learning rate (LRT) for better results.
For multiple mid-layers, further tweaking of this parameter is needed.

Code: Select all

#include "fbgfx.bi"
#include "string.bi"

#define i8 byte
#define u8 ubyte
#define i16 short
#define u16 ushort
#define i32 long
#define u32 ulong
#define i64 longint
#define u64 ulongint
#define f32 single
#define f64 double

#define rnd_range(low, high)_
	(rnd(1) * (high - low) + low)

#define max(v1, v2)_
	(iif(v1 > v2, v1, v2))

#define min(v1, v2)_
	(iif(v1 < v2, v1, v2))

sub panic(text as string)
	print text
	getkey()
	end -1
end sub

'value range: -1...+1
function smart_color(value as f32) as u32
	if value > 0 then
		return rgb(255 - min(value * 256, 255), 255, 0)
	else
		return rgb(255, 255 - min(-value * 256, 255), 0)
	end if
end function

sub imageKill(byref p_img as any ptr)
	imageDestroy(p_img)
	p_img = 0 'byref, so the caller's pointer is cleared as well
end sub

'-------------------------------------------------------------------------------

const as i32 N_IMG = 10 'number of character images
const as i32 W_IMG = 12, H_IMG = 16 'width & height in pixels

'grey scale image, float 0...1 per pixel
type char_img_type
	dim as f32 intensity(0 to H_IMG - 1, 0 to W_IMG - 1) 'y,x!
	declare sub erode(count as i32)
	declare sub from_image(p_img as fb.image ptr)
	declare sub blur(runs as i32)
	declare sub stretch_contrast()
	declare sub put_(x as i32, y as i32)
end type

sub char_img_type.from_image(p_img as fb.image ptr)
	if p_img->width <> W_IMG then panic("Error: char_img_type.from_image: Width")
	if p_img->height <> H_IMG then panic("Error: char_img_type.from_image: height")
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			intensity(y, x) = (point(x, y, p_img) and &h000000ff) / 255
		next
	next
end sub

'set <count> random pixels to random intensities, skipping the outer border
sub char_img_type.erode(count as i32)
	for i as i32 = 0 to count - 1
		dim as i32 y = int(rnd * (H_IMG - 2)) + 1
		dim as i32 x = int(rnd * (W_IMG - 2)) + 1
		intensity(y, x) = rnd
	next
end sub

sub char_img_type.blur(runs as i32)
	dim as char_img_type char_img_blur
	dim as i32 weight(-1 to +1, -1 to +1) = _
		{{1,2,1},{2,4,2},{1,2,1}} 'blur matrix
	for i as i32 = 0 to runs - 1
		for y_tar as i32 = 0 to H_IMG - 1
			for x_tar as i32 = 0 to W_IMG - 1
				dim as i32 divider = 0
				dim as f32 integrator = 0
				for yi as i32 = -1 to +1
					dim as i32 y_src = y_tar + yi
					if y_src < 0 then continue for
					if y_src >= H_IMG then exit for
					for xi as i32 = -1 to +1
						dim as i32 x_src = x_tar + xi
						if x_src < 0 then continue for
						if x_src >= W_IMG then exit for
						divider += weight(xi, yi)
						integrator += intensity(y_src, x_src)
					next
				next
				integrator /= divider
				if integrator > 1 then panic("Error in 'blur': intensity > 1")
				if integrator < 0 then panic("Error in 'blur': intensity < 0")
				char_img_blur.intensity(y_tar, x_tar) = integrator
			next
		next
		'copy blurred image to original image
		this = char_img_blur
	next
end sub

'compensate for the contrast compression caused by blur
sub char_img_type.stretch_contrast()
	dim as f32 min_int = 1.0, max_int = 0.0
	'find min & max intensities
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			dim as f32 now_int = intensity(y, x)
			if now_int > max_int then max_int = now_int
			if now_int < min_int then min_int = now_int
		next
	next
	'do the stretch (skip uniform images to avoid division by zero)
	if max_int = min_int then exit sub
	for y as i32 = 0 to H_IMG - 1
		for x as i32 = 0 to W_IMG - 1
			intensity(y, x) = (intensity(y, x) - min_int) _
				* (1.0 - 0.0) / (max_int - min_int)
		next
	next
end sub

'draw on screen
sub char_img_type.put_(x as i32, y as i32)
	for yi as i32 = 0 to H_IMG - 1
		for xi as i32 = 0 to W_IMG - 1
			dim as i32 cur_int = intensity(yi, xi) * 255
			if cur_int > 255 then cur_int = 255
			if cur_int < 0 then cur_int = 0
			pset(x + xi, y + yi), rgb(cur_int, cur_int, cur_int)
		next
	next
end sub

'-------------------------------------------------------------------------------

'Logarithmic mean
#define log_mean(v1, v2)_
	((v1 - v2) / (log(v1) - log(v2)))

'a different mean
#define my_mean(v1, v2)_
	(sqr(v1 / v2) * v2)

'soft limit to 0...+1
function sigmoid(x as f32) as f32
	return 1 / (1 + exp(-x))
end function

'soft limit to -1...+1
function sigmoid_symetric(x as f32) as f32
	return 2 / (1 + exp(-x)) - 1
end function

'derivative / slope of sigmoid
'https://kawahara.ca/how-to-compute-the-derivative-of-a-sigmoid-function-fully-worked-example/
'https://beckernick.github.io/sigmoid-derivative-neural-network/
function dsigmoid(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return s * (1 - s)
end function

function dsigmoid_symetric(x as f32) as f32
	dim as f32 s = sigmoid(x)
	return 2 * s * (1 - s)
end function

'-------------------------------------------------------------------------------

const as single LRT = 0.005 'learning rate

type layer_type
	dim as f32 cell(any)
	dim as f32 err0r(any)
	dim as f32 link(any, any) 'N input <-> M output
end type

type nn_type
	dim as layer_type layer(any)
	declare constructor(num_cells() as i32)
	declare destructor()
	declare function set_input(source() as f32) as i32
	declare sub set_input2(p_src as f32 ptr)
	declare sub forward_propagation()
	declare function back_propagation(target() as f32) as i32
	declare function highest_output() as i32
	declare sub show_summary()
	declare sub visualise(w as i32, h as i32, target_only as i32)
end type

'build and initialise the neural network
constructor nn_type(num_cells() as i32)
	dim as i32 ub = ubound(num_cells)
	redim layer(0 to ub)
	for i as i32 = 0 to ub
		'crazy syntax, who needs pointers anyway?
		redim (layer(i).cell)(0 to num_cells(i) - 1)
		redim (layer(i).err0r)(0 to num_cells(i) - 1)
		'fill cells with initial values
		for j as i32 = 0 to num_cells(i) - 1
			layer(i).cell(j) = 0 'rnd_range(-1, +1)
			layer(i).err0r(j) = 0
		next
		'allocate memory for the links
		if i <> ub then 'skip final layer, no further links
			'more crazy business!
			redim (layer(i).link)(0 to num_cells(i) - 1, 0 to num_cells(i + 1) - 1)
			'fill links with initial values
			for j as i32 = 0 to num_cells(i) - 1
				for k as i32 = 0 to num_cells(i + 1) - 1
					layer(i).link(j, k) = rnd_range(-1, +1)
				next
			next
		end if
	next
	print "constructor nn_type() - done"
end constructor

'let's do some clean-up
destructor nn_type()
	for i as i32 = 0 to ubound(layer)
		erase layer(i).link
		erase layer(i).cell
		erase layer(i).err0r
	next
	erase layer
	print "destructor nn_type() - done"
end destructor

'copy source array to input layer
function nn_type.set_input(source() as f32) as i32
	dim as i32 ub_cells = ubound(layer(0).cell)
	'check num sources == num input cells
	if ubound(source) <> ub_cells then return -1
	for i as i32 = 0 to ub_cells
		layer(0).cell(i) = source(i)
	next
end function

'alternative (more dangerous) way to set the input layer
sub nn_type.set_input2(p_src as f32 ptr)
	for i as i32 = 0 to ubound(layer(0).cell)
		layer(0).cell(i) = p_src[i]
	next
end sub

sub nn_type.forward_propagation()
	'loop layers lo: 1 to N-1
	'for each output cell co:
	'value = Σ_ci(prev_layer.cell(ci) * link_to_this_cell(ci, co))
	for lo as i32 = 1 to ubound(layer)
		dim as i32 li = lo - 1
		for co as i32 = 0 to ubound(layer(lo).cell) 'loop output layer
			dim as f32 sum = 0.0
			for ci as i32 = 0 to ubound(layer(li).cell) 'loop input layer
				sum += layer(li).cell(ci) * layer(li).link(ci, co)
			next
			layer(lo).cell(co) = sum 'adjust output cell
		next
	next
end sub

function nn_type.back_propagation(target() as f32) as i32
	'steps:
	'- determine the output error(s): compare output(s) with target(s)
	'- for each mid layer (backwards), each cell:
	'  determine the cell error from the next layer's errors, via the link weights
	'- for each layer (forwards), each link:
	'  adjust link weight: += learning_rate * error * dsigmoid(cell_value) * input_cell
	dim as i32 ubLayer = ubound(layer)
	dim as i32 ubCell = ubound(layer(ubLayer).cell)
	if ubound(target) <> ubCell then return -1 'num targets <> num output cells
	dim as i32 ubLink
	'Determine output error(s)
	for i as i32 = 0 to ubCell 'loop cells of output layer
		layer(ubLayer).err0r(i) = target(i) - layer(ubLayer).cell(i)
	next
	'Determine mid layer errors
	for l as i32 = ubLayer - 1 to 1 step -1 'no errors needed for input layer 0
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			layer(l).err0r(i) = 0
			for j as i32 = 0 to ubLink 'loop links / output cells
				layer(l).err0r(i) += layer(l).link(i, j) * layer(l + 1).err0r(j)
			next
		next
	next
	'Update nerves / weights in forward direction
	for l as i32 = 0 to ubLayer - 1 'skip last layer, no further links 
		ubCell = ubound(layer(l).cell)
		ubLink = ubound(layer(l + 1).cell)
		for i as i32 = 0 to ubCell 'loop cells
			for j as i32 = 0 to ubLink 'loop links / output cells
				'here is where the magic happens, I hope
				layer(l).link(i, j) += LRT * layer(l + 1).err0r(j) _
					* dsigmoid(layer(l+1).cell(j)) * layer(l).cell(i)
			next
		next
	next
end function

function nn_type.highest_output() as i32
	dim as i32 last_layer = ubound(layer)
	dim as i32 i_max_val = -1
	dim as f32 max_val = -1e10
	for i as i32 = 0 to ubound(layer(last_layer).cell)
		if layer(last_layer).cell(i) > max_val then
			max_val = layer(last_layer).cell(i)
			i_max_val = i
		end if
	next
	if i_max_val = -1 then panic("Error: nn_type.highest_output()")
	return i_max_val
end function

sub nn_type.show_summary()
	print "num_layers: " & str(ubound(layer) + 1)
	for i as i32 = 0 to ubound(layer)
		print "layer(" & i & ").num_cells: "; str(ubound(layer(i).cell))
	next
end sub

'graphics screen needed, w = width, h = height
sub nn_type.visualise (w as i32, h as i32, target_only as i32)
	dim as i32 cell_radius = 10
	dim as u32 cell_color = &hff00ff00 'green
	dim as u32 link_color = &hff0077ff 'blueish
	dim as i32 w_dist = w / (ubound(layer) + 1)
	'line(0, 0)-(w-1, h-1), &hff777777, bf
	dim as i32 start_layer = iif(target_only = 0, 0, ubound(layer))
	for i as i32 = start_layer to ubound(layer)
		dim as i32 ub_cells = ubound(layer(i).cell)
		dim as i32 h_dist = h / (ub_cells + 1)
		'draw cells in layer
		for j as i32 = 0 to ub_cells
			cell_color = smart_color(layer(i).cell(j))
			dim as i32 x = w_dist * (i + 0.5)
			dim as i32 y = h_dist * (j + 0.5)
			circle (x, y), cell_radius, cell_color
			dim as string text = format(layer(i).cell(j), "0.000")
			draw string(x - len(text) * 4, y + 8), text', cell_color
			'draw string(x + 16, y - 8), text
		next
		'draw links to next layer
		if i <> ubound(layer) then 'skip last layer
			dim as i32 ub_cells_next = ubound(layer(i + 1).cell)
			dim as i32 h_dist_next = h / (ub_cells_next + 1)
			for j as i32 = 0 to ub_cells
				for k as i32 = 0 to ub_cells_next
					link_color = smart_color(layer(i).link(j, k))
					dim as i32 x1 = w_dist * (i + 0.5)
					dim as i32 y1 = h_dist * (j + 0.5)
					dim as i32 x2 = w_dist * (i + 1.5)
					dim as i32 y2 = h_dist_next * (k + 0.5)
					line(x1, y1)-(x2, y2), link_color
					dim as string text = format(layer(i).link(j, k), "0.000")
					circle ((x1 + x2)\2, (y1 + y2)\2), cell_radius \ 3, link_color
					draw string((x1 + x2)\2 - len(text) * 4, (y1 + y2)\2 + 8), text', link_color
				next
			next
		end if
	next
end sub

'-------------------------------------------------------------------------------

const as i32 SW = 800, SH = 600
screenres SW, SH, 32
width SW \ 8, SH \ 16

randomize timer '1234

const as i32 N_IMG = 10
const as i32 W_IMG = 12, H_IMG = 16 'width & height in pixels
dim as char_img_type char_img(0 to N_IMG - 1)

'create 10 images with bad quality numbers
dim as fb.image ptr p_img = imageCreate(W_IMG, H_IMG, &hff000000)
for i as i32 = 0 to ubound(char_img)
	line p_img, (0, 0)-(W_IMG - 1, H_IMG - 1), &hff000000, bf 'clear
	draw string p_img, (2, 0), str(i), &hffffffff 'draw char
	char_img(i).from_image(p_img)
	char_img(i).erode(20)
	char_img(i).blur(1)
	char_img(i).stretch_contrast()
	'char_img(i).put_(100 + i * 20, 100)
next
imageKill(p_img)

'define the neural network
dim as f32 source(0 to W_IMG * H_IMG - 1) 'inputs
dim as f32 target(0 to 9) 'outputs
dim as i32 source_size = ubound(source) + 1
dim as i32 target_size = ubound(target) + 1
dim as i32 mid_size = int(my_mean(source_size, target_size))
dim as i32 nn_size_def(...) = {source_size, mid_size, target_size}
'create the network, filled with random weight links
dim as nn_type nn = nn_type(nn_size_def())

'nn.show_summary()
dim as i32 pass = 0, fail = 0 'counters
dim as i32 ai_digit 'best answer from network
dim as i32 successive_pass_count = 0

dim as double t = timer()
dim as i32 loop_count = 0
while not multikey(1)
	dim as i32 digit = int(rnd * N_IMG) '10
	'set target array
	for i as i32 = 0 to N_IMG - 1
		target(i) = iif(i = digit, +1, -1)
	next
	'set input layer directly from char image, skip source array
	nn.set_input2(@char_img(digit).intensity(0, 0))
	nn.forward_propagation()
	ai_digit = nn.highest_output()
	if ai_digit = digit then
		successive_pass_count += 1
		pass += 1
	else
		successive_pass_count = 0 'reset
		fail += 1
	end if

	screenlock()
		cls()
		nn.visualise(SW, SH, 1)
		draw string (10, 30), "loop_count: " & loop_count
		draw string (10, 50), "match: " & iif(ai_digit = digit, "OK", "FAIL")
		draw string (10, 70), "successive_pass_count: " & successive_pass_count
		draw string (10, 90), "fail %: " & format((fail / max(loop_count, 1)) * 100, "0.0")
		draw string (10, 110), "pass %: " & format((pass / max(loop_count, 1)) * 100, "0.0")
		'draw string (SW - 50, (SH / 10) * (digit + 0.5)), "<-- " & str(digit)
		char_img(digit).put_(SW - 50, (SH / 10) * (digit + 0.5))
	screenunlock()

	'pause on spacebar
	if inkey = " " then
		while inkey = "": sleep 1 : wend
	end if

	'adjust the network, even if the result was good enough
	nn.back_propagation(target())

	sleep 1 'disable for faster run
	loop_count += 1
	if successive_pass_count = 2000 then exit while
wend
print "End. Press any key to exit. Duration [s]: " & format(timer() - t, "0.000")
getkey()

screen 0 'close graphics screen, for destructor printout

'TODO:
'training set + test set
'test with multiple mid layers
'decrease learnrate exponentially?
'store sigmoid at cell at forward propagation, for use during backward propagation
'clone network
'random mutation
'merge networks
'save/load network
dafhi wrote:
1. weight randomization (in my NN i never even bothered with backprop) (mostly because i don't get it)
It is not essential to know how the back-propagation works, but you do have to implement it correctly. Which I think I did this time. I noticed an error in my older (disappointing) neural network tryouts, which is why I am giving it another try now. And I want to combine this with 'evolutionary behavior'. After some test runs I see large differences in the training speed of randomly initialized networks. And I am pretty sure that nature also does not produce creatures (like us) with completely randomized neural links to start with.
BasicCoder2
Posts: 3906
Joined: Jan 01, 2009 7:03
Location: Australia

Re: Another neural network experiment

Post by BasicCoder2 »

@badidea
It is not essential to know how the back-propagation works, but you do have to implement it correctly
Indeed, you only need to understand the math behind backpropagation if you are designing a network, not simply using one.

The trick is coding a neural net class which any coder can instantiate with parameters determining the number of layers and neurons per layer and a set of training inputs with their desired outputs. In other words show a coder how they can use the network class in their own programs.
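badidea's nn_type above already has that shape; a minimal usage sketch (assuming the #defines and nn_type from the earlier posts, with made-up layer sizes and data) could look like:

Code: Select all

'3 inputs, one mid layer of 5 cells, 2 outputs
dim as i32 layout(...) = {3, 5, 2}
dim as nn_type net = nn_type(layout())

dim as f32 inputs(0 to 2) = {0.2, -0.7, 1.0}
dim as f32 targets(0 to 1) = {+1, -1}

net.set_input(inputs())
for i as i32 = 0 to 999 'simple training loop
	net.forward_propagation()
	net.back_propagation(targets())
next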