Skip to content

Commit b1929d3

Browse files
committed
fnn revisions, add Japanese alias demo
1 parent 0d13e5b commit b1929d3

13 files changed

+646
-327
lines changed

demos/cnn/demo.lisp

+3-3
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@
2626
(if (= 1 (slot-value idx-input 'rank))
2727
(slot-value idx-input 'data)
2828
(make-array (loop :for d :across (slot-value idx-input 'dimensions) :collect d)
29-
:displaced-to (slot-value idx-input 'data)
30-
:element-type (array-element-type (slot-value idx-input 'data))))))))
29+
:element-type (array-element-type (slot-value idx-input 'data))
30+
:displaced-to (slot-value idx-input 'data)))))))
3131

3232
(april-load (with (:space cnn-demo-space))
3333
(asdf:system-relative-pathname (intern (package-name *package*) "KEYWORD") "cnn.apl"))
@@ -101,7 +101,7 @@ tests ← 100 ⍝ 10000
101101
⎕ ← ' ' ⋄ ⎕ ← '--' ⋄ ⎕ ← ' '
102102
103103
t ← timeFactors⊥¯4↑⎕ts
104-
correct ← +/(⎕←tests↑[0]telabs) = ⎕←(tests↑[0]teimgs) testZhang⍤2⊢k1 b1 k2 b2 fc b
104+
correct ← +/(tests↑[0]telabs) = (tests↑[0]teimgs) testZhang⍤2⊢k1 b1 k2 b2 fc b
105105
106106
⎕ ← 'Recognition testing completed in ',formatElapsed t
107107
⎕ ← (⍕correct),' images out of ',(⍕tests),' recognized correctly.'

demos/fnn/demo.lisp

+120-35
Original file line numberDiff line numberDiff line change
@@ -33,28 +33,50 @@ MakeNormalArray ← {
3333
∘○ To Verify ○∘
3434
Normally Distributed Array Elements
3535
36-
Evaluate (verify-normal-array) to see an array of normally-distributed floating point numbers. Optionally, pass a shape like #(2 3 4) to this function to receive an array of the corresponding shape.
36+
* (verify-normal-array)
3737
38-
Evaluate (verify-normal-distrib) to see a vector representing the distribution of values generated by MakeNormalArray.
38+
Evaluate this to see an array of normally-distributed floating point numbers. Optionally, pass a shape like #(2 3 4) to this function to receive an array of the corresponding shape.
3939
40-
Evaluate (verify-plot-normal-distrib) to see a plot of the value distribution generated by MakeNormalArray. Try smaller counts as with (verify-plot-normal-vector 100) to see a less normal distribution and larger counts like 10,000 to see a more normal distribution. You may pass a width value as the second argument to control the width of the plot, i.e. (verify-plot-normal-vector :count 1000 :width 40) will produce a plot whose rows may be no longer than 40 characters.
40+
You can optionally pass a set of dimensions to the function:
41+
42+
(verify-normal-array 2 3)
43+
44+
* (verify-normal-distrib)
45+
46+
Evaluate this to see a vector representing the distribution of values generated by MakeNormalArray. This function optionally takes a length argument to determine the length of the normal vector created:
47+
48+
(verify-normal-distrib 100)
49+
50+
* (verify-plot-normal-distrib)
51+
52+
Evaluate this to see a plot of the value distribution generated by MakeNormalArray. To use fewer elements and see a less normal distribution, try:
53+
54+
(verify-plot-normal-distrib :count 100)
55+
56+
Or try with larger counts like 10,000 to see a more normal distribution. You may pass a width argument to control the width of the plot:
57+
58+
(verify-plot-normal-distrib :count 1000 :width 40)
59+
60+
This will produce a plot whose rows may be no longer than 40 characters.
4161
|#
4262

43-
(defun verify-normal-array (&optional (shape 10))
44-
"Generate an array of normally-distributed floating point numbers."
45-
(april-c (with (:space fnn-demo-space)) "MakeNormalArray" shape))
63+
(defun verify-normal-array (&rest shape)
64+
"Generate an array of normally-distributed floating point numbers with an optional shape."
65+
(let ((shape (or shape '(10))))
66+
(april-c (with (:space fnn-demo-space)) "MakeNormalArray" (coerce shape 'vector))))
4667

4768
(defun verify-normal-distrib (&optional (count 1000))
4869
"Generate a vector of integers reflecting the distribution of floating point numbers in an array produced by MakeNormalArray."
4970
(april-c (with (:space fnn-demo-space))
50-
"{{{¯1+≢⍵}⌸⍵,⍨⍳21}⊢⊢(2.0÷⍨¯11+⍳21)⍸MakeNormalArray ⍵}"
71+
"{{{1-⍨≢⍵}⌸⍵,⍨⍳21}(2÷⍨⎕IO-⍨¯10+⍳21)⍸MakeNormalArray ⍵}"
5172
count))
5273

5374
(defun verify-plot-normal-distrib (&key (count 1000) (width 80))
5475
"Plot the distribution of numbers generated using MakeNormalArray."
5576
(april-c (with (:space fnn-demo-space))
56-
"{⍺{⎕←↑'⎕'⍴¨⍨⌊⍺×⍵÷⌈/⍵}{{¯1+≢⍵}⌸⍵,⍨⍳21}⊢⊢(2.0÷⍨¯11+⍳21)⍸MakeNormalArray ⍵ ⋄ 'Plotted.'}"
57-
count width))
77+
"{⍺{⎕←↑'⎕'⍴¨⍨⌊⍺×⍵÷⌈/⍵}{{1-⍨≢⍵}⌸⍵,⍨⍳21}(2÷⍨⎕IO-⍨¯10+⍳21)⍸MakeNormalArray ⍵}"
78+
count width)
79+
(format nil "Distribution of ~a random numbers plotted with width ~a." count width))
5880

5981
#|
6082
-- Functions --
@@ -90,14 +112,19 @@ InitWeightMatrices ← {
90112
∘○ To Verify ○∘
91113
Neural Network Structure
92114
93-
Evaluate (verify-network-structure) to see the printed structure of a random neural network. Optionally, choose a shape for the network by passing a vector of dimensions to the function, as with (verify-network-structure #(2 5 3)).
115+
* (verify-network-structure)
116+
117+
Evaluate this to see the printed structure of a random neural network. Optionally, you can set a shape for the network by passing a set of dimensions as the argument, like this:
118+
119+
(verify-network-structure 1 3 2)
94120
|#
95121

96-
(defun verify-network-structure (&optional (shape #(3 6 2)))
122+
(defun verify-network-structure (&rest shape)
97123
"Display the structure of a neural network with an optionally specified shape."
98-
(april-c (with (:space fnn-demo-space))
99-
"{⎕←display InitNetwork ⍵ ⋄ 'Neural network structure.'}"
100-
shape))
124+
(let ((shape (or shape #(3 6 2))))
125+
(april-c (with (:space fnn-demo-space))
126+
"{⎕←display InitNetwork ⍵ ⋄ 'Printed network structure with base shape ',(⍕⍵),'.'}"
127+
(coerce shape 'vector))))
101128

102129
#|
103130
-- Function --
@@ -161,17 +188,32 @@ DF ← {
161188
∘○ To Verify ○∘
162189
Activation and Neural Network Output
163190
164-
Evaluate (verify-activation-output) to see the output of the LeakyReLU function with an optional input value and its standard leaky parameter of 0.1. Evaluate (verify-network-output) to see the printed output of a random neural network. As with (verify-network-structure), you can optionally choose a shape for the input network.
191+
* (verify-activation-output)
192+
193+
Evaluate this to see the output of the LeakyReLU function. You can pass it a series of input values:
194+
195+
(verify-activation-output 10.0d0 -20.0d0 30.0d0)
196+
197+
* (verify-network-output)
198+
199+
Evaluate this to see the printed output of a random neural network. As with (verify-network-structure), you can optionally set a shape:
200+
201+
(verify-network-output 2 5 3)
165202
|#
166203

167-
(defun verify-activation-output (&optional (input #(1.0d0 -2.0d0 3.0d0 -4.0d0)))
168-
(april-c (with (:space fnn-demo-space)) "LeakyReLU.F" input))
204+
(defun verify-activation-output (&rest input)
205+
"Display the output of the LeakyReLU activation function called on an optionally specified series of numbers."
206+
(let ((input (or input #(1.0d0 -2.0d0 3.0d0 -4.0d0))))
207+
(april-c (with (:space fnn-demo-space)) "LeakyReLU.F" (coerce input 'vector))))
169208

170-
(defun verify-network-output (&optional (shape #(3 6 2)))
209+
(defun verify-network-output (&rest shape)
171210
"Display the output of a forward pass through a neural network."
172-
(april-c (with (:space fnn-demo-space))
173-
"{⎕←display (InitNetwork ⍵) (LeakyReLU.F _ForwardPass) ⍪1 0 0 ⋄ 'Neural network output.'}"
174-
shape))
211+
(let ((shape (or shape #(3 6 2))))
212+
(april-c (with (:space fnn-demo-space))
213+
"{
214+
⎕←display (InitNetwork ⍵) (LeakyReLU.F _ForwardPass) ⍪1↑⍨⊃⍵
215+
'Printed output of neural network with shape ',(⍕⍵),'.'}"
216+
(coerce shape 'vector))))
175217

176218
#|
177219
-- Function --
@@ -209,9 +251,23 @@ DF ← {
209251
∘○ To Verify ○∘
210252
Loss Function and Its Derivative
211253
212-
Evaluate (verify-loss-convergence) to see a vector of the output from the derivative loss function with a given input and the output of the algorithmically derived loss function given the same input and a given dx value. For example, you can run (verify-loss-convergence :input 5 :dx 0.1) To see the same with a series of dx values, evaluate (verify-loss-convergence-series), optionally with input as with (verify-loss-convergence-series :input 5 :series #(0.1d0 0.01d0 0.001d0)).
254+
* (verify-loss-convergence)
255+
256+
Evaluate this to see a vector of the output from the derivative loss function with a given input and the output of the algorithmically derived loss function given the same input and a given dx value. For example:
257+
258+
(verify-loss-convergence :input 5 :dx 0.1)
259+
260+
* (verify-loss-convergence-series)
261+
262+
Evaluate this to see the same with a series of dx values, optionally with input:
213263
214-
The (verify-network-output-loss)
264+
(verify-loss-convergence-series :input 5 :series #(0.1d0 0.01d0 0.001d0)).
265+
266+
* (verify-network-output-loss)
267+
268+
Evaluate this to see the loss value for a given input and output to a network. You can set a shape for the network as well as specifying the input and target values:
269+
270+
(verify-network-output-loss :shape #(3 6 2) :input #(2 0) :target #(0 1))
215271
|#
216272

217273
(defun verify-loss-convergence (&key (input 3) (dx 0.1))
@@ -269,9 +325,11 @@ _Train ← {
269325
⊢⊢(⊢⊣⌽¯1↓xs){
270326
W ← ⍵⊃⌽Ws ⋄ b ← ⍵⊃⌽bs ⋄ x ← ⍺
271327
272-
dbs ,← ⊂dx×dfAct b+W+.×x
273-
dx ⊢← (⍉W)+.×⊃⌽dbs
274-
dWs ,← ⊂(⊃⌽dbs)+.×⍉x
328+
dbs ,← ⊂db ← dx×dfAct b+W+.×x
329+
⍝ dx ⊢← (⍉W)+.×⊃⌽dbs
330+
dx ⊢← db+.×⍨⍉W
331+
⍝ dWs ,← ⊂(⊃⌽dbs)+.×⍉x
332+
dWs ,← ⊂db+.×⍉x
275333
}¨⊢⊣⍳≢Ws
276334
(0.001×⌽dWs) (0.001×⌽dbs)
277335
}
@@ -281,7 +339,17 @@ _Train ← {
281339
∘○ To Verify ○∘
282340
Trained Network States
283341
284-
Evaluate (verify-training-output) to either 1. initialize a neural network if none is stored or 2. perform an iteration of training upon the stored neural network. You can start again with a fresh network by evaluating (verify-training-output-restart). As with (verify-loss-applied), (verify-training-output) can take 3 arguments specifying the shape of the network, its input and its target.
342+
* (verify-training-output)
343+
344+
Evaluate this to either 1. initialize a neural network if none is stored or 2. perform an iteration of training upon the stored neural network.
345+
346+
* (verify-training-output-restart)
347+
348+
Evaluate this to start again with a fresh neural network.
349+
350+
As with (verify-loss-applied), (verify-training-output) can take 3 arguments specifying the shape of the network, its input and its target:
351+
352+
(verify-training-output :shape #(3 6 2) :input #(0 2) :target 1)
285353
286354
The input and target values will be reshaped into vectors matching the first and last dimensions of the network, respectively. Changing the shape, input or target values when a network exists will cause the network to be rebuilt with those values applying.
287355
|#
@@ -291,9 +359,9 @@ The input and target values will be reshaped into vectors matching the first and
291359
(defun verify-training-output-restart ()
292360
"Clear the network state of the training output test function."
293361
(setf net-state nil))
294-
(defun verify-training-output (&optional (shape #(2 5 3)) (input 0) (target 0))
362+
(defun verify-training-output (&key (shape #(2 5 3)) (input 0) (target 0))
295363
"Get the output of a training iteration upon a neural network with an optionally specified shape, input and target."
296-
(unless (not (equalp shape net-shape))
364+
(unless (equalp shape net-shape)
297365
(setf net-shape shape
298366
net-state nil))
299367
(unless (equalp input net-input)
@@ -304,7 +372,7 @@ The input and target values will be reshaped into vectors matching the first and
304372
(setf net-target target
305373
derived-target (april-c "{⍪⍺⍴⍨⊃⌽⍵}" net-shape target)
306374
net-state nil))
307-
;; (print (list :ns net-state shape net-shape input net-input target net-target))
375+
;; (print (list :ns net-state shape net-shape input net-input))
308376
(if net-state
309377
(setf net-state (april (with (:space fnn-demo-space)
310378
(:state :in ((net net-state) (target derived-target)
@@ -381,11 +449,27 @@ The following functions implement tools for importing the MNIST data into arrays
381449
#|
382450
○∘ To Demonstrate ∘○
383451
384-
Evaluate (load-digit-training-data) followed by (build-digit-network) to load the MNIST training data and build a neural network. Then, run (train-digit-network) to train the network. Optionally, a count may be passed to (train-digit-network) if you wish to train the network on a limited subset of the MNIST training data rather than training on all 60,000 images.
452+
* (load-digit-training-data)
453+
454+
Evaluate this, followed by...
455+
456+
* (build-digit-network)
457+
458+
...to load the MNIST training data and build a neural network. Then:
459+
460+
* (train-digit-network)
461+
462+
Evaluate this to train the network. Optionally, a count argument may be passed if you wish to train the network on a limited subset of the MNIST training data rather than training on all 60,000 images:
463+
464+
(train-digit-network 2000)
385465
386466
The functions (get-net-state) (get-data-segment) and (set-data-segment) are utility functions which may be useful in analyzing the data passing through the network.
387467
468+
The (build-digit-network) function can optionally be passed a set of intermediate dimensions. For example:
469+
470+
(build-digit-network 12 18)
388471
472+
Will yield a training network with shape 784 12 18 10. The default shape is 784 16 16 10, which has been found to produce good results with the MNIST digit set, but you can use dimensional inputs to experiment with other structures.
389473
|#
390474

391475
(let ((net-shape) (net-state) (image-size 0) (training-data) (training-labels)
@@ -407,11 +491,12 @@ The functions (get-net-state) (get-data-segment) and (set-data-segment) are util
407491
:displaced-to training-data :displaced-index-offset 0)))
408492
(april-c "{'Loaded ',(⍕⍴⍵),' MNIST training images with labels.'}" training-labels))
409493

410-
(defun build-digit-network (&optional (intermediate-shape #(16 16)))
494+
(defun build-digit-network (&rest intermediate-shape)
411495
"Generate a neural network with an optionally specified intermediate state (not determined by the input or output shapes)."
412-
(setf net-shape (april-c "{⊃,/⍺ ⍵ 10}" intermediate-shape image-size)
413-
net-state (april-c (with (:space fnn-demo-space)) "InitNetwork" net-shape))
414-
(april-c "{'Built MNIST digit recognition network with shape ',(⍕⍵),'.'}" net-shape))
496+
(let ((intermediate-shape (or intermediate-shape #(16 16))))
497+
(setf net-shape (april-c "{⍵,⍺,10}" image-size (coerce intermediate-shape 'vector))
498+
net-state (april-c (with (:space fnn-demo-space)) "InitNetwork" net-shape))
499+
(april-c "{'Built MNIST digit recognition network with shape ',(⍕⍵),'.'}" net-shape)))
415500

416501
(defun get-net-state ()
417502
"Retrieve the state of the neural network."

functions.lisp

+3-3
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,8 @@
196196
(defun apl-gcd (comparison-tolerance)
197197
"Implementation of greatest common divisor extended to complex numbers based on the complex-floor function."
198198
(lambda (omega alpha)
199-
(if (or (complexp omega) (complexp alpha))
199+
(if (not (or (complexp omega) (complexp alpha)))
200+
(funcall (apl-xcy #'gcd) omega alpha)
200201
(if (zerop (funcall (apl-residue comparison-tolerance)
201202
omega alpha))
202203
alpha (if (or (not (integerp (realpart omega)))
@@ -229,8 +230,7 @@
229230
(if (< (- comparison-tolerance)
230231
(realpart residue)
231232
comparison-tolerance)
232-
residue (imagpart residue))))))
233-
(funcall (apl-xcy #'gcd) omega alpha))))
233+
residue (imagpart residue)))))))))
234234

235235
(defun apl-lcm (comparison-tolerance)
236236
"Implementation of lease common multiple extended to complex numbers based on the complex-floor function."

libraries/extensions/jkanji/README.md

+9
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# APRIL-EXT.JKANJI
2+
### _Andrew Sengul_
3+
4+
An extension to April aliasing the lexicon with Japanese kanji.
5+
6+
## License
7+
8+
Apache-2.0
9+
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
;;;; april-ext.jkanji.asd
2+
3+
(asdf:defsystem #:april-ext.jkanji
4+
:description "An extension to April aliasing the lexicon with Japanese kanji."
5+
:author "Andrew Sengul"
6+
:license "Apache-2.0"
7+
:version "1.0"
8+
:serial t
9+
:depends-on ("april")
10+
:components ((:file "package")
11+
(:file "jkanji")))

0 commit comments

Comments
 (0)