Prepare for pre-release 0.4.1~beta2
lukstafi committed Sep 13, 2024
1 parent 3e0596a commit 72d12ba
Showing 8 changed files with 104 additions and 18 deletions.
2 changes: 1 addition & 1 deletion arrayjit.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1~beta"
version: "0.4.1~beta2"
synopsis:
"An array language compiler with multiple backends (CPU, CUDA), staged compilation"
description:
4 changes: 2 additions & 2 deletions bin/moons_benchmark.ml
@@ -46,8 +46,8 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_devices ~batch_size ~b
(* let data_len = 3 * 4 in *)
let flat_len = data_len / 2 in
(* Note: [minibatch_size = batch_size / num_devices] is the actual per-device batch used. *)
-let epochs = 200 in
-(* let epochs = 50 in *)
+(* let epochs = 200 in *)
+let epochs = 100 in
(* TINY for debugging: *)
(* let epochs = 2 in *)
(* let epochs = 1 in *)
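The note in the hunk above about [minibatch_size = batch_size / num_devices] is plain integer arithmetic; a minimal illustration follows, where the helper function is hypothetical and only the relationship itself comes from the comment in the diff:

(* Hypothetical helper illustrating the note above: the per-device minibatch
   is the global batch size split evenly across the devices. *)
let minibatch_size ~batch_size ~num_devices = batch_size / num_devices

let () =
  (* For example, a global batch of 20 on 4 devices gives 5 samples per device per step. *)
  assert (minibatch_size ~batch_size:20 ~num_devices:4 = 5)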
2 changes: 1 addition & 1 deletion dune-project
@@ -4,7 +4,7 @@

(name ocannl)

-(version 0.4.1~beta)
+(version 0.4.1~beta2)

(generate_opam_files true)

12 changes: 6 additions & 6 deletions lib/tensor.mli
@@ -44,16 +44,16 @@ val with_unchanged_roots : f:(unit -> 'a) -> 'a
val default_value_prec : Arrayjit.Ops.prec ref
(** The default precision for the value node of terminal (i.e. non-composite) tensors.
-Note: the precision of a node can be set arbitrarily via {!Tnode.update_precision}. The default
-precision for value nodes of composite tensors is the maximum of precisions of the value nodes
-of sub-tensors. *)
+Note: the precision of a node can be set arbitrarily via {!Arrayjit.Tnode.update_precision}. The
+default precision for value nodes of composite tensors is the maximum of precisions of the value
+nodes of sub-tensors. *)

val default_grad_prec : Arrayjit.Ops.prec ref
(** The default precision for the gradient node of terminal (i.e. non-composite) tensors.
-Note: the precision of a node can be set arbitrarily via {!Tnode.update_precision}. The default
-precision for gradient nodes of composite tensors is the maximum of precisions of the gradient
-nodes of sub-tensors. *)
+Note: the precision of a node can be set arbitrarily via {!Arrayjit.Tnode.update_precision}. The
+default precision for gradient nodes of composite tensors is the maximum of precisions of the
+gradient nodes of sub-tensors. *)

exception Session_error of string * t option

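For context on the doc comments touched above, here is a minimal sketch of how these defaults can be set from client code. The two refs come from the signature shown in the diff; the precision constant names (single, double in Arrayjit.Ops) are assumptions, not taken from this commit:

(* Sketch only: set global defaults before creating terminal tensors.
   [Tensor.default_value_prec] and [Tensor.default_grad_prec] are the refs
   declared above; the [Arrayjit.Ops.single] / [Arrayjit.Ops.double] constants
   are assumed here and may be named differently in the actual library. *)
let () =
  Tensor.default_value_prec := Arrayjit.Ops.single;
  Tensor.default_grad_prec := Arrayjit.Ops.double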
2 changes: 1 addition & 1 deletion neural_nets_lib.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1~beta"
version: "0.4.1~beta2"
synopsis:
"A from-scratch Deep Learning framework with an optimizing compiler, shape inference, concise syntax"
description:
2 changes: 1 addition & 1 deletion ocannl_npy.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1~beta"
version: "0.4.1~beta2"
synopsis: "Numpy file format support for ocaml"
maintainer: ["Lukasz Stafiniak <[email protected]>"]
authors: ["Laurent Mazare"]
9 changes: 5 additions & 4 deletions test/micrograd_demo.ml
@@ -304,8 +304,8 @@ let%expect_test "Micrograd half-moons example" =
- - - - - -
- -
-
- - - -
- - -
-
-
@@ -326,7 +326,7 @@ let%expect_test "Micrograd half-moons example" =
[%expect
{|
Learning rate:
--1.002e-1-
+-1.003e-1-
-----
-----
-----
@@ -358,7 +358,8 @@ let%expect_test "Micrograd half-moons example" =
-2.000e-1----
──────────┼────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
0.000e+0 3.990e+2
-step |}];
+step
+|}];

(* Testing how the syntax extension %op creates labels for the resulting tensors: *)
Stdio.printf "mlp_result's name: %s\n%!" @@ Tensor.debug_name mlp_result;
89 changes: 87 additions & 2 deletions test/moons_demo_parallel.ml
@@ -168,6 +168,91 @@ Half-moons scatterplot and decision boundary:
***********.................................................%%%%%%%%%%%%%%%%%.%%%%%%%%%%%%%%%%%%%.......................
**********......................................................%%%%%%.%%%%%%%%%%%%%%%%%%%%%%%..........................
*********..........................................................%....%%%%%.%%..%%%%...%..............................|}
in
let new_typical_target =
{|
Batch=19, step=20, lr=0.195250, batch loss=0.263683, epoch loss=45.768524
Epoch=0, step=20, lr=0.195250, epoch loss=45.768524
Batch=19, step=40, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
Epoch=1, step=40, lr=0.190250, epoch loss=5.662821
Batch=19, step=60, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
Epoch=2, step=60, lr=0.185250, epoch loss=5.259723
Batch=19, step=80, lr=0.180250, batch loss=0.191768, epoch loss=5.281074
Epoch=3, step=80, lr=0.180250, epoch loss=5.281074
Batch=19, step=100, lr=0.175250, batch loss=0.186862, epoch loss=5.097180
Epoch=4, step=100, lr=0.175250, epoch loss=5.097180
Batch=19, step=120, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
Epoch=5, step=120, lr=0.170250, epoch loss=4.987223
Batch=19, step=140, lr=0.165250, batch loss=0.178275, epoch loss=4.835103
Epoch=6, step=140, lr=0.165250, epoch loss=4.835103
Batch=19, step=160, lr=0.160250, batch loss=0.165620, epoch loss=4.702625
Epoch=7, step=160, lr=0.160250, epoch loss=4.702625
Batch=19, step=180, lr=0.155250, batch loss=0.156137, epoch loss=4.458982
Epoch=8, step=180, lr=0.155250, epoch loss=4.458982
Batch=19, step=200, lr=0.150250, batch loss=0.139483, epoch loss=4.074086
Epoch=9, step=200, lr=0.150250, epoch loss=4.074086
Batch=19, step=220, lr=0.145250, batch loss=0.118495, epoch loss=3.605957
Epoch=10, step=220, lr=0.145250, epoch loss=3.605957
Batch=19, step=240, lr=0.140250, batch loss=0.091701, epoch loss=3.061533
Epoch=11, step=240, lr=0.140250, epoch loss=3.061533
Batch=19, step=260, lr=0.135250, batch loss=0.062137, epoch loss=2.342423
Epoch=12, step=260, lr=0.135250, epoch loss=2.342423
Batch=19, step=280, lr=0.130250, batch loss=0.030009, epoch loss=1.588885
Epoch=13, step=280, lr=0.130250, epoch loss=1.588885
Batch=19, step=300, lr=0.125250, batch loss=0.016336, epoch loss=0.904919
Epoch=14, step=300, lr=0.125250, epoch loss=0.904919
Batch=19, step=320, lr=0.120250, batch loss=0.009264, epoch loss=0.579104
Epoch=15, step=320, lr=0.120250, epoch loss=0.579104
Batch=19, step=340, lr=0.115250, batch loss=0.007289, epoch loss=0.451346
Epoch=16, step=340, lr=0.115250, epoch loss=0.451346
Batch=19, step=360, lr=0.110250, batch loss=0.005304, epoch loss=0.342835
Epoch=17, step=360, lr=0.110250, epoch loss=0.342835
Batch=19, step=380, lr=0.105250, batch loss=0.004483, epoch loss=0.259518
Epoch=18, step=380, lr=0.105250, epoch loss=0.259518
Batch=19, step=400, lr=0.100250, batch loss=0.004777, epoch loss=0.212706
Epoch=19, step=400, lr=0.100250, epoch loss=0.212706

Half-moons scatterplot and decision boundary:
***************************************#********************************************************************************
***************************#*#*#########*###**######********************************************************************
***************************######*####*#*#####*########*#***************************************************************
*********************#**#########**#######*###############*###**********************************************************
******************####*####################################*###*********************************************************
***************#*#*###*###*###########*#*##*#####################*******************************************************
************#*######**#########*##*****************##*##*########*#*****************************************************
*************########*#*###*#**********************#******####*######*************************************************..
**************#######*#*##******************************#########*##*##********************************************.....
**********#######*###*#****************************************###**###*#***************************************........
********#*######**##****************.....*********************#*##*####*#************************************...........
********###*#*#**##************..............******************###########*#*******************************.............
******########**************.........%....%.%...*******************##########*****************************............%.
*******#######*************...........%...........******************##*######***************************.........%.%..%.
****##########************............%%%.%%%......*****************##########*************************........%..%%%%%.
*****######*#************............%%%.%...........**************#*#########***********************..........%.%.%%..%
**######*#***************............%%%%%%%%..........****************#*##*###*********************............%%%%%%%.
**##*#####**************..............%%%%%%%...........**************#########*******************..............%%.%%%..
**########*************..............%%%%%%%%.............**************##*######****************...............%%%%%%%.
*########**************..............%%%.%%%.%%.............*************#####*****************...............%%%%%%%%%.
*########*************................%%%%%%%%%..............************###*##*#*************.................%%%%%%%..
##*######*************................%%%%%%%.%................***********######*#**********..................%%%%%%%%..
######*##************.................%%.%%%%%%..................*********########*********...................%%%%.%%.%.
###*##**#***********...................%.%%%%%%%%.................*********#####*#********...................%%%%%%%%...
##*#####************....................%%%%%%.%.%..................******#*#*####******....................%%.%%%%%....
#####*##***********.....................%.%%%%%%%%...................*****##**##*******...................%%%%%%%%%%%...
**#*##*#**********.......................%%%.%%%%%.%...................***#####*#****......................%%%%%%%......
##****##**********........................%%.%%%%%%%%....................***###*##**....................%%%%%%%%%%......
*****************.........................%%.%%%%%%%......................********......................%..%%.%%%.......
****************............................%...%%%%%.%%....................*****..................%.%%%%%%%%%%.........
****************..............................%.%%%%%.%%%%....................*....................%%%%%%%%.%.%%........
***************.................................%..%%%%%...%......................................%%%%%%%%%%............
**************....................................%%%.%%%%%%%%..............................%%..%%%%.%%%%%.%............
**************...................................%%%.%%%%%%.%%...%.........................%.%%%%%%%.%%%.%..............
*************........................................%.%%%.%%%%%%%%%...................%.%%%%%%%%%%%%%.%.%..............
************..........................................%.%%%%.%%%%%%%%%.%%%%%%%%%.%.%%%%%%%%%%%%%%%%%%%.%................
************............................................%%%%%%%%%%%%%%%%%%%%%.%%%%%%%.%%%.%%%%%%%%%%....................
***********.................................................%%%%%%%%%%%%%%%%%.%%%%%%%%%%%%%%%%%%%.......................
**********......................................................%%%%%%.%%%%%%%%%%%%%%%%%%%%%%%..........................
**********.........................................................%....%%%%%.%%..%%%%...%..............................|}
in
let arm64_and_s390x_target =
{|
@@ -340,8 +425,8 @@ Half-moons scatterplot and decision boundary:
**********.........................................................%....%%%%%.%%..%%%%...%..............................|}
in
let result_deltas =
-List.map [ typical_target; arm64_and_s390x_target; ppc64_target ] ~f:(fun target ->
-Expect_test_patdiff.patdiff target result)
+List.map [ new_typical_target; typical_target; arm64_and_s390x_target; ppc64_target ]
+~f:(fun target -> Expect_test_patdiff.patdiff target result)
in
(if List.exists ~f:String.is_empty result_deltas then
Stdio.print_string "moons_demo_parallel result is as expected"
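The hunk above extends the list of acceptable outputs. A minimal sketch of this multi-target expect-test pattern follows; apart from Expect_test_patdiff.patdiff, the Base list functions and Stdio, the helper names are illustrative, and the real test's else-branch is not shown in this diff:

(* Sketch of the multi-target expect-test pattern used above: the produced
   [result] is diffed against every acceptable target, and an empty diff
   against any of them means the result is accepted (e.g. to tolerate
   per-architecture numeric differences). *)
let accepts ~result targets =
  let deltas = List.map targets ~f:(fun target -> Expect_test_patdiff.patdiff target result) in
  List.exists deltas ~f:String.is_empty

let report ~result targets =
  if accepts ~result targets then Stdio.print_string "result is as expected"
  else Stdio.print_string "result differs from all known targets"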
