gitweb.factorcode.org Git - factor.git/commitdiff
tensors: updated with the latest tensors vocab.
authorJohn Benediktsson <mrjbq7@gmail.com>
Tue, 28 Apr 2020 03:15:33 +0000 (20:15 -0700)
committerJohn Benediktsson <mrjbq7@gmail.com>
Tue, 28 Apr 2020 03:15:33 +0000 (20:15 -0700)
rebased and merged #2283

extra/tensors/benchmark/benchmark.factor
extra/tensors/demos/data.csv [new file with mode: 0644]
extra/tensors/demos/demos.factor [new file with mode: 0644]
extra/tensors/demos/target.csv [new file with mode: 0644]
extra/tensors/tensors-docs.factor
extra/tensors/tensors-tests.factor
extra/tensors/tensors.factor

index 6de3a25ce9d5688d605a8d6cd0148eaf565cd629..c3b883e4b1a6a3e92196eb7a7ca548c284e1f348 100644 (file)
@@ -1,50 +1,28 @@
 ! Copyright (C) 2019 HMC Clinic.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: arrays io kernel locals math prettyprint tensors tools.time ;
+USING: arrays kernel locals math math.functions math.statistics memory
+sequences tools.time ;
 IN: tensors.benchmark
 
-<PRIVATE
+! puts items from els (a quotation) on stack, runs ops (a quot w no stack effect) n times
+! returns an array with times (ns) for each trial
+:: benchmark-multiple ( els: ( -- .. ) op: ( .. -- .. ) n -- ..arr )
+    ! put els on stack
+    els call
+    ! create array
+    n 0 <array> :> arr
+    ! perform op n times
+    n [ gc [ op benchmark ] dip arr set-nth ] each-integer
+    arr ; inline
 
-:: add-tensors ( trials elems -- time )
-    ! Create the arrays to be added
-    elems 1array naturals dup
-    ! Benchmark!
-    [ trials [ 2dup t+ drop ] times ] benchmark
-    ! Normalize
-    trials / >float
-    nip nip ;
 
-:: matmul-tensors ( trials elems -- time )
-    ! Create the arrays to be multiplied
-    elems elems 2array naturals dup
-    ! Benchmark!
-    [ trials [ 2dup matmul drop ] times ] benchmark
-    ! Normalize
-    trials / >float
-    nip nip ;
-
-:: transpose-tensor ( trials elems -- time )
-    ! Create the array to be transposed
-    elems elems 2array naturals
-    ! benchmark
-    [ trials [ dup transpose drop ] times ] benchmark
-    ! Normalize
-    trials / >float
-    nip ;
-
-PRIVATE>
-
-: run-benchmarks ( -- )
-    "Benchmarking the tensors vocabulary" print
-    "Add two 100 element tensors" print
-    1000000 100 add-tensors .
-    "Add two 100,000 element tensors" print
-    10000 100000 add-tensors .
-    "Multiply two 10x10 matrices" print
-    100000 10 matmul-tensors .
-    "Multiply two 100x100 matrices" print
-    1000 100 matmul-tensors .
-    "Transpose a 10x10 matrix" print
-    10000 10 transpose-tensor .
-    "Transpose a 100x100 matrix" print
-    10 100 transpose-tensor . ;
+! finds the confidence interval of seq with significance level 95
+:: confidence-interval ( seq -- {c1,c2} )
+    seq mean :> m
+    ! HARDCODING ALERT: z value for alpha = 95 is 1.96
+    seq sample-std 1.96 *
+    ! div by sqrt(n)
+    seq length sqrt / :> modifier
+    m modifier -
+    m modifier +
+    2array ;
diff --git a/extra/tensors/demos/data.csv b/extra/tensors/demos/data.csv
new file mode 100644 (file)
index 0000000..b780ecd
--- /dev/null
@@ -0,0 +1,506 @@
+0.00632,18,2.31,0,0.538,6.575,65.2,4.09,1,296,15.3,396.9,4.98\r
+0.02731,0,7.07,0,0.469,6.421,78.9,4.9671,2,242,17.8,396.9,9.14\r
+0.02729,0,7.07,0,0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03\r
+0.03237,0,2.18,0,0.458,6.998,45.8,6.0622,3,222,18.7,394.63,2.94\r
+0.06905,0,2.18,0,0.458,7.147,54.2,6.0622,3,222,18.7,396.9,5.33\r
+0.02985,0,2.18,0,0.458,6.43,58.7,6.0622,3,222,18.7,394.12,5.21\r
+0.08829,12.5,7.87,0,0.524,6.012,66.6,5.5605,5,311,15.2,395.6,12.43\r
+0.14455,12.5,7.87,0,0.524,6.172,96.1,5.9505,5,311,15.2,396.9,19.15\r
+0.21124,12.5,7.87,0,0.524,5.631,100,6.0821,5,311,15.2,386.63,29.93\r
+0.17004,12.5,7.87,0,0.524,6.004,85.9,6.5921,5,311,15.2,386.71,17.1\r
+0.22489,12.5,7.87,0,0.524,6.377,94.3,6.3467,5,311,15.2,392.52,20.45\r
+0.11747,12.5,7.87,0,0.524,6.009,82.9,6.2267,5,311,15.2,396.9,13.27\r
+0.09378,12.5,7.87,0,0.524,5.889,39,5.4509,5,311,15.2,390.5,15.71\r
+0.62976,0,8.14,0,0.538,5.949,61.8,4.7075,4,307,21,396.9,8.26\r
+0.63796,0,8.14,0,0.538,6.096,84.5,4.4619,4,307,21,380.02,10.26\r
+0.62739,0,8.14,0,0.538,5.834,56.5,4.4986,4,307,21,395.62,8.47\r
+1.05393,0,8.14,0,0.538,5.935,29.3,4.4986,4,307,21,386.85,6.58\r
+0.7842,0,8.14,0,0.538,5.99,81.7,4.2579,4,307,21,386.75,14.67\r
+0.80271,0,8.14,0,0.538,5.456,36.6,3.7965,4,307,21,288.99,11.69\r
+0.7258,0,8.14,0,0.538,5.727,69.5,3.7965,4,307,21,390.95,11.28\r
+1.25179,0,8.14,0,0.538,5.57,98.1,3.7979,4,307,21,376.57,21.02\r
+0.85204,0,8.14,0,0.538,5.965,89.2,4.0123,4,307,21,392.53,13.83\r
+1.23247,0,8.14,0,0.538,6.142,91.7,3.9769,4,307,21,396.9,18.72\r
+0.98843,0,8.14,0,0.538,5.813,100,4.0952,4,307,21,394.54,19.88\r
+0.75026,0,8.14,0,0.538,5.924,94.1,4.3996,4,307,21,394.33,16.3\r
+0.84054,0,8.14,0,0.538,5.599,85.7,4.4546,4,307,21,303.42,16.51\r
+0.67191,0,8.14,0,0.538,5.813,90.3,4.682,4,307,21,376.88,14.81\r
+0.95577,0,8.14,0,0.538,6.047,88.8,4.4534,4,307,21,306.38,17.28\r
+0.77299,0,8.14,0,0.538,6.495,94.4,4.4547,4,307,21,387.94,12.8\r
+1.00245,0,8.14,0,0.538,6.674,87.3,4.239,4,307,21,380.23,11.98\r
+1.13081,0,8.14,0,0.538,5.713,94.1,4.233,4,307,21,360.17,22.6\r
+1.35472,0,8.14,0,0.538,6.072,100,4.175,4,307,21,376.73,13.04\r
+1.38799,0,8.14,0,0.538,5.95,82,3.99,4,307,21,232.6,27.71\r
+1.15172,0,8.14,0,0.538,5.701,95,3.7872,4,307,21,358.77,18.35\r
+1.61282,0,8.14,0,0.538,6.096,96.9,3.7598,4,307,21,248.31,20.34\r
+0.06417,0,5.96,0,0.499,5.933,68.2,3.3603,5,279,19.2,396.9,9.68\r
+0.09744,0,5.96,0,0.499,5.841,61.4,3.3779,5,279,19.2,377.56,11.41\r
+0.08014,0,5.96,0,0.499,5.85,41.5,3.9342,5,279,19.2,396.9,8.77\r
+0.17505,0,5.96,0,0.499,5.966,30.2,3.8473,5,279,19.2,393.43,10.13\r
+0.02763,75,2.95,0,0.428,6.595,21.8,5.4011,3,252,18.3,395.63,4.32\r
+0.03359,75,2.95,0,0.428,7.024,15.8,5.4011,3,252,18.3,395.62,1.98\r
+0.12744,0,6.91,0,0.448,6.77,2.9,5.7209,3,233,17.9,385.41,4.84\r
+0.1415,0,6.91,0,0.448,6.169,6.6,5.7209,3,233,17.9,383.37,5.81\r
+0.15936,0,6.91,0,0.448,6.211,6.5,5.7209,3,233,17.9,394.46,7.44\r
+0.12269,0,6.91,0,0.448,6.069,40,5.7209,3,233,17.9,389.39,9.55\r
+0.17142,0,6.91,0,0.448,5.682,33.8,5.1004,3,233,17.9,396.9,10.21\r
+0.18836,0,6.91,0,0.448,5.786,33.3,5.1004,3,233,17.9,396.9,14.15\r
+0.22927,0,6.91,0,0.448,6.03,85.5,5.6894,3,233,17.9,392.74,18.8\r
+0.25387,0,6.91,0,0.448,5.399,95.3,5.87,3,233,17.9,396.9,30.81\r
+0.21977,0,6.91,0,0.448,5.602,62,6.0877,3,233,17.9,396.9,16.2\r
+0.08873,21,5.64,0,0.439,5.963,45.7,6.8147,4,243,16.8,395.56,13.45\r
+0.04337,21,5.64,0,0.439,6.115,63,6.8147,4,243,16.8,393.97,9.43\r
+0.0536,21,5.64,0,0.439,6.511,21.1,6.8147,4,243,16.8,396.9,5.28\r
+0.04981,21,5.64,0,0.439,5.998,21.4,6.8147,4,243,16.8,396.9,8.43\r
+0.0136,75,4,0,0.41,5.888,47.6,7.3197,3,469,21.1,396.9,14.8\r
+0.01311,90,1.22,0,0.403,7.249,21.9,8.6966,5,226,17.9,395.93,4.81\r
+0.02055,85,0.74,0,0.41,6.383,35.7,9.1876,2,313,17.3,396.9,5.77\r
+0.01432,100,1.32,0,0.411,6.816,40.5,8.3248,5,256,15.1,392.9,3.95\r
+0.15445,25,5.13,0,0.453,6.145,29.2,7.8148,8,284,19.7,390.68,6.86\r
+0.10328,25,5.13,0,0.453,5.927,47.2,6.932,8,284,19.7,396.9,9.22\r
+0.14932,25,5.13,0,0.453,5.741,66.2,7.2254,8,284,19.7,395.11,13.15\r
+0.17171,25,5.13,0,0.453,5.966,93.4,6.8185,8,284,19.7,378.08,14.44\r
+0.11027,25,5.13,0,0.453,6.456,67.8,7.2255,8,284,19.7,396.9,6.73\r
+0.1265,25,5.13,0,0.453,6.762,43.4,7.9809,8,284,19.7,395.58,9.5\r
+0.01951,17.5,1.38,0,0.4161,7.104,59.5,9.2229,3,216,18.6,393.24,8.05\r
+0.03584,80,3.37,0,0.398,6.29,17.8,6.6115,4,337,16.1,396.9,4.67\r
+0.04379,80,3.37,0,0.398,5.787,31.1,6.6115,4,337,16.1,396.9,10.24\r
+0.05789,12.5,6.07,0,0.409,5.878,21.4,6.498,4,345,18.9,396.21,8.1\r
+0.13554,12.5,6.07,0,0.409,5.594,36.8,6.498,4,345,18.9,396.9,13.09\r
+0.12816,12.5,6.07,0,0.409,5.885,33,6.498,4,345,18.9,396.9,8.79\r
+0.08826,0,10.81,0,0.413,6.417,6.6,5.2873,4,305,19.2,383.73,6.72\r
+0.15876,0,10.81,0,0.413,5.961,17.5,5.2873,4,305,19.2,376.94,9.88\r
+0.09164,0,10.81,0,0.413,6.065,7.8,5.2873,4,305,19.2,390.91,5.52\r
+0.19539,0,10.81,0,0.413,6.245,6.2,5.2873,4,305,19.2,377.17,7.54\r
+0.07896,0,12.83,0,0.437,6.273,6,4.2515,5,398,18.7,394.92,6.78\r
+0.09512,0,12.83,0,0.437,6.286,45,4.5026,5,398,18.7,383.23,8.94\r
+0.10153,0,12.83,0,0.437,6.279,74.5,4.0522,5,398,18.7,373.66,11.97\r
+0.08707,0,12.83,0,0.437,6.14,45.8,4.0905,5,398,18.7,386.96,10.27\r
+0.05646,0,12.83,0,0.437,6.232,53.7,5.0141,5,398,18.7,386.4,12.34\r
+0.08387,0,12.83,0,0.437,5.874,36.6,4.5026,5,398,18.7,396.06,9.1\r
+0.04113,25,4.86,0,0.426,6.727,33.5,5.4007,4,281,19,396.9,5.29\r
+0.04462,25,4.86,0,0.426,6.619,70.4,5.4007,4,281,19,395.63,7.22\r
+0.03659,25,4.86,0,0.426,6.302,32.2,5.4007,4,281,19,396.9,6.72\r
+0.03551,25,4.86,0,0.426,6.167,46.7,5.4007,4,281,19,390.64,7.51\r
+0.05059,0,4.49,0,0.449,6.389,48,4.7794,3,247,18.5,396.9,9.62\r
+0.05735,0,4.49,0,0.449,6.63,56.1,4.4377,3,247,18.5,392.3,6.53\r
+0.05188,0,4.49,0,0.449,6.015,45.1,4.4272,3,247,18.5,395.99,12.86\r
+0.07151,0,4.49,0,0.449,6.121,56.8,3.7476,3,247,18.5,395.15,8.44\r
+0.0566,0,3.41,0,0.489,7.007,86.3,3.4217,2,270,17.8,396.9,5.5\r
+0.05302,0,3.41,0,0.489,7.079,63.1,3.4145,2,270,17.8,396.06,5.7\r
+0.04684,0,3.41,0,0.489,6.417,66.1,3.0923,2,270,17.8,392.18,8.81\r
+0.03932,0,3.41,0,0.489,6.405,73.9,3.0921,2,270,17.8,393.55,8.2\r
+0.04203,28,15.04,0,0.464,6.442,53.6,3.6659,4,270,18.2,395.01,8.16\r
+0.02875,28,15.04,0,0.464,6.211,28.9,3.6659,4,270,18.2,396.33,6.21\r
+0.04294,28,15.04,0,0.464,6.249,77.3,3.615,4,270,18.2,396.9,10.59\r
+0.12204,0,2.89,0,0.445,6.625,57.8,3.4952,2,276,18,357.98,6.65\r
+0.11504,0,2.89,0,0.445,6.163,69.6,3.4952,2,276,18,391.83,11.34\r
+0.12083,0,2.89,0,0.445,8.069,76,3.4952,2,276,18,396.9,4.21\r
+0.08187,0,2.89,0,0.445,7.82,36.9,3.4952,2,276,18,393.53,3.57\r
+0.0686,0,2.89,0,0.445,7.416,62.5,3.4952,2,276,18,396.9,6.19\r
+0.14866,0,8.56,0,0.52,6.727,79.9,2.7778,5,384,20.9,394.76,9.42\r
+0.11432,0,8.56,0,0.52,6.781,71.3,2.8561,5,384,20.9,395.58,7.67\r
+0.22876,0,8.56,0,0.52,6.405,85.4,2.7147,5,384,20.9,70.8,10.63\r
+0.21161,0,8.56,0,0.52,6.137,87.4,2.7147,5,384,20.9,394.47,13.44\r
+0.1396,0,8.56,0,0.52,6.167,90,2.421,5,384,20.9,392.69,12.33\r
+0.13262,0,8.56,0,0.52,5.851,96.7,2.1069,5,384,20.9,394.05,16.47\r
+0.1712,0,8.56,0,0.52,5.836,91.9,2.211,5,384,20.9,395.67,18.66\r
+0.13117,0,8.56,0,0.52,6.127,85.2,2.1224,5,384,20.9,387.69,14.09\r
+0.12802,0,8.56,0,0.52,6.474,97.1,2.4329,5,384,20.9,395.24,12.27\r
+0.26363,0,8.56,0,0.52,6.229,91.2,2.5451,5,384,20.9,391.23,15.55\r
+0.10793,0,8.56,0,0.52,6.195,54.4,2.7778,5,384,20.9,393.49,13\r
+0.10084,0,10.01,0,0.547,6.715,81.6,2.6775,6,432,17.8,395.59,10.16\r
+0.12329,0,10.01,0,0.547,5.913,92.9,2.3534,6,432,17.8,394.95,16.21\r
+0.22212,0,10.01,0,0.547,6.092,95.4,2.548,6,432,17.8,396.9,17.09\r
+0.14231,0,10.01,0,0.547,6.254,84.2,2.2565,6,432,17.8,388.74,10.45\r
+0.17134,0,10.01,0,0.547,5.928,88.2,2.4631,6,432,17.8,344.91,15.76\r
+0.13158,0,10.01,0,0.547,6.176,72.5,2.7301,6,432,17.8,393.3,12.04\r
+0.15098,0,10.01,0,0.547,6.021,82.6,2.7474,6,432,17.8,394.51,10.3\r
+0.13058,0,10.01,0,0.547,5.872,73.1,2.4775,6,432,17.8,338.63,15.37\r
+0.14476,0,10.01,0,0.547,5.731,65.2,2.7592,6,432,17.8,391.5,13.61\r
+0.06899,0,25.65,0,0.581,5.87,69.7,2.2577,2,188,19.1,389.15,14.37\r
+0.07165,0,25.65,0,0.581,6.004,84.1,2.1974,2,188,19.1,377.67,14.27\r
+0.09299,0,25.65,0,0.581,5.961,92.9,2.0869,2,188,19.1,378.09,17.93\r
+0.15038,0,25.65,0,0.581,5.856,97,1.9444,2,188,19.1,370.31,25.41\r
+0.09849,0,25.65,0,0.581,5.879,95.8,2.0063,2,188,19.1,379.38,17.58\r
+0.16902,0,25.65,0,0.581,5.986,88.4,1.9929,2,188,19.1,385.02,14.81\r
+0.38735,0,25.65,0,0.581,5.613,95.6,1.7572,2,188,19.1,359.29,27.26\r
+0.25915,0,21.89,0,0.624,5.693,96,1.7883,4,437,21.2,392.11,17.19\r
+0.32543,0,21.89,0,0.624,6.431,98.8,1.8125,4,437,21.2,396.9,15.39\r
+0.88125,0,21.89,0,0.624,5.637,94.7,1.9799,4,437,21.2,396.9,18.34\r
+0.34006,0,21.89,0,0.624,6.458,98.9,2.1185,4,437,21.2,395.04,12.6\r
+1.19294,0,21.89,0,0.624,6.326,97.7,2.271,4,437,21.2,396.9,12.26\r
+0.59005,0,21.89,0,0.624,6.372,97.9,2.3274,4,437,21.2,385.76,11.12\r
+0.32982,0,21.89,0,0.624,5.822,95.4,2.4699,4,437,21.2,388.69,15.03\r
+0.97617,0,21.89,0,0.624,5.757,98.4,2.346,4,437,21.2,262.76,17.31\r
+0.55778,0,21.89,0,0.624,6.335,98.2,2.1107,4,437,21.2,394.67,16.96\r
+0.32264,0,21.89,0,0.624,5.942,93.5,1.9669,4,437,21.2,378.25,16.9\r
+0.35233,0,21.89,0,0.624,6.454,98.4,1.8498,4,437,21.2,394.08,14.59\r
+0.2498,0,21.89,0,0.624,5.857,98.2,1.6686,4,437,21.2,392.04,21.32\r
+0.54452,0,21.89,0,0.624,6.151,97.9,1.6687,4,437,21.2,396.9,18.46\r
+0.2909,0,21.89,0,0.624,6.174,93.6,1.6119,4,437,21.2,388.08,24.16\r
+1.62864,0,21.89,0,0.624,5.019,100,1.4394,4,437,21.2,396.9,34.41\r
+3.32105,0,19.58,1,0.871,5.403,100,1.3216,5,403,14.7,396.9,26.82\r
+4.0974,0,19.58,0,0.871,5.468,100,1.4118,5,403,14.7,396.9,26.42\r
+2.77974,0,19.58,0,0.871,4.903,97.8,1.3459,5,403,14.7,396.9,29.29\r
+2.37934,0,19.58,0,0.871,6.13,100,1.4191,5,403,14.7,172.91,27.8\r
+2.15505,0,19.58,0,0.871,5.628,100,1.5166,5,403,14.7,169.27,16.65\r
+2.36862,0,19.58,0,0.871,4.926,95.7,1.4608,5,403,14.7,391.71,29.53\r
+2.33099,0,19.58,0,0.871,5.186,93.8,1.5296,5,403,14.7,356.99,28.32\r
+2.73397,0,19.58,0,0.871,5.597,94.9,1.5257,5,403,14.7,351.85,21.45\r
+1.6566,0,19.58,0,0.871,6.122,97.3,1.618,5,403,14.7,372.8,14.1\r
+1.49632,0,19.58,0,0.871,5.404,100,1.5916,5,403,14.7,341.6,13.28\r
+1.12658,0,19.58,1,0.871,5.012,88,1.6102,5,403,14.7,343.28,12.12\r
+2.14918,0,19.58,0,0.871,5.709,98.5,1.6232,5,403,14.7,261.95,15.79\r
+1.41385,0,19.58,1,0.871,6.129,96,1.7494,5,403,14.7,321.02,15.12\r
+3.53501,0,19.58,1,0.871,6.152,82.6,1.7455,5,403,14.7,88.01,15.02\r
+2.44668,0,19.58,0,0.871,5.272,94,1.7364,5,403,14.7,88.63,16.14\r
+1.22358,0,19.58,0,0.605,6.943,97.4,1.8773,5,403,14.7,363.43,4.59\r
+1.34284,0,19.58,0,0.605,6.066,100,1.7573,5,403,14.7,353.89,6.43\r
+1.42502,0,19.58,0,0.871,6.51,100,1.7659,5,403,14.7,364.31,7.39\r
+1.27346,0,19.58,1,0.605,6.25,92.6,1.7984,5,403,14.7,338.92,5.5\r
+1.46336,0,19.58,0,0.605,7.489,90.8,1.9709,5,403,14.7,374.43,1.73\r
+1.83377,0,19.58,1,0.605,7.802,98.2,2.0407,5,403,14.7,389.61,1.92\r
+1.51902,0,19.58,1,0.605,8.375,93.9,2.162,5,403,14.7,388.45,3.32\r
+2.24236,0,19.58,0,0.605,5.854,91.8,2.422,5,403,14.7,395.11,11.64\r
+2.924,0,19.58,0,0.605,6.101,93,2.2834,5,403,14.7,240.16,9.81\r
+2.01019,0,19.58,0,0.605,7.929,96.2,2.0459,5,403,14.7,369.3,3.7\r
+1.80028,0,19.58,0,0.605,5.877,79.2,2.4259,5,403,14.7,227.61,12.14\r
+2.3004,0,19.58,0,0.605,6.319,96.1,2.1,5,403,14.7,297.09,11.1\r
+2.44953,0,19.58,0,0.605,6.402,95.2,2.2625,5,403,14.7,330.04,11.32\r
+1.20742,0,19.58,0,0.605,5.875,94.6,2.4259,5,403,14.7,292.29,14.43\r
+2.3139,0,19.58,0,0.605,5.88,97.3,2.3887,5,403,14.7,348.13,12.03\r
+0.13914,0,4.05,0,0.51,5.572,88.5,2.5961,5,296,16.6,396.9,14.69\r
+0.09178,0,4.05,0,0.51,6.416,84.1,2.6463,5,296,16.6,395.5,9.04\r
+0.08447,0,4.05,0,0.51,5.859,68.7,2.7019,5,296,16.6,393.23,9.64\r
+0.06664,0,4.05,0,0.51,6.546,33.1,3.1323,5,296,16.6,390.96,5.33\r
+0.07022,0,4.05,0,0.51,6.02,47.2,3.5549,5,296,16.6,393.23,10.11\r
+0.05425,0,4.05,0,0.51,6.315,73.4,3.3175,5,296,16.6,395.6,6.29\r
+0.06642,0,4.05,0,0.51,6.86,74.4,2.9153,5,296,16.6,391.27,6.92\r
+0.0578,0,2.46,0,0.488,6.98,58.4,2.829,3,193,17.8,396.9,5.04\r
+0.06588,0,2.46,0,0.488,7.765,83.3,2.741,3,193,17.8,395.56,7.56\r
+0.06888,0,2.46,0,0.488,6.144,62.2,2.5979,3,193,17.8,396.9,9.45\r
+0.09103,0,2.46,0,0.488,7.155,92.2,2.7006,3,193,17.8,394.12,4.82\r
+0.10008,0,2.46,0,0.488,6.563,95.6,2.847,3,193,17.8,396.9,5.68\r
+0.08308,0,2.46,0,0.488,5.604,89.8,2.9879,3,193,17.8,391,13.98\r
+0.06047,0,2.46,0,0.488,6.153,68.8,3.2797,3,193,17.8,387.11,13.15\r
+0.05602,0,2.46,0,0.488,7.831,53.6,3.1992,3,193,17.8,392.63,4.45\r
+0.07875,45,3.44,0,0.437,6.782,41.1,3.7886,5,398,15.2,393.87,6.68\r
+0.12579,45,3.44,0,0.437,6.556,29.1,4.5667,5,398,15.2,382.84,4.56\r
+0.0837,45,3.44,0,0.437,7.185,38.9,4.5667,5,398,15.2,396.9,5.39\r
+0.09068,45,3.44,0,0.437,6.951,21.5,6.4798,5,398,15.2,377.68,5.1\r
+0.06911,45,3.44,0,0.437,6.739,30.8,6.4798,5,398,15.2,389.71,4.69\r
+0.08664,45,3.44,0,0.437,7.178,26.3,6.4798,5,398,15.2,390.49,2.87\r
+0.02187,60,2.93,0,0.401,6.8,9.9,6.2196,1,265,15.6,393.37,5.03\r
+0.01439,60,2.93,0,0.401,6.604,18.8,6.2196,1,265,15.6,376.7,4.38\r
+0.01381,80,0.46,0,0.422,7.875,32,5.6484,4,255,14.4,394.23,2.97\r
+0.04011,80,1.52,0,0.404,7.287,34.1,7.309,2,329,12.6,396.9,4.08\r
+0.04666,80,1.52,0,0.404,7.107,36.6,7.309,2,329,12.6,354.31,8.61\r
+0.03768,80,1.52,0,0.404,7.274,38.3,7.309,2,329,12.6,392.2,6.62\r
+0.0315,95,1.47,0,0.403,6.975,15.3,7.6534,3,402,17,396.9,4.56\r
+0.01778,95,1.47,0,0.403,7.135,13.9,7.6534,3,402,17,384.3,4.45\r
+0.03445,82.5,2.03,0,0.415,6.162,38.4,6.27,2,348,14.7,393.77,7.43\r
+0.02177,82.5,2.03,0,0.415,7.61,15.7,6.27,2,348,14.7,395.38,3.11\r
+0.0351,95,2.68,0,0.4161,7.853,33.2,5.118,4,224,14.7,392.78,3.81\r
+0.02009,95,2.68,0,0.4161,8.034,31.9,5.118,4,224,14.7,390.55,2.88\r
+0.13642,0,10.59,0,0.489,5.891,22.3,3.9454,4,277,18.6,396.9,10.87\r
+0.22969,0,10.59,0,0.489,6.326,52.5,4.3549,4,277,18.6,394.87,10.97\r
+0.25199,0,10.59,0,0.489,5.783,72.7,4.3549,4,277,18.6,389.43,18.06\r
+0.13587,0,10.59,1,0.489,6.064,59.1,4.2392,4,277,18.6,381.32,14.66\r
+0.43571,0,10.59,1,0.489,5.344,100,3.875,4,277,18.6,396.9,23.09\r
+0.17446,0,10.59,1,0.489,5.96,92.1,3.8771,4,277,18.6,393.25,17.27\r
+0.37578,0,10.59,1,0.489,5.404,88.6,3.665,4,277,18.6,395.24,23.98\r
+0.21719,0,10.59,1,0.489,5.807,53.8,3.6526,4,277,18.6,390.94,16.03\r
+0.14052,0,10.59,0,0.489,6.375,32.3,3.9454,4,277,18.6,385.81,9.38\r
+0.28955,0,10.59,0,0.489,5.412,9.8,3.5875,4,277,18.6,348.93,29.55\r
+0.19802,0,10.59,0,0.489,6.182,42.4,3.9454,4,277,18.6,393.63,9.47\r
+0.0456,0,13.89,1,0.55,5.888,56,3.1121,5,276,16.4,392.8,13.51\r
+0.07013,0,13.89,0,0.55,6.642,85.1,3.4211,5,276,16.4,392.78,9.69\r
+0.11069,0,13.89,1,0.55,5.951,93.8,2.8893,5,276,16.4,396.9,17.92\r
+0.11425,0,13.89,1,0.55,6.373,92.4,3.3633,5,276,16.4,393.74,10.5\r
+0.35809,0,6.2,1,0.507,6.951,88.5,2.8617,8,307,17.4,391.7,9.71\r
+0.40771,0,6.2,1,0.507,6.164,91.3,3.048,8,307,17.4,395.24,21.46\r
+0.62356,0,6.2,1,0.507,6.879,77.7,3.2721,8,307,17.4,390.39,9.93\r
+0.6147,0,6.2,0,0.507,6.618,80.8,3.2721,8,307,17.4,396.9,7.6\r
+0.31533,0,6.2,0,0.504,8.266,78.3,2.8944,8,307,17.4,385.05,4.14\r
+0.52693,0,6.2,0,0.504,8.725,83,2.8944,8,307,17.4,382,4.63\r
+0.38214,0,6.2,0,0.504,8.04,86.5,3.2157,8,307,17.4,387.38,3.13\r
+0.41238,0,6.2,0,0.504,7.163,79.9,3.2157,8,307,17.4,372.08,6.36\r
+0.29819,0,6.2,0,0.504,7.686,17,3.3751,8,307,17.4,377.51,3.92\r
+0.44178,0,6.2,0,0.504,6.552,21.4,3.3751,8,307,17.4,380.34,3.76\r
+0.537,0,6.2,0,0.504,5.981,68.1,3.6715,8,307,17.4,378.35,11.65\r
+0.46296,0,6.2,0,0.504,7.412,76.9,3.6715,8,307,17.4,376.14,5.25\r
+0.57529,0,6.2,0,0.507,8.337,73.3,3.8384,8,307,17.4,385.91,2.47\r
+0.33147,0,6.2,0,0.507,8.247,70.4,3.6519,8,307,17.4,378.95,3.95\r
+0.44791,0,6.2,1,0.507,6.726,66.5,3.6519,8,307,17.4,360.2,8.05\r
+0.33045,0,6.2,0,0.507,6.086,61.5,3.6519,8,307,17.4,376.75,10.88\r
+0.52058,0,6.2,1,0.507,6.631,76.5,4.148,8,307,17.4,388.45,9.54\r
+0.51183,0,6.2,0,0.507,7.358,71.6,4.148,8,307,17.4,390.07,4.73\r
+0.08244,30,4.93,0,0.428,6.481,18.5,6.1899,6,300,16.6,379.41,6.36\r
+0.09252,30,4.93,0,0.428,6.606,42.2,6.1899,6,300,16.6,383.78,7.37\r
+0.11329,30,4.93,0,0.428,6.897,54.3,6.3361,6,300,16.6,391.25,11.38\r
+0.10612,30,4.93,0,0.428,6.095,65.1,6.3361,6,300,16.6,394.62,12.4\r
+0.1029,30,4.93,0,0.428,6.358,52.9,7.0355,6,300,16.6,372.75,11.22\r
+0.12757,30,4.93,0,0.428,6.393,7.8,7.0355,6,300,16.6,374.71,5.19\r
+0.20608,22,5.86,0,0.431,5.593,76.5,7.9549,7,330,19.1,372.49,12.5\r
+0.19133,22,5.86,0,0.431,5.605,70.2,7.9549,7,330,19.1,389.13,18.46\r
+0.33983,22,5.86,0,0.431,6.108,34.9,8.0555,7,330,19.1,390.18,9.16\r
+0.19657,22,5.86,0,0.431,6.226,79.2,8.0555,7,330,19.1,376.14,10.15\r
+0.16439,22,5.86,0,0.431,6.433,49.1,7.8265,7,330,19.1,374.71,9.52\r
+0.19073,22,5.86,0,0.431,6.718,17.5,7.8265,7,330,19.1,393.74,6.56\r
+0.1403,22,5.86,0,0.431,6.487,13,7.3967,7,330,19.1,396.28,5.9\r
+0.21409,22,5.86,0,0.431,6.438,8.9,7.3967,7,330,19.1,377.07,3.59\r
+0.08221,22,5.86,0,0.431,6.957,6.8,8.9067,7,330,19.1,386.09,3.53\r
+0.36894,22,5.86,0,0.431,8.259,8.4,8.9067,7,330,19.1,396.9,3.54\r
+0.04819,80,3.64,0,0.392,6.108,32,9.2203,1,315,16.4,392.89,6.57\r
+0.03548,80,3.64,0,0.392,5.876,19.1,9.2203,1,315,16.4,395.18,9.25\r
+0.01538,90,3.75,0,0.394,7.454,34.2,6.3361,3,244,15.9,386.34,3.11\r
+0.61154,20,3.97,0,0.647,8.704,86.9,1.801,5,264,13,389.7,5.12\r
+0.66351,20,3.97,0,0.647,7.333,100,1.8946,5,264,13,383.29,7.79\r
+0.65665,20,3.97,0,0.647,6.842,100,2.0107,5,264,13,391.93,6.9\r
+0.54011,20,3.97,0,0.647,7.203,81.8,2.1121,5,264,13,392.8,9.59\r
+0.53412,20,3.97,0,0.647,7.52,89.4,2.1398,5,264,13,388.37,7.26\r
+0.52014,20,3.97,0,0.647,8.398,91.5,2.2885,5,264,13,386.86,5.91\r
+0.82526,20,3.97,0,0.647,7.327,94.5,2.0788,5,264,13,393.42,11.25\r
+0.55007,20,3.97,0,0.647,7.206,91.6,1.9301,5,264,13,387.89,8.1\r
+0.76162,20,3.97,0,0.647,5.56,62.8,1.9865,5,264,13,392.4,10.45\r
+0.7857,20,3.97,0,0.647,7.014,84.6,2.1329,5,264,13,384.07,14.79\r
+0.57834,20,3.97,0,0.575,8.297,67,2.4216,5,264,13,384.54,7.44\r
+0.5405,20,3.97,0,0.575,7.47,52.6,2.872,5,264,13,390.3,3.16\r
+0.09065,20,6.96,1,0.464,5.92,61.5,3.9175,3,223,18.6,391.34,13.65\r
+0.29916,20,6.96,0,0.464,5.856,42.1,4.429,3,223,18.6,388.65,13\r
+0.16211,20,6.96,0,0.464,6.24,16.3,4.429,3,223,18.6,396.9,6.59\r
+0.1146,20,6.96,0,0.464,6.538,58.7,3.9175,3,223,18.6,394.96,7.73\r
+0.22188,20,6.96,1,0.464,7.691,51.8,4.3665,3,223,18.6,390.77,6.58\r
+0.05644,40,6.41,1,0.447,6.758,32.9,4.0776,4,254,17.6,396.9,3.53\r
+0.09604,40,6.41,0,0.447,6.854,42.8,4.2673,4,254,17.6,396.9,2.98\r
+0.10469,40,6.41,1,0.447,7.267,49,4.7872,4,254,17.6,389.25,6.05\r
+0.06127,40,6.41,1,0.447,6.826,27.6,4.8628,4,254,17.6,393.45,4.16\r
+0.07978,40,6.41,0,0.447,6.482,32.1,4.1403,4,254,17.6,396.9,7.19\r
+0.21038,20,3.33,0,0.4429,6.812,32.2,4.1007,5,216,14.9,396.9,4.85\r
+0.03578,20,3.33,0,0.4429,7.82,64.5,4.6947,5,216,14.9,387.31,3.76\r
+0.03705,20,3.33,0,0.4429,6.968,37.2,5.2447,5,216,14.9,392.23,4.59\r
+0.06129,20,3.33,1,0.4429,7.645,49.7,5.2119,5,216,14.9,377.07,3.01\r
+0.01501,90,1.21,1,0.401,7.923,24.8,5.885,1,198,13.6,395.52,3.16\r
+0.00906,90,2.97,0,0.4,7.088,20.8,7.3073,1,285,15.3,394.72,7.85\r
+0.01096,55,2.25,0,0.389,6.453,31.9,7.3073,1,300,15.3,394.72,8.23\r
+0.01965,80,1.76,0,0.385,6.23,31.5,9.0892,1,241,18.2,341.6,12.93\r
+0.03871,52.5,5.32,0,0.405,6.209,31.3,7.3172,6,293,16.6,396.9,7.14\r
+0.0459,52.5,5.32,0,0.405,6.315,45.6,7.3172,6,293,16.6,396.9,7.6\r
+0.04297,52.5,5.32,0,0.405,6.565,22.9,7.3172,6,293,16.6,371.72,9.51\r
+0.03502,80,4.95,0,0.411,6.861,27.9,5.1167,4,245,19.2,396.9,3.33\r
+0.07886,80,4.95,0,0.411,7.148,27.7,5.1167,4,245,19.2,396.9,3.56\r
+0.03615,80,4.95,0,0.411,6.63,23.4,5.1167,4,245,19.2,396.9,4.7\r
+0.08265,0,13.92,0,0.437,6.127,18.4,5.5027,4,289,16,396.9,8.58\r
+0.08199,0,13.92,0,0.437,6.009,42.3,5.5027,4,289,16,396.9,10.4\r
+0.12932,0,13.92,0,0.437,6.678,31.1,5.9604,4,289,16,396.9,6.27\r
+0.05372,0,13.92,0,0.437,6.549,51,5.9604,4,289,16,392.85,7.39\r
+0.14103,0,13.92,0,0.437,5.79,58,6.32,4,289,16,396.9,15.84\r
+0.06466,70,2.24,0,0.4,6.345,20.1,7.8278,5,358,14.8,368.24,4.97\r
+0.05561,70,2.24,0,0.4,7.041,10,7.8278,5,358,14.8,371.58,4.74\r
+0.04417,70,2.24,0,0.4,6.871,47.4,7.8278,5,358,14.8,390.86,6.07\r
+0.03537,34,6.09,0,0.433,6.59,40.4,5.4917,7,329,16.1,395.75,9.5\r
+0.09266,34,6.09,0,0.433,6.495,18.4,5.4917,7,329,16.1,383.61,8.67\r
+0.1,34,6.09,0,0.433,6.982,17.7,5.4917,7,329,16.1,390.43,4.86\r
+0.05515,33,2.18,0,0.472,7.236,41.1,4.022,7,222,18.4,393.68,6.93\r
+0.05479,33,2.18,0,0.472,6.616,58.1,3.37,7,222,18.4,393.36,8.93\r
+0.07503,33,2.18,0,0.472,7.42,71.9,3.0992,7,222,18.4,396.9,6.47\r
+0.04932,33,2.18,0,0.472,6.849,70.3,3.1827,7,222,18.4,396.9,7.53\r
+0.49298,0,9.9,0,0.544,6.635,82.5,3.3175,4,304,18.4,396.9,4.54\r
+0.3494,0,9.9,0,0.544,5.972,76.7,3.1025,4,304,18.4,396.24,9.97\r
+2.63548,0,9.9,0,0.544,4.973,37.8,2.5194,4,304,18.4,350.45,12.64\r
+0.79041,0,9.9,0,0.544,6.122,52.8,2.6403,4,304,18.4,396.9,5.98\r
+0.26169,0,9.9,0,0.544,6.023,90.4,2.834,4,304,18.4,396.3,11.72\r
+0.26938,0,9.9,0,0.544,6.266,82.8,3.2628,4,304,18.4,393.39,7.9\r
+0.3692,0,9.9,0,0.544,6.567,87.3,3.6023,4,304,18.4,395.69,9.28\r
+0.25356,0,9.9,0,0.544,5.705,77.7,3.945,4,304,18.4,396.42,11.5\r
+0.31827,0,9.9,0,0.544,5.914,83.2,3.9986,4,304,18.4,390.7,18.33\r
+0.24522,0,9.9,0,0.544,5.782,71.7,4.0317,4,304,18.4,396.9,15.94\r
+0.40202,0,9.9,0,0.544,6.382,67.2,3.5325,4,304,18.4,395.21,10.36\r
+0.47547,0,9.9,0,0.544,6.113,58.8,4.0019,4,304,18.4,396.23,12.73\r
+0.1676,0,7.38,0,0.493,6.426,52.3,4.5404,5,287,19.6,396.9,7.2\r
+0.18159,0,7.38,0,0.493,6.376,54.3,4.5404,5,287,19.6,396.9,6.87\r
+0.35114,0,7.38,0,0.493,6.041,49.9,4.7211,5,287,19.6,396.9,7.7\r
+0.28392,0,7.38,0,0.493,5.708,74.3,4.7211,5,287,19.6,391.13,11.74\r
+0.34109,0,7.38,0,0.493,6.415,40.1,4.7211,5,287,19.6,396.9,6.12\r
+0.19186,0,7.38,0,0.493,6.431,14.7,5.4159,5,287,19.6,393.68,5.08\r
+0.30347,0,7.38,0,0.493,6.312,28.9,5.4159,5,287,19.6,396.9,6.15\r
+0.24103,0,7.38,0,0.493,6.083,43.7,5.4159,5,287,19.6,396.9,12.79\r
+0.06617,0,3.24,0,0.46,5.868,25.8,5.2146,4,430,16.9,382.44,9.97\r
+0.06724,0,3.24,0,0.46,6.333,17.2,5.2146,4,430,16.9,375.21,7.34\r
+0.04544,0,3.24,0,0.46,6.144,32.2,5.8736,4,430,16.9,368.57,9.09\r
+0.05023,35,6.06,0,0.4379,5.706,28.4,6.6407,1,304,16.9,394.02,12.43\r
+0.03466,35,6.06,0,0.4379,6.031,23.3,6.6407,1,304,16.9,362.25,7.83\r
+0.05083,0,5.19,0,0.515,6.316,38.1,6.4584,5,224,20.2,389.71,5.68\r
+0.03738,0,5.19,0,0.515,6.31,38.5,6.4584,5,224,20.2,389.4,6.75\r
+0.03961,0,5.19,0,0.515,6.037,34.5,5.9853,5,224,20.2,396.9,8.01\r
+0.03427,0,5.19,0,0.515,5.869,46.3,5.2311,5,224,20.2,396.9,9.8\r
+0.03041,0,5.19,0,0.515,5.895,59.6,5.615,5,224,20.2,394.81,10.56\r
+0.03306,0,5.19,0,0.515,6.059,37.3,4.8122,5,224,20.2,396.14,8.51\r
+0.05497,0,5.19,0,0.515,5.985,45.4,4.8122,5,224,20.2,396.9,9.74\r
+0.06151,0,5.19,0,0.515,5.968,58.5,4.8122,5,224,20.2,396.9,9.29\r
+0.01301,35,1.52,0,0.442,7.241,49.3,7.0379,1,284,15.5,394.74,5.49\r
+0.02498,0,1.89,0,0.518,6.54,59.7,6.2669,1,422,15.9,389.96,8.65\r
+0.02543,55,3.78,0,0.484,6.696,56.4,5.7321,5,370,17.6,396.9,7.18\r
+0.03049,55,3.78,0,0.484,6.874,28.1,6.4654,5,370,17.6,387.97,4.61\r
+0.03113,0,4.39,0,0.442,6.014,48.5,8.0136,3,352,18.8,385.64,10.53\r
+0.06162,0,4.39,0,0.442,5.898,52.3,8.0136,3,352,18.8,364.61,12.67\r
+0.0187,85,4.15,0,0.429,6.516,27.7,8.5353,4,351,17.9,392.43,6.36\r
+0.01501,80,2.01,0,0.435,6.635,29.7,8.344,4,280,17,390.94,5.99\r
+0.02899,40,1.25,0,0.429,6.939,34.5,8.7921,1,335,19.7,389.85,5.89\r
+0.06211,40,1.25,0,0.429,6.49,44.4,8.7921,1,335,19.7,396.9,5.98\r
+0.0795,60,1.69,0,0.411,6.579,35.9,10.7103,4,411,18.3,370.78,5.49\r
+0.07244,60,1.69,0,0.411,5.884,18.5,10.7103,4,411,18.3,392.33,7.79\r
+0.01709,90,2.02,0,0.41,6.728,36.1,12.1265,5,187,17,384.46,4.5\r
+0.04301,80,1.91,0,0.413,5.663,21.9,10.5857,4,334,22,382.8,8.05\r
+0.10659,80,1.91,0,0.413,5.936,19.5,10.5857,4,334,22,376.04,5.57\r
+8.98296,0,18.1,1,0.77,6.212,97.4,2.1222,24,666,20.2,377.73,17.6\r
+3.8497,0,18.1,1,0.77,6.395,91,2.5052,24,666,20.2,391.34,13.27\r
+5.20177,0,18.1,1,0.77,6.127,83.4,2.7227,24,666,20.2,395.43,11.48\r
+4.26131,0,18.1,0,0.77,6.112,81.3,2.5091,24,666,20.2,390.74,12.67\r
+4.54192,0,18.1,0,0.77,6.398,88,2.5182,24,666,20.2,374.56,7.79\r
+3.83684,0,18.1,0,0.77,6.251,91.1,2.2955,24,666,20.2,350.65,14.19\r
+3.67822,0,18.1,0,0.77,5.362,96.2,2.1036,24,666,20.2,380.79,10.19\r
+4.22239,0,18.1,1,0.77,5.803,89,1.9047,24,666,20.2,353.04,14.64\r
+3.47428,0,18.1,1,0.718,8.78,82.9,1.9047,24,666,20.2,354.55,5.29\r
+4.55587,0,18.1,0,0.718,3.561,87.9,1.6132,24,666,20.2,354.7,7.12\r
+3.69695,0,18.1,0,0.718,4.963,91.4,1.7523,24,666,20.2,316.03,14\r
+13.5222,0,18.1,0,0.631,3.863,100,1.5106,24,666,20.2,131.42,13.33\r
+4.89822,0,18.1,0,0.631,4.97,100,1.3325,24,666,20.2,375.52,3.26\r
+5.66998,0,18.1,1,0.631,6.683,96.8,1.3567,24,666,20.2,375.33,3.73\r
+6.53876,0,18.1,1,0.631,7.016,97.5,1.2024,24,666,20.2,392.05,2.96\r
+9.2323,0,18.1,0,0.631,6.216,100,1.1691,24,666,20.2,366.15,9.53\r
+8.26725,0,18.1,1,0.668,5.875,89.6,1.1296,24,666,20.2,347.88,8.88\r
+11.1081,0,18.1,0,0.668,4.906,100,1.1742,24,666,20.2,396.9,34.77\r
+18.4982,0,18.1,0,0.668,4.138,100,1.137,24,666,20.2,396.9,37.97\r
+19.6091,0,18.1,0,0.671,7.313,97.9,1.3163,24,666,20.2,396.9,13.44\r
+15.288,0,18.1,0,0.671,6.649,93.3,1.3449,24,666,20.2,363.02,23.24\r
+9.82349,0,18.1,0,0.671,6.794,98.8,1.358,24,666,20.2,396.9,21.24\r
+23.6482,0,18.1,0,0.671,6.38,96.2,1.3861,24,666,20.2,396.9,23.69\r
+17.8667,0,18.1,0,0.671,6.223,100,1.3861,24,666,20.2,393.74,21.78\r
+88.9762,0,18.1,0,0.671,6.968,91.9,1.4165,24,666,20.2,396.9,17.21\r
+15.8744,0,18.1,0,0.671,6.545,99.1,1.5192,24,666,20.2,396.9,21.08\r
+9.18702,0,18.1,0,0.7,5.536,100,1.5804,24,666,20.2,396.9,23.6\r
+7.99248,0,18.1,0,0.7,5.52,100,1.5331,24,666,20.2,396.9,24.56\r
+20.0849,0,18.1,0,0.7,4.368,91.2,1.4395,24,666,20.2,285.83,30.63\r
+16.8118,0,18.1,0,0.7,5.277,98.1,1.4261,24,666,20.2,396.9,30.81\r
+24.3938,0,18.1,0,0.7,4.652,100,1.4672,24,666,20.2,396.9,28.28\r
+22.5971,0,18.1,0,0.7,5,89.5,1.5184,24,666,20.2,396.9,31.99\r
+14.3337,0,18.1,0,0.7,4.88,100,1.5895,24,666,20.2,372.92,30.62\r
+8.15174,0,18.1,0,0.7,5.39,98.9,1.7281,24,666,20.2,396.9,20.85\r
+6.96215,0,18.1,0,0.7,5.713,97,1.9265,24,666,20.2,394.43,17.11\r
+5.29305,0,18.1,0,0.7,6.051,82.5,2.1678,24,666,20.2,378.38,18.76\r
+11.5779,0,18.1,0,0.7,5.036,97,1.77,24,666,20.2,396.9,25.68\r
+8.64476,0,18.1,0,0.693,6.193,92.6,1.7912,24,666,20.2,396.9,15.17\r
+13.3598,0,18.1,0,0.693,5.887,94.7,1.7821,24,666,20.2,396.9,16.35\r
+8.71675,0,18.1,0,0.693,6.471,98.8,1.7257,24,666,20.2,391.98,17.12\r
+5.87205,0,18.1,0,0.693,6.405,96,1.6768,24,666,20.2,396.9,19.37\r
+7.67202,0,18.1,0,0.693,5.747,98.9,1.6334,24,666,20.2,393.1,19.92\r
+38.3518,0,18.1,0,0.693,5.453,100,1.4896,24,666,20.2,396.9,30.59\r
+9.91655,0,18.1,0,0.693,5.852,77.8,1.5004,24,666,20.2,338.16,29.97\r
+25.0461,0,18.1,0,0.693,5.987,100,1.5888,24,666,20.2,396.9,26.77\r
+14.2362,0,18.1,0,0.693,6.343,100,1.5741,24,666,20.2,396.9,20.32\r
+9.59571,0,18.1,0,0.693,6.404,100,1.639,24,666,20.2,376.11,20.31\r
+24.8017,0,18.1,0,0.693,5.349,96,1.7028,24,666,20.2,396.9,19.77\r
+41.5292,0,18.1,0,0.693,5.531,85.4,1.6074,24,666,20.2,329.46,27.38\r
+67.9208,0,18.1,0,0.693,5.683,100,1.4254,24,666,20.2,384.97,22.98\r
+20.7162,0,18.1,0,0.659,4.138,100,1.1781,24,666,20.2,370.22,23.34\r
+11.9511,0,18.1,0,0.659,5.608,100,1.2852,24,666,20.2,332.09,12.13\r
+7.40389,0,18.1,0,0.597,5.617,97.9,1.4547,24,666,20.2,314.64,26.4\r
+14.4383,0,18.1,0,0.597,6.852,100,1.4655,24,666,20.2,179.36,19.78\r
+51.1358,0,18.1,0,0.597,5.757,100,1.413,24,666,20.2,2.6,10.11\r
+14.0507,0,18.1,0,0.597,6.657,100,1.5275,24,666,20.2,35.05,21.22\r
+18.811,0,18.1,0,0.597,4.628,100,1.5539,24,666,20.2,28.79,34.37\r
+28.6558,0,18.1,0,0.597,5.155,100,1.5894,24,666,20.2,210.97,20.08\r
+45.7461,0,18.1,0,0.693,4.519,100,1.6582,24,666,20.2,88.27,36.98\r
+18.0846,0,18.1,0,0.679,6.434,100,1.8347,24,666,20.2,27.25,29.05\r
+10.8342,0,18.1,0,0.679,6.782,90.8,1.8195,24,666,20.2,21.57,25.79\r
+25.9406,0,18.1,0,0.679,5.304,89.1,1.6475,24,666,20.2,127.36,26.64\r
+73.5341,0,18.1,0,0.679,5.957,100,1.8026,24,666,20.2,16.45,20.62\r
+11.8123,0,18.1,0,0.718,6.824,76.5,1.794,24,666,20.2,48.45,22.74\r
+11.0874,0,18.1,0,0.718,6.411,100,1.8589,24,666,20.2,318.75,15.02\r
+7.02259,0,18.1,0,0.718,6.006,95.3,1.8746,24,666,20.2,319.98,15.7\r
+12.0482,0,18.1,0,0.614,5.648,87.6,1.9512,24,666,20.2,291.55,14.1\r
+7.05042,0,18.1,0,0.614,6.103,85.1,2.0218,24,666,20.2,2.52,23.29\r
+8.79212,0,18.1,0,0.584,5.565,70.6,2.0635,24,666,20.2,3.65,17.16\r
+15.8603,0,18.1,0,0.679,5.896,95.4,1.9096,24,666,20.2,7.68,24.39\r
+12.2472,0,18.1,0,0.584,5.837,59.7,1.9976,24,666,20.2,24.65,15.69\r
+37.6619,0,18.1,0,0.679,6.202,78.7,1.8629,24,666,20.2,18.82,14.52\r
+7.36711,0,18.1,0,0.679,6.193,78.1,1.9356,24,666,20.2,96.73,21.52\r
+9.33889,0,18.1,0,0.679,6.38,95.6,1.9682,24,666,20.2,60.72,24.08\r
+8.49213,0,18.1,0,0.584,6.348,86.1,2.0527,24,666,20.2,83.45,17.64\r
+10.0623,0,18.1,0,0.584,6.833,94.3,2.0882,24,666,20.2,81.33,19.69\r
+6.44405,0,18.1,0,0.584,6.425,74.8,2.2004,24,666,20.2,97.95,12.03\r
+5.58107,0,18.1,0,0.713,6.436,87.9,2.3158,24,666,20.2,100.19,16.22\r
+13.9134,0,18.1,0,0.713,6.208,95,2.2222,24,666,20.2,100.63,15.17\r
+11.1604,0,18.1,0,0.74,6.629,94.6,2.1247,24,666,20.2,109.85,23.27\r
+14.4208,0,18.1,0,0.74,6.461,93.3,2.0026,24,666,20.2,27.49,18.05\r
+15.1772,0,18.1,0,0.74,6.152,100,1.9142,24,666,20.2,9.32,26.45\r
+13.6781,0,18.1,0,0.74,5.935,87.9,1.8206,24,666,20.2,68.95,34.02\r
+9.39063,0,18.1,0,0.74,5.627,93.9,1.8172,24,666,20.2,396.9,22.88\r
+22.0511,0,18.1,0,0.74,5.818,92.4,1.8662,24,666,20.2,391.45,22.11\r
+9.72418,0,18.1,0,0.74,6.406,97.2,2.0651,24,666,20.2,385.96,19.52\r
+5.66637,0,18.1,0,0.74,6.219,100,2.0048,24,666,20.2,395.69,16.59\r
+9.96654,0,18.1,0,0.74,6.485,100,1.9784,24,666,20.2,386.73,18.85\r
+12.8023,0,18.1,0,0.74,5.854,96.6,1.8956,24,666,20.2,240.52,23.79\r
+10.6718,0,18.1,0,0.74,6.459,94.8,1.9879,24,666,20.2,43.06,23.98\r
+6.28807,0,18.1,0,0.74,6.341,96.4,2.072,24,666,20.2,318.01,17.79\r
+9.92485,0,18.1,0,0.74,6.251,96.6,2.198,24,666,20.2,388.52,16.44\r
+9.32909,0,18.1,0,0.713,6.185,98.7,2.2616,24,666,20.2,396.9,18.13\r
+7.52601,0,18.1,0,0.713,6.417,98.3,2.185,24,666,20.2,304.21,19.31\r
+6.71772,0,18.1,0,0.713,6.749,92.6,2.3236,24,666,20.2,0.32,17.44\r
+5.44114,0,18.1,0,0.713,6.655,98.2,2.3552,24,666,20.2,355.29,17.73\r
+5.09017,0,18.1,0,0.713,6.297,91.8,2.3682,24,666,20.2,385.09,17.27\r
+8.24809,0,18.1,0,0.713,7.393,99.3,2.4527,24,666,20.2,375.87,16.74\r
+9.51363,0,18.1,0,0.713,6.728,94.1,2.4961,24,666,20.2,6.68,18.71\r
+4.75237,0,18.1,0,0.713,6.525,86.5,2.4358,24,666,20.2,50.92,18.13\r
+4.66883,0,18.1,0,0.713,5.976,87.9,2.5806,24,666,20.2,10.48,19.01\r
+8.20058,0,18.1,0,0.713,5.936,80.3,2.7792,24,666,20.2,3.5,16.94\r
+7.75223,0,18.1,0,0.713,6.301,83.7,2.7831,24,666,20.2,272.21,16.23\r
+6.80117,0,18.1,0,0.713,6.081,84.4,2.7175,24,666,20.2,396.9,14.7\r
+4.81213,0,18.1,0,0.713,6.701,90,2.5975,24,666,20.2,255.23,16.42\r
+3.69311,0,18.1,0,0.713,6.376,88.4,2.5671,24,666,20.2,391.43,14.65\r
+6.65492,0,18.1,0,0.713,6.317,83,2.7344,24,666,20.2,396.9,13.99\r
+5.82115,0,18.1,0,0.713,6.513,89.9,2.8016,24,666,20.2,393.82,10.29\r
+7.83932,0,18.1,0,0.655,6.209,65.4,2.9634,24,666,20.2,396.9,13.22\r
+3.1636,0,18.1,0,0.655,5.759,48.2,3.0665,24,666,20.2,334.4,14.13\r
+3.77498,0,18.1,0,0.655,5.952,84.7,2.8715,24,666,20.2,22.01,17.15\r
+4.42228,0,18.1,0,0.584,6.003,94.5,2.5403,24,666,20.2,331.29,21.32\r
+15.5757,0,18.1,0,0.58,5.926,71,2.9084,24,666,20.2,368.74,18.13\r
+13.0751,0,18.1,0,0.58,5.713,56.7,2.8237,24,666,20.2,396.9,14.76\r
+4.34879,0,18.1,0,0.58,6.167,84,3.0334,24,666,20.2,396.9,16.29\r
+4.03841,0,18.1,0,0.532,6.229,90.7,3.0993,24,666,20.2,395.33,12.87\r
+3.56868,0,18.1,0,0.58,6.437,75,2.8965,24,666,20.2,393.37,14.36\r
+4.64689,0,18.1,0,0.614,6.98,67.6,2.5329,24,666,20.2,374.68,11.66\r
+8.05579,0,18.1,0,0.584,5.427,95.4,2.4298,24,666,20.2,352.58,18.14\r
+6.39312,0,18.1,0,0.584,6.162,97.4,2.206,24,666,20.2,302.76,24.1\r
+4.87141,0,18.1,0,0.614,6.484,93.6,2.3053,24,666,20.2,396.21,18.68\r
+15.0234,0,18.1,0,0.614,5.304,97.3,2.1007,24,666,20.2,349.48,24.91\r
+10.233,0,18.1,0,0.614,6.185,96.7,2.1705,24,666,20.2,379.7,18.03\r
+14.3337,0,18.1,0,0.614,6.229,88,1.9512,24,666,20.2,383.32,13.11\r
+5.82401,0,18.1,0,0.532,6.242,64.7,3.4242,24,666,20.2,396.9,10.74\r
+5.70818,0,18.1,0,0.532,6.75,74.9,3.3317,24,666,20.2,393.07,7.74\r
+5.73116,0,18.1,0,0.532,7.061,77,3.4106,24,666,20.2,395.28,7.01\r
+2.81838,0,18.1,0,0.532,5.762,40.3,4.0983,24,666,20.2,392.92,10.42\r
+2.37857,0,18.1,0,0.583,5.871,41.9,3.724,24,666,20.2,370.73,13.34\r
+3.67367,0,18.1,0,0.583,6.312,51.9,3.9917,24,666,20.2,388.62,10.58\r
+5.69175,0,18.1,0,0.583,6.114,79.8,3.5459,24,666,20.2,392.68,14.98\r
+4.83567,0,18.1,0,0.583,5.905,53.2,3.1523,24,666,20.2,388.22,11.45\r
+0.15086,0,27.74,0,0.609,5.454,92.7,1.8209,4,711,20.1,395.09,18.06\r
+0.18337,0,27.74,0,0.609,5.414,98.3,1.7554,4,711,20.1,344.05,23.97\r
+0.20746,0,27.74,0,0.609,5.093,98,1.8226,4,711,20.1,318.43,29.68\r
+0.10574,0,27.74,0,0.609,5.983,98.8,1.8681,4,711,20.1,390.11,18.07\r
+0.11132,0,27.74,0,0.609,5.983,83.5,2.1099,4,711,20.1,396.9,13.35\r
+0.17331,0,9.69,0,0.585,5.707,54,2.3817,6,391,19.2,396.9,12.01\r
+0.27957,0,9.69,0,0.585,5.926,42.6,2.3817,6,391,19.2,396.9,13.59\r
+0.17899,0,9.69,0,0.585,5.67,28.8,2.7986,6,391,19.2,393.29,17.6\r
+0.2896,0,9.69,0,0.585,5.39,72.9,2.7986,6,391,19.2,396.9,21.14\r
+0.26838,0,9.69,0,0.585,5.794,70.6,2.8927,6,391,19.2,396.9,14.1\r
+0.23912,0,9.69,0,0.585,6.019,65.3,2.4091,6,391,19.2,396.9,12.92\r
+0.17783,0,9.69,0,0.585,5.569,73.5,2.3999,6,391,19.2,395.77,15.1\r
+0.22438,0,9.69,0,0.585,6.027,79.7,2.4982,6,391,19.2,396.9,14.33\r
+0.06263,0,11.93,0,0.573,6.593,69.1,2.4786,1,273,21,391.99,9.67\r
+0.04527,0,11.93,0,0.573,6.12,76.7,2.2875,1,273,21,396.9,9.08\r
+0.06076,0,11.93,0,0.573,6.976,91,2.1675,1,273,21,396.9,5.64\r
+0.10959,0,11.93,0,0.573,6.794,89.3,2.3889,1,273,21,393.45,6.48\r
+0.04741,0,11.93,0,0.573,6.03,80.8,2.505,1,273,21,396.9,7.88
\ No newline at end of file
diff --git a/extra/tensors/demos/demos.factor b/extra/tensors/demos/demos.factor
new file mode 100644 (file)
index 0000000..c2720cc
--- /dev/null
@@ -0,0 +1,73 @@
+! Copyright (C) 2019 HMC Clinic.
+! See http://factorcode.org/license.txt for BSD license.
+! Code based on https://towardsdatascience.com/linear-regression-from-scratch-with-numpy-implementation-finally-8e617d8e274c
+
+USING: arrays accessors csv io io.encodings.utf8 kernel locals math math.parser
+math.ranges math.statistics prettyprint sequences tensors ;
+IN: tensors.demos
+
+<PRIVATE
+
+! Normalize across each of the features
+:: normalize ( X -- norm )
+    ! Compute the mean for each of the features and repeat it so that it can be
+    ! combined with X
+    X transpose tensor>array :> X-T
+    X-T [ mean ] map >tensor :> feat-means
+    X shape>> first [0,b) [ drop feat-means ] map stack :> means
+    ! Compute the std for each of the features and repeat it so that it can be
+    ! combined with X
+    X-T [ std ] map >tensor :> feat-stds
+    X shape>> first [0,b) [ drop feat-stds ] map stack :> stds
+    X means t- stds t/ ;
+
+:: compute-cost ( X y params -- cost )
+    ! Compute (1/(2*n_samples))
+    1 2 y shape>> first * /
+    ! Compute h
+    X params matmul
+    ! Compute sum((h-y)**2)
+    y t- dup t* sum
+    ! Multiply to get final cost
+    * ;
+
+:: gradient-descent ( X y params lr n-iters -- history params )
+    lr y shape>> first / :> batch-lr
+    { n-iters } zeros :> history
+    X transpose :> X-T
+    params
+    n-iters [
+        ! Update params with
+        ! params = params - (learning_rate/n_samples) * X.T @ (X @ params - y)
+        swap dup :> old-params
+        batch-lr X-T X old-params matmul y t- matmul t* t- :> new-params
+        ! Compute the cost and add it to the history
+        X y new-params compute-cost swap history set-nth
+        new-params
+    ] each-integer
+    history swap ;
+
+PRIVATE>
+
+:: linear-regression ( X y lr n-iters -- )
+    X normalize
+    ! Add the constant coefficient
+    y shape>> first 1 2array ones swap 2array hstack :> X-norm
+    ! Create the array of parameters
+    X-norm shape>> second 1 2array zeros :> params
+    ! Compute the initial cost
+    X-norm y params compute-cost
+    ! Print!
+    number>string "The initial cost is " swap append print
+    ! Perform gradient descent
+    X-norm y params lr n-iters gradient-descent
+    "The optimal parameters are " print .
+    last number>string "The final cost was " swap append print
+    ;
+
+! Load and return the boston house-prices dataset
+: load-boston-data ( -- X y )
+    "vocab:tensors/demos/data.csv" utf8 file>csv
+    [ [ string>number ] map ] map >tensor
+    "vocab:tensors/demos/target.csv" utf8 file>csv
+    [ [ string>number ] map ] map >tensor ;
\ No newline at end of file
diff --git a/extra/tensors/demos/target.csv b/extra/tensors/demos/target.csv
new file mode 100644 (file)
index 0000000..97233ea
--- /dev/null
@@ -0,0 +1,506 @@
+24\r
+21.6\r
+34.7\r
+33.4\r
+36.2\r
+28.7\r
+22.9\r
+27.1\r
+16.5\r
+18.9\r
+15\r
+18.9\r
+21.7\r
+20.4\r
+18.2\r
+19.9\r
+23.1\r
+17.5\r
+20.2\r
+18.2\r
+13.6\r
+19.6\r
+15.2\r
+14.5\r
+15.6\r
+13.9\r
+16.6\r
+14.8\r
+18.4\r
+21\r
+12.7\r
+14.5\r
+13.2\r
+13.1\r
+13.5\r
+18.9\r
+20\r
+21\r
+24.7\r
+30.8\r
+34.9\r
+26.6\r
+25.3\r
+24.7\r
+21.2\r
+19.3\r
+20\r
+16.6\r
+14.4\r
+19.4\r
+19.7\r
+20.5\r
+25\r
+23.4\r
+18.9\r
+35.4\r
+24.7\r
+31.6\r
+23.3\r
+19.6\r
+18.7\r
+16\r
+22.2\r
+25\r
+33\r
+23.5\r
+19.4\r
+22\r
+17.4\r
+20.9\r
+24.2\r
+21.7\r
+22.8\r
+23.4\r
+24.1\r
+21.4\r
+20\r
+20.8\r
+21.2\r
+20.3\r
+28\r
+23.9\r
+24.8\r
+22.9\r
+23.9\r
+26.6\r
+22.5\r
+22.2\r
+23.6\r
+28.7\r
+22.6\r
+22\r
+22.9\r
+25\r
+20.6\r
+28.4\r
+21.4\r
+38.7\r
+43.8\r
+33.2\r
+27.5\r
+26.5\r
+18.6\r
+19.3\r
+20.1\r
+19.5\r
+19.5\r
+20.4\r
+19.8\r
+19.4\r
+21.7\r
+22.8\r
+18.8\r
+18.7\r
+18.5\r
+18.3\r
+21.2\r
+19.2\r
+20.4\r
+19.3\r
+22\r
+20.3\r
+20.5\r
+17.3\r
+18.8\r
+21.4\r
+15.7\r
+16.2\r
+18\r
+14.3\r
+19.2\r
+19.6\r
+23\r
+18.4\r
+15.6\r
+18.1\r
+17.4\r
+17.1\r
+13.3\r
+17.8\r
+14\r
+14.4\r
+13.4\r
+15.6\r
+11.8\r
+13.8\r
+15.6\r
+14.6\r
+17.8\r
+15.4\r
+21.5\r
+19.6\r
+15.3\r
+19.4\r
+17\r
+15.6\r
+13.1\r
+41.3\r
+24.3\r
+23.3\r
+27\r
+50\r
+50\r
+50\r
+22.7\r
+25\r
+50\r
+23.8\r
+23.8\r
+22.3\r
+17.4\r
+19.1\r
+23.1\r
+23.6\r
+22.6\r
+29.4\r
+23.2\r
+24.6\r
+29.9\r
+37.2\r
+39.8\r
+36.2\r
+37.9\r
+32.5\r
+26.4\r
+29.6\r
+50\r
+32\r
+29.8\r
+34.9\r
+37\r
+30.5\r
+36.4\r
+31.1\r
+29.1\r
+50\r
+33.3\r
+30.3\r
+34.6\r
+34.9\r
+32.9\r
+24.1\r
+42.3\r
+48.5\r
+50\r
+22.6\r
+24.4\r
+22.5\r
+24.4\r
+20\r
+21.7\r
+19.3\r
+22.4\r
+28.1\r
+23.7\r
+25\r
+23.3\r
+28.7\r
+21.5\r
+23\r
+26.7\r
+21.7\r
+27.5\r
+30.1\r
+44.8\r
+50\r
+37.6\r
+31.6\r
+46.7\r
+31.5\r
+24.3\r
+31.7\r
+41.7\r
+48.3\r
+29\r
+24\r
+25.1\r
+31.5\r
+23.7\r
+23.3\r
+22\r
+20.1\r
+22.2\r
+23.7\r
+17.6\r
+18.5\r
+24.3\r
+20.5\r
+24.5\r
+26.2\r
+24.4\r
+24.8\r
+29.6\r
+42.8\r
+21.9\r
+20.9\r
+44\r
+50\r
+36\r
+30.1\r
+33.8\r
+43.1\r
+48.8\r
+31\r
+36.5\r
+22.8\r
+30.7\r
+50\r
+43.5\r
+20.7\r
+21.1\r
+25.2\r
+24.4\r
+35.2\r
+32.4\r
+32\r
+33.2\r
+33.1\r
+29.1\r
+35.1\r
+45.4\r
+35.4\r
+46\r
+50\r
+32.2\r
+22\r
+20.1\r
+23.2\r
+22.3\r
+24.8\r
+28.5\r
+37.3\r
+27.9\r
+23.9\r
+21.7\r
+28.6\r
+27.1\r
+20.3\r
+22.5\r
+29\r
+24.8\r
+22\r
+26.4\r
+33.1\r
+36.1\r
+28.4\r
+33.4\r
+28.2\r
+22.8\r
+20.3\r
+16.1\r
+22.1\r
+19.4\r
+21.6\r
+23.8\r
+16.2\r
+17.8\r
+19.8\r
+23.1\r
+21\r
+23.8\r
+23.1\r
+20.4\r
+18.5\r
+25\r
+24.6\r
+23\r
+22.2\r
+19.3\r
+22.6\r
+19.8\r
+17.1\r
+19.4\r
+22.2\r
+20.7\r
+21.1\r
+19.5\r
+18.5\r
+20.6\r
+19\r
+18.7\r
+32.7\r
+16.5\r
+23.9\r
+31.2\r
+17.5\r
+17.2\r
+23.1\r
+24.5\r
+26.6\r
+22.9\r
+24.1\r
+18.6\r
+30.1\r
+18.2\r
+20.6\r
+17.8\r
+21.7\r
+22.7\r
+22.6\r
+25\r
+19.9\r
+20.8\r
+16.8\r
+21.9\r
+27.5\r
+21.9\r
+23.1\r
+50\r
+50\r
+50\r
+50\r
+50\r
+13.8\r
+13.8\r
+15\r
+13.9\r
+13.3\r
+13.1\r
+10.2\r
+10.4\r
+10.9\r
+11.3\r
+12.3\r
+8.8\r
+7.2\r
+10.5\r
+7.4\r
+10.2\r
+11.5\r
+15.1\r
+23.2\r
+9.7\r
+13.8\r
+12.7\r
+13.1\r
+12.5\r
+8.5\r
+5\r
+6.3\r
+5.6\r
+7.2\r
+12.1\r
+8.3\r
+8.5\r
+5\r
+11.9\r
+27.9\r
+17.2\r
+27.5\r
+15\r
+17.2\r
+17.9\r
+16.3\r
+7\r
+7.2\r
+7.5\r
+10.4\r
+8.8\r
+8.4\r
+16.7\r
+14.2\r
+20.8\r
+13.4\r
+11.7\r
+8.3\r
+10.2\r
+10.9\r
+11\r
+9.5\r
+14.5\r
+14.1\r
+16.1\r
+14.3\r
+11.7\r
+13.4\r
+9.6\r
+8.7\r
+8.4\r
+12.8\r
+10.5\r
+17.1\r
+18.4\r
+15.4\r
+10.8\r
+11.8\r
+14.9\r
+12.6\r
+14.1\r
+13\r
+13.4\r
+15.2\r
+16.1\r
+17.8\r
+14.9\r
+14.1\r
+12.7\r
+13.5\r
+14.9\r
+20\r
+16.4\r
+17.7\r
+19.5\r
+20.2\r
+21.4\r
+19.9\r
+19\r
+19.1\r
+19.1\r
+20.1\r
+19.9\r
+19.6\r
+23.2\r
+29.8\r
+13.8\r
+13.3\r
+16.7\r
+12\r
+14.6\r
+21.4\r
+23\r
+23.7\r
+25\r
+21.8\r
+20.6\r
+21.2\r
+19.1\r
+20.6\r
+15.2\r
+7\r
+8.1\r
+13.6\r
+20.1\r
+21.8\r
+24.5\r
+23.1\r
+19.7\r
+18.3\r
+21.2\r
+17.5\r
+16.8\r
+22.4\r
+20.6\r
+23.9\r
+22\r
+11.9
\ No newline at end of file
index b4fa8cc37ad2e72bafc4f846b90a1751680c11ad..239b3df6538ff2a4f5307650003a177395f25c3c 100644 (file)
@@ -1,27 +1,46 @@
 ! Copyright (C) 2019 HMC Clinic.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: arrays help.markup help.syntax math sequences ;
+USING: arrays help.markup help.syntax lexer math sequences ;
 IN: tensors
 
-ARTICLE: "tensors" "Tensors" "A " { $snippet "tensor" } " is a sequence "
-"of floating point numbers "
+ARTICLE: "tensors" "Tensors"
+"A " { $snippet "tensor" } " is a sequence of floating point numbers "
 "shaped into an n-dimensional matrix. It supports fast, scalable matrix "
 "operations such as matrix multiplication and transposition as well as a "
 "number of element-wise operations. Words for working with tensors are found "
-"in the " { $vocab-link "tensors" } " vocabulary." $nl $nl
-"Tensors can be created "
-"by calling one of four constructors:"
-{ $subsections zeros ones naturals arange }
-"They can be converted to the corresponding N-dimensional array with"
-{ $subsections tensor>array }
+"in the " { $vocab-link "tensors" } " vocabulary." $nl
+"More information about tensors can be found here:"
+{ $subsections "creation" "manipulation" } ;
+
+ARTICLE: "creation" "Creating Tensors"
+"Tensors can be created by calling one of following constructors:"
+{ $subsections zeros ones naturals arange (tensor) }
+"They can be converted to/from the corresponding N-dimensional array with"
+{ $subsections tensor>array >tensor }
+"There is also a tensor parsing word"
+{ $subsections POSTPONE: t{ } ;
+
+ARTICLE: "manipulation" "Manipulating Tensors"
 "The number of dimensions can be extracted with:"
 { $subsections dims }
-"Additionally, tensors can be reshaped with:"
+"Tensors can be reshaped with:"
 { $subsections reshape flatten }
 "Tensors can be combined element-wise with other tensors as well as numbers with:"
 { $subsections t+ t- t* t/ t% }
-"Finally, tensors support the following matrix operations:"
-{ $subsections matmul transpose } ;
+"Tensors support the following matrix operations:"
+{ $subsections matmul transpose }
+"Tensors also support the following concatenation operations:"
+{ $subsections stack hstack vstack t-concat }
+"Tensors implement all " { $vocab-link "sequences" } " operations." $nl
+"Tensors can be indexed into using either numbers or arrays, for example:"
+{ $example
+    "USING: prettyprint sequences tensors ;"
+    "t{ { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } }"
+    "[ { 1 1 } swap nth ] [ 4 swap nth ] bi = ."
+    "t"
+}
+"If the array being used to index into the tensor has the wrong number "
+"of dimensions, a " { $link dimension-mismatch-error } " will be thrown." ;
 
 ARTICLE: "tensor-operators" "Tensor Operators" "Info here" ;
 
@@ -46,6 +65,34 @@ HELP: non-positive-shape-error
 ", which allow users to directly set the shape of a " { $link tensor }
 ", when the shape has zero or negative values." } ;
 
+HELP: non-uniform-seq-error
+{ $values { "seq" sequence } }
+{ $description "Throws a " { $link non-uniform-seq-error } "." }
+{ $error-description "Thrown by operations such as " { $link >tensor } 
+", which allow users to directly input the values of a " { $link tensor }
+" as a nested sequence, when the subsequences have varying lengths." } ;
+
+HELP: dimension-mismatch-error
+{ $values { "tensor-dim" number } { "index-dim" number } }
+{ $description "Throws a " { $link dimension-mismatch-error } "." }
+{ $error-description "Thrown by indexing operations such as " { $link nth }
+" and " { $link set-nth } " if the array being used to index has a different number "
+"of dimensions than the tensor." } ;
+
+HELP: t{
+{ $syntax "t{ elements... }" }
+{ $values { "elements" "a list of numbers" } }
+{ $description "Initializes a tensor with the given elements."
+" Preserves the shape of nested sequences. Assumes uniformly nested sequences." } 
+{ $errors "Throws a " { $link non-uniform-seq-error } " if the given "
+"sequence have subsequences of varying lengths. Throws a " 
+{ $link lexer-error } " if the given sequence is not uniformly nested." } ;
+
+HELP: (tensor)
+{ $values { "shape" sequence } { "tensor" tensor } }
+{ $description "Creates a tensor with shape " { $snippet "shape" }
+" containing uninitialized values. Allows non-positive shapes." } ;
+
 HELP: zeros
 { $values { "shape" sequence } { "tensor" tensor } }
 { $description "Initializes a tensor with shape " { $snippet "shape" }
@@ -87,6 +134,14 @@ HELP: dims
 { $values { "tensor" tensor } { "n" integer } }
 { $description "Returns the dimension of " { $snippet "tensor" } "." } ;
 
+HELP: >tensor
+{ $values { "seq" sequence } { "tensor" tensor } }
+{ $description "Turns a nested sequence " { $snippet "seq" } 
+" into a tensor of the corresponding shape. Assumes a uniformly nested sequence." } 
+{ $errors "Throws a " { $link non-uniform-seq-error } " if the given "
+"sequence have subsequences of varying lengths. Throws a " 
+{ $link lexer-error } " if the given sequence is not uniformly nested." } ;
+
 HELP: t+
 { $values { "x" { $or tensor number } } { "y" { $or tensor number } } { "tensor" tensor } }
 { $description "Element-wise addition. Intakes two tensors or a tensor and a number (in either order)." }
@@ -131,6 +186,33 @@ HELP: matmul
 
 HELP: transpose
 { $values { "tensor" tensor } { "tensor'" tensor } }
-{ $description "Performs n-dimensional matrix transposition on " { $snippet "tens" } "." } ;
+{ $description "Performs n-dimensional matrix transposition on " { $snippet "tensor" } "." } ;
+
+HELP: stack 
+{ $values { "seq" sequence } { "tensor" tensor } } 
+{ $description "Joins the sequences in " { $snippet "seq" } " along a new axis. "
+{ $snippet "tensor" } " will have one more dimension than the arrays in " { $snippet "seq" } "." } 
+{ $errors "Throws a " { $link shape-mismatch-error } " if the sequences in "
+{ $snippet "seq" } " do not have the same shape."} ;
+
+
+HELP: hstack 
+{ $values { "seq" sequence } { "tensor" tensor } } 
+{ $description "Joins the sequences in " { $snippet "seq" } " column-wise." }
+{ $errors "Throws a " { $link shape-mismatch-error } " if the sequences in "
+{ $snippet "seq" } " do not have the same shape along all but the second axis."} ;
+
+HELP: vstack 
+{ $values { "seq" sequence } { "tensor" tensor } } 
+{ $description "Joins the sequences in " { $snippet "seq" } " row-wise." }
+{ $errors "Throws a " { $link shape-mismatch-error } " if the sequences in "
+{ $snippet "seq" } " do not have the same shape along all but the first axis."} ;
+
+HELP: t-concat
+{ $values { "seq" sequence } { "tensor" tensor } } 
+{ $description "Joins the sequences in " { $snippet "seq" } " along the first axis." }
+{ $errors "Throws a " { $link shape-mismatch-error } " if the sequences in "
+{ $snippet "seq" } " do not have the same shape along all but the first axis."} ;
+
 
 ABOUT: "tensors"
index 04c4ebe1b7a1df46dc70f7294126ff3d795ffbeb..50d9e4f89681f5856ec0c585ce0a69461759cfcc 100644 (file)
@@ -1,6 +1,6 @@
 ! Copyright (C) 2019 HMC Clinic.
 ! See http://factorcode.org/license.txt for BSD license.
-USING: accessors alien.c-types kernel math math.order math.vectors
+USING: accessors alien.c-types arrays kernel math math.order math.vectors
 sequences specialized-arrays tensors tools.test ;
 QUALIFIED-WITH: alien.c-types c
 SPECIALIZED-ARRAY: c:float
@@ -106,6 +106,19 @@ IN: tensors.tests
 ]
 [ { -3 5 } \ non-positive-shape-error boa = ] must-fail-with
 
+! Test (tensor)
+{ { 2 4 } } [
+    { 2 4 } (tensor) shape>>
+] unit-test
+
+{ { 0 } } [
+    { 0 } (tensor) shape>>
+] unit-test
+
+{ float-array{ } } [
+    { 0 } (tensor) vec>>
+] unit-test
+
 
 ! Test reshape
 { float-array{ 0.0 0.0 0.0 0.0 } } [
@@ -156,6 +169,173 @@ IN: tensors.tests
     { 1 2 3 } zeros dims
 ] unit-test
 
+! Test sequence operations
+! TODO: add tests for clone-like
+! test length
+{ 20 } [
+    { 2 2 5 } naturals length
+] unit-test
+
+{ 0 } [
+    t{ } length
+] unit-test
+
+! test new-sequence
+{ 10 } [
+    10 { 2 5 } ones new-sequence shape>> product
+] unit-test
+
+{ 2 } [
+    2 { 3 4 5 } ones new-sequence shape>> product
+] unit-test
+
+{ 20 } [
+    20 { 2 5 } ones new-sequence shape>> product
+] unit-test
+
+! test nth
+{ 1.0 } [
+    1 { 5 } naturals nth
+] unit-test
+
+{ 1.0 } [
+    { 1 } { 5 } naturals nth
+] unit-test
+
+{ 3.0 } [
+    { 1 1 } { 2 2 } naturals nth
+] unit-test
+
+{ 5.0 } [
+    { 1 0 1 } { 2 2 2 } naturals nth
+] unit-test
+
+[
+    { 1 2 3 } t{ 1 2 3 } nth
+]
+[ 1 3 \ dimension-mismatch-error boa = ] must-fail-with
+
+! test set-nth
+{ t{ 1 5 3 } } [
+    t{ 1 2 3 } dup [ 5 { 1 } ] dip set-nth
+] unit-test
+
+{ t{ { 0 1 } { 5 3 } } } [
+    { 2 2 } naturals dup [ 5 { 1 0 } ] dip set-nth
+] unit-test
+
+{ t{ { { 0 1 } { 2 3 } } { { 4 10 } { 6 7 } } } } [
+    { 2 2 2 } naturals dup [ 10 { 1 0 1 } ] dip set-nth
+] unit-test
+
+[
+    { 2 2 } naturals dup [ 5 { 1 } ] dip set-nth
+]
+[ 2 1 \ dimension-mismatch-error boa = ] must-fail-with
+
+! test clone
+{ t{ 1 2 3 }  } [
+    t{ 1 2 3 } dup clone [ 5 1 ] dip set-nth
+] unit-test
+
+{ t } [
+    t{ 1 2 3 } dup clone =
+] unit-test
+
+{ f } [
+    t{ 1 2 3 } dup clone dup [ 5 1 ] dip set-nth =
+] unit-test
+
+! Test like
+{ float-array{ 0.0 1.0 2.0 3.0 4.0 5.0 } } [
+    { 2 3 } naturals dup like vec>>
+] unit-test
+
+{ { 2 3 } } [
+    { 2 3 } naturals dup like shape>>
+] unit-test
+
+{ float-array{ 0.0 1.0 2.0 3.0 4.0 5.0 } } [
+    { 0 1 2 3 4 5 } { 2 3 } naturals like vec>>
+] unit-test
+
+{ { 2 3 } } [
+    { 0 1 2 3 4 5 } { 2 3 } naturals like shape>>
+] unit-test
+
+{ float-array{ 0.0 1.0 2.0 3.0 4.0 5.0 } } [
+    float-array{ 0 1 2 3 4 5 } { 2 3 } naturals like vec>>
+] unit-test
+
+{ { 2 3 } } [
+    float-array{ 0 1 2 3 4 5 } { 2 3 } naturals like shape>>
+] unit-test
+
+{ float-array{ 0.0 1.0 2.0 3.0 4.0 } } [
+    { 0 1 2 3 4 } { 2 3 } naturals like vec>>
+] unit-test
+
+{ { 5 } } [
+    { 0 1 2 3 4 } { 2 3 } naturals like shape>>
+] unit-test
+
+{ float-array{ 0.0 1.0 2.0 3.0 4.0 } } [
+    float-array{ 0 1 2 3 4 } { 2 3 } naturals like vec>>
+] unit-test
+
+{ { 5 } } [
+    float-array{ 0 1 2 3 4 } { 2 3 } naturals like shape>>
+] unit-test
+
+{ t{ { 0.0 1.0 } { 2.0 3.0 } } } [
+    { { 0 1 } { 2 3 } } t{ } like
+] unit-test
+
+! test clone-like
+{ float-array{ 1.0 2.0 3.0 } } [
+    { 1 2 3 } t{ } clone-like vec>>
+] unit-test
+
+{ f } [
+    float-array{ 1.0 2.0 3.0 } dup t{ } clone-like
+    dup [ 5 1 ] dip set-nth vec>> =
+] unit-test
+
+! Test sum
+{ 21.0 } [
+    t{ 1 2 3 4 5 6 } sum
+] unit-test
+
+{ 50005000.0 } [
+    { 100 100 } naturals 1 t+ sum
+] unit-test
+
+! Test tensor parsing word
+{ float-array{ 1 2 3 4 5 6 7 8 } } [
+    t{ 1 2 3 4 5 6 7 8 } vec>>
+] unit-test
+
+{ { 8 } } [
+    t{ 1 2 3 4 5 6 7 8 } shape>>
+] unit-test
+
+{ float-array{ 1 2 3 4 5 6 7 8 } } [
+    t{ { 1 2 3 4 } { 5 6 7 8 } } vec>>
+] unit-test
+
+{ { 2 4 } } [
+    t{ { 1 2 3 4 } { 5 6 7 8 } } shape>>
+] unit-test
+
+{ float-array{ 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 } } [
+    t{ { { 1 2 3 4 } { 5 6 7 8 } { 9 10 11 12 } } { { 13 14 15 16 } { 17 18 19 20 } { 21 22 23 24 } } } vec>>
+] unit-test
+
+{ { 2 3 4 } } [
+    t{ { { 1 2 3 4 } { 5 6 7 8 } { 9 10 11 12 } } { { 13 14 15 16 } { 17 18 19 20 } { 21 22 23 24 } } } shape>>
+] unit-test
+
+
 ! Test addition
 { float-array{ 1.0 2.0 3.0 4.0 } } [
     { 4 } naturals { 4 } ones t+ vec>>
@@ -461,6 +641,188 @@ IN: tensors.tests
     t% shape>>
 ] unit-test
 
+! t-concat
+{ t{ { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } } } [
+    { 2 3 } naturals dup 2array t-concat
+] unit-test
+
+{ t{ { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } { 0.0 1.0 2.0 } { 3.0 4.0 5.0 }
+     { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } } } [
+    { 2 3 } naturals dup dup 3array t-concat
+] unit-test
+
+{ t{ { 0.0 1.0 2.0 }
+     { 3.0 4.0 5.0 }
+     { 0.0 1.0 2.0 }
+     { 3.0 4.0 5.0 }
+     { 6.0 7.0 8.0 }
+     { 9.0 10.0 11.0 }
+     { 12.0 13.0 14.0 } } } [
+    { 2 3 } naturals { 5 3 } naturals 2array t-concat
+] unit-test
+
+{ t{ { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } }
+     { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } } } [
+     { 2 3 2 } naturals dup 2array t-concat
+] unit-test
+
+{ t{ { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } }
+     { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } }
+     { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } } } [
+     { 2 3 2 } naturals dup dup 3array t-concat
+] unit-test
+
+[
+    { 2 2 } naturals { 2 3 } naturals 2array t-concat
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals dup { 2 3 } naturals 3array t-concat
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+! stack
+{ t{ { { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } }
+     { { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } } } } [
+    { 2 3 } naturals dup 2array stack
+] unit-test
+
+{ t{ { { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } }
+     { { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } }
+     { { 0.0 1.0 2.0 } { 3.0 4.0 5.0 } } } } [
+    { 2 3 } naturals dup dup 3array stack
+] unit-test
+
+{ t{ { { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+       { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } }
+     { { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+       { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } } } } [
+     { 2 3 2 } naturals dup 2array stack
+] unit-test
+
+{ t{ { { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+       { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } }
+     { { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+       { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } }
+     { { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } }
+       { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } } } } } [
+     { 2 3 2 } naturals dup dup 3array stack
+] unit-test
+
+[
+    { 2 2 } naturals { 2 3 } naturals 2array stack
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals { 3 2 } naturals 2array stack
+]
+[ { 2 2 } { 3 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals dup { 2 3 } naturals 3array stack
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals dup { 3 2 } naturals 3array stack
+]
+[ { 2 2 } { 3 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+! hstack
+
+{ t{ { 0.0 1.0 2.0 3.0 1.0 }
+     { 4.0 5.0 6.0 7.0 1.0 }
+     { 8.0 9.0 10.0 11.0 1.0 } } } [
+    { 3 4 } naturals { 3 1 } ones 2array hstack
+] unit-test
+
+{ t{ { 0.0 1.0 2.0 3.0 1.0 0.0 0.0 }
+     { 4.0 5.0 6.0 7.0 1.0 0.0 0.0 }
+     { 8.0 9.0 10.0 11.0 1.0 0.0 0.0 } } } [
+    { 3 4 } naturals { 3 1 } ones { 3 2 } zeros 3array hstack
+] unit-test
+
+{ t{ { { 0.0 1.0 2.0 3.0 1.0 } { 4.0 5.0 6.0 7.0 1.0 } }
+     { { 8.0 9.0 10.0 11.0 1.0 } { 12.0 13.0 14.0 15.0 1.0 } } } } [
+     { 2 2 4 } naturals { 2 2 1 } ones 2array hstack
+] unit-test
+
+{ t{ { { 0.0 1.0 2.0 3.0 1.0 0.0 0.0 }
+       { 4.0 5.0 6.0 7.0 1.0 0.0 0.0 } }
+     { { 8.0 9.0 10.0 11.0 1.0 0.0 0.0 }
+       { 12.0 13.0 14.0 15.0 1.0 0.0 0.0 } } } } [
+     { 2 2 4 } naturals { 2 2 1 } ones { 2 2 2 } zeros 3array hstack
+] unit-test
+
+[
+    { 2 2 } naturals { 3 2 } naturals 2array hstack
+]
+[ { 2 2 } { 3 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals dup { 3 2 } naturals 3array hstack
+]
+[ { 2 2 } { 3 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 2 } naturals { 3 2 2 } naturals 2array hstack
+]
+[ { 2 2 2 } { 3 2 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 2 } naturals dup { 3 2 2 } naturals 3array hstack
+]
+[ { 2 2 2 } { 3 2 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+! vstack
+{ t{ { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } { 0.0 1.0 } } } [
+    { 3 2 } naturals { 1 2 } naturals 2array vstack
+] unit-test
+
+{ t{ { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 }
+     { 0.0 1.0 } { 0.0 1.0 } { 2.0 3.0 } } } [
+    { 3 2 } naturals { 1 2 } naturals { 2 2 } naturals 3array vstack
+] unit-test
+
+{ t{ { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 } { 0.0 1.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 } { 2.0 3.0 } } } } [
+    { 2 3 2 } naturals { 2 1 2 } naturals 2array vstack
+] unit-test
+
+{ t{ { { 0.0 1.0 } { 2.0 3.0 } { 4.0 5.0 }
+       { 0.0 1.0 } { 0.0 1.0 } { 2.0 3.0 } }
+     { { 6.0 7.0 } { 8.0 9.0 } { 10.0 11.0 }
+       { 2.0 3.0 } { 4.0 5.0 } { 6.0 7.0 } } } } [
+    { 2 3 2 } naturals { 2 1 2 } naturals { 2 2 2 } naturals 3array vstack
+] unit-test
+
+[
+    { 2 2 } naturals { 2 3 } naturals 2array vstack
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 } naturals dup { 2 3 } naturals 3array vstack
+]
+[ { 2 2 } { 2 3 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 2 } naturals { 3 2 2 } naturals 2array vstack
+]
+[ { 2 2 2 } { 3 2 2 } \ shape-mismatch-error boa = ] must-fail-with
+
+[
+    { 2 2 2 } naturals dup { 3 2 2 } naturals 3array vstack
+]
+[ { 2 2 2 } { 3 2 2 } \ shape-mismatch-error boa = ] must-fail-with
+
 ! test tensor>array
 { { 0.0 0.0 } } [
     { 2 } zeros tensor>array
@@ -475,6 +837,36 @@ IN: tensors.tests
     { 2 3 2 } ones tensor>array
 ] unit-test
 
+! test >tensor
+{ t } [
+    { 2 3 4 } naturals dup tensor>array >tensor =
+] unit-test
+
+{ t } [
+    { { { 1.0 2.0 } { 3.0 4.0 } }
+      { { 5.0 6.0 } { 7.0 8.0 } }
+      { { 9.0 10.0 } { 11.0 12.0 } } }
+    dup >tensor tensor>array =
+] unit-test
+
+{ t } [
+    { 2 3 } naturals
+    { { 0 1 2 } { 3 4 5 } } >tensor =
+] unit-test
+
+[
+    { { 1 2 } { 3 } } >tensor
+]
+[ { { 1 2 } { 3 } } \ non-uniform-seq-error boa = ] must-fail-with
+
+{ float-array{ } } [
+    t{ } vec>>
+] unit-test
+
+{ { 0 } } [
+    t{ } shape>>
+] unit-test
+
 ! test matmul
 { float-array{ 70.0 76.0 82.0 88.0 94.0 190.0 212.0 234.0
                256.0 278.0 310.0 348.0 386.0 424.0 462.0 } } [
@@ -510,6 +902,45 @@ IN: tensors.tests
     { 2 2 3 4 } naturals { 2 2 4 5 } naturals matmul shape>>
 ] unit-test
 
+! where n mod 4 is not 0 and m & p have same mod 4 val
+{ float-array{ 45.0 48.0 51.0 54.0 57.0 60.0 63.0 66.0 69.0 126.0
+    138.0 150.0 162.0 174.0 186.0 198.0 210.0 222.0 207.0 228.0
+    249.0 270.0 291.0 312.0 333.0 354.0 375.0 288.0 318.0 348.0
+    378.0 408.0 438.0 468.0 498.0 528.0 369.0 408.0 447.0 486.0
+    525.0 564.0 603.0 642.0 681.0 } } [
+    { 5 3 } naturals { 3 9 } naturals matmul vec>>
+] unit-test
+
+! where n mod 4 is not 0 and m & p have same mod 4 val
+{ float-array{ 270.0 280.0 290.0 300.0 310.0 320.0 330.0 340.0 350.0
+               720.0 755.0 790.0 825.0 860.0 895.0 930.0 965.0 1000.0
+               1170.0 1230.0 1290.0 1350.0 1410.0 1470.0 1530.0 1590.0
+               1650.0 1620.0 1705.0 1790.0 1875.0 1960.0 2045.0 2130.0
+               2215.0 2300.0 2070.0 2180.0 2290.0 2400.0 2510.0 2620.0
+               2730.0 2840.0 2950.0 } } [
+    { 5 5 } naturals { 5 9 } naturals matmul vec>>
+] unit-test
+
+! where n mod 4 is not 0 and m & p have different mod 4 vals
+{ float-array{ 35.0 38.0 41.0 44.0 47.0 50.0 53.0 98.0 110.0
+    122.0 134.0 146.0 158.0 170.0 161.0 182.0 203.0 224.0
+    245.0 266.0 287.0 224.0 254.0 284.0 314.0 344.0 374.0
+    404.0 287.0 326.0 365.0 404.0 443.0 482.0 521.0 } } [
+    { 5 3 } naturals { 3 7 } naturals matmul vec>>
+] unit-test
+
+{ float-array{ 546.0 567.0 588.0 609.0 630.0 651.0 1428.0 1498.0 1568.0 1638.0
+    1708.0 1778.0 2310.0 2429.0 2548.0 2667.0 2786.0 2905.0 3192.0 3360.0
+    3528.0 3696.0 3864.0 4032.0 4074.0 4291.0 4508.0 4725.0 4942.0 5159.0
+    4956.0 5222.0 5488.0 5754.0 6020.0 6286.0 5838.0 6153.0 6468.0 6783.0
+    7098.0 7413.0 22008.0 22372.0 22736.0 23100.0 23464.0 23828.0 24948.0
+    25361.0 25774.0 26187.0 26600.0 27013.0 27888.0 28350.0 28812.0 29274.0
+    29736.0 30198.0 30828.0 31339.0 31850.0 32361.0 32872.0 33383.0 33768.0
+    34328.0 34888.0 35448.0 36008.0 36568.0 36708.0 37317.0 37926.0 38535.0
+    39144.0 39753.0 39648.0 40306.0 40964.0 41622.0 42280.0 42938.0 } } [
+    { 2 7 7 } naturals { 2 7 6 } naturals matmul vec>>
+] unit-test
+
 ! test transpose
 { float-array{ 0.0 2.0 1.0 3.0 } } [
     { 2 2 } naturals transpose vec>>
index e44ac4ebff4c451fac76257be3ff164e5e985e57..f97d0c4facf7874b27bc818b7b8a19be238fd07e 100644 (file)
@@ -1,12 +1,14 @@
 ! Copyright (C) 2019 HMC Clinic.
 ! See http://factorcode.org/license.txt for BSD license.
 
-USING: accessors alien.data arrays grouping kernel locals math
-math.functions math.ranges multi-methods sequences
-sequences.extras sequences.private specialized-arrays typed ;
+USING: accessors alien alien.c-types alien.data arrays byte-arrays combinators
+grouping kernel locals kernel.private math math.functions math.ranges math.vectors
+math.vectors.simd multi-methods parser prettyprint.custom sequences sequences.extras
+sequences.private specialized-arrays typed ;
 
 QUALIFIED-WITH: alien.c-types c
 SPECIALIZED-ARRAY: c:float
+SPECIALIZED-ARRAY: float-4
 IN: tensors
 
 ! Tensor class definition
@@ -17,6 +19,8 @@ TUPLE: tensor
 ! Errors
 ERROR: non-positive-shape-error shape ;
 ERROR: shape-mismatch-error shape1 shape2 ;
+ERROR: non-uniform-seq-error seq ;
+ERROR: dimension-mismatch-error tensor-dim index-dim ;
 
 <PRIVATE
 
@@ -28,6 +32,7 @@ ERROR: shape-mismatch-error shape1 shape2 ;
 : <tensor> ( shape seq -- tensor )
     tensor boa ;
 
+! Creates a freshly-allocated float-array with the desired c-type values
 : >float-array ( seq -- float-array )
     c:float >c-array ;
 
@@ -50,10 +55,14 @@ PRIVATE>
 : arange ( a b step -- tensor )
     <range> [ length >fixnum 1array ] keep >float-array <tensor> ;
 
-! Construct a tensors with vec { 0 1 2 ... } and reshape to the desired shape
+! Construct a tensor with vec { 0 1 2 ... } and reshape to the desired shape
 : naturals ( shape -- tensor )
     check-shape [ ] [ product [0,b) >float-array ] bi <tensor> ;
 
+! Construct a tensor without initializing its values
+: (tensor) ( shape -- tensor )
+    dup product (float-array) <tensor> ;
+
 <PRIVATE
 
 : check-reshape ( shape1 shape2 -- shape1 shape2 )
@@ -81,6 +90,117 @@ TYPED: tensor>array ( tensor: tensor -- seq: array )
     [ rest-slice reverse [ group ] each ] unless-empty ;
 
 <PRIVATE
+! recursively finds shape of nested array
+! assumes properly shaped array (all sub-arrays are same size)
+:: find-shape ( seq shape -- shape' )
+    seq empty? [ { 0 } ] [
+        ! add length of seq element to shape
+        shape seq length 1array append :> shape'
+        ! base case: check if the first element is a seq
+        seq first :> 1st
+        1st sequence?
+        ! is a sequence: recurse on 1st element
+        [ 1st shape' find-shape ]
+        ! not a sequence: return shape'
+        [ shape' ] if
+    ] if ;
+PRIVATE>
+
+! turns a nested array into a tensor
+:: >tensor ( seq -- tensor )
+    ! get the shape
+    seq { } find-shape :> shape
+    ! flatten the array
+    seq
+    shape length 1 - [
+        drop concat
+    ] each-integer :> flatseq
+    ! check that the size is good
+    shape product flatseq length =
+    [ seq non-uniform-seq-error ] unless
+    ! turn into a tensor
+    shape flatseq >float-array <tensor> ;
+
+SYNTAX: t{ \ } [ >tensor ] parse-literal ;
+
+! Pretty printing
+syntax:M: tensor pprint-delims drop \ t{ \ } ;
+syntax:M: tensor >pprint-sequence tensor>array ;
+syntax:M: tensor pprint* pprint-object ;
+
+
+<PRIVATE
+! turns a shape into a list of things by which to multiply
+! indices to get a full index (e.g. { 2 3 4 } -> { 12 4 1 })
+: ind-mults ( shape -- seq )
+    <reversed> 1 swap [ swap [ * ] keep ] map nip reverse ;
+
+! turns a num/seq index & tensor into num index & tensor
+! also throws a dimension mismatch if seq & tensor shape>> aren't the same len
+: num-index ( n/seq tensor -- n tensor )
+    ! check form of index (num or seq)
+    swap dup array? not
+    [ ! if array, first check if it's a valid index
+        2dup [ shape>> length ] dip length 2dup = 
+        [ dimension-mismatch-error ] unless 2drop
+        ! turn into num
+        [ dup shape>> ind-mults ] dip [ * ] 2map-sum
+    ] unless swap ;
+
+PRIVATE>
+
+
+! Sequence protocol implementation
+syntax:M: tensor clone [ shape>> clone ] [ vec>> clone ] bi <tensor> ;
+
+syntax:M: tensor length vec>> length ;
+
+syntax:M: tensor nth num-index vec>> nth ;
+
+syntax:M: tensor nth-unsafe num-index vec>> nth-unsafe ;
+
+syntax:M: tensor set-nth num-index vec>> set-nth ;
+
+syntax:M: tensor set-nth-unsafe num-index vec>> set-nth-unsafe ;
+
+syntax:M: tensor new-sequence
+    ! Check if the old and new tensors are the same size
+    shape>> 2dup product =
+    ! If so preserve the shape, otherwise create a 1D tensor
+    [ nip (tensor) ] [ drop 1array (tensor) ] if ;
+
+syntax:M: tensor like
+    ! If the original sequence is already a tensor, we are done
+    over tensor?
+    [ drop ] [
+        over float-array? [
+            [ dup [ length 1array ] dip <tensor> ] dip
+        ] [
+            [ >tensor ] dip
+        ] if
+        2dup [ length ] bi@ = [ shape>> reshape ] [ drop ] if
+    ] if ;
+
+syntax:M: tensor clone-like
+    ! If the original sequence is already a tensor, we just need to clone it
+    over tensor?
+    [ drop clone ] [
+        [ >tensor ] dip
+        2dup [ length ] bi@ = [ shape>> reshape ] [ drop ] if
+    ] if ;
+
+INSTANCE: tensor sequence
+
+
+<PRIVATE
+
+:: make-subseq ( arr start len -- arr )
+    ! Find the index
+    c:float heap-size start *
+    ! Compute the starting pointer
+    arr underlying>> <displaced-alien>
+    ! Push length and type to create the new array
+    len c:float <c-direct-array> ; inline
 
 : check-bop-shape ( shape1 shape2 -- shape )
     2dup = [ shape-mismatch-error ] unless drop ;
@@ -90,42 +210,245 @@ TYPED:: t-bop ( tensor1: tensor tensor2: tensor quot: ( x y -- z ) -- tensor: te
     tensor1 shape>> tensor2 shape>> check-bop-shape
     tensor1 vec>> tensor2 vec>> quot 2map <tensor> ; inline
 
+! Create an array of 4-element SIMD arrays for processing floats
+: simd-for-bop ( array -- simd-array rest-slice/f )
+    dup length dup 4 mod [ drop f ] [ - cut-slice ] if-zero
+    [ float-4 cast-array ] dip ; inline
+
+! Create an array of 4-element SIMD arrays for processing floats
+! Tensor class definition
+TUPLE: simd-slice
+    { first-slice float-array }
+    { simd-slice float-4-array }
+    { end-slice float-array } ;
+
+:: (simd-slice) ( arr start len -- arr/f )
+    len [ float-array{ } ] [ drop arr start len make-subseq ] if-zero ; inline
+
+:: <simd-slice> ( arr start -- simd-slice )
+    ! Compute the beginning
+    arr 0 start (simd-slice)
+    ! Compute the SIMD part
+    arr length start - :> len
+    len 4 mod :> end
+    arr start len end - (simd-slice) float-4 cast-array
+    ! Compute the end
+    arr dup length end - end (simd-slice)
+    simd-slice boa ; inline
+
+! Apply the binary operators simd-quot and quot to quickly combine the tensors
+:: t-bop-simd ( tensor1 tensor2 simd-quot: ( x y -- z ) quot: ( x y -- z ) -- tensor )
+    tensor1 shape>> tensor2 shape>> check-bop-shape
+    tensor1 vec>> tensor2 vec>>
+    dup length (float-array) dup :> vec3
+    [ simd-for-bop ] tri@ :> ( simd1 rest1 simd2 rest2 simd3 rest3 )
+    simd1 simd2 simd-quot simd3 2map-into
+    rest1 rest2 quot rest3 2map-into
+    vec3 <tensor> ; inline
+
 ! Apply the operation to the tensor
 TYPED:: t-uop ( tensor: tensor quot: ( x -- y ) -- tensor: tensor )
     tensor vec>> quot map [ tensor shape>> ] dip <tensor> ; inline
 
+! Apply the binary operators simd-quot and quot to quickly combine a tensor and
+! a number
+:: t-uop-simd ( tensor n simd-quot: ( x y -- z ) quot: ( x y -- z ) -- tensor )
+    tensor dup [ shape>> ] [ vec>> ] bi*
+    dup length (float-array) dup :> vec2
+    [ simd-for-bop ] bi@ :> ( simd1 rest1 simd2 rest2 )
+    simd1 n n n n float-4-boa simd-quot curry simd2 map-into
+    rest1 n quot curry rest2 map-into
+    vec2 <tensor> ; inline
+
 PRIVATE>
 
 ! Add a tensor to either another tensor or a scalar
 multi-methods:GENERIC: t+ ( x y -- tensor )
-METHOD: t+ { tensor tensor } [ + ] t-bop ;
-METHOD: t+ { tensor number } >float [ + ] curry t-uop ;
-METHOD: t+ { number tensor } [ >float ] dip [ + ] with t-uop ;
+METHOD: t+ { tensor tensor } [ v+ ] [ + ] t-bop-simd ;
+METHOD: t+ { tensor number } >float [ v+ ] [ + ] t-uop-simd ;
+METHOD: t+ { number tensor } swap >float [ swap v+ ] [ swap + ] t-uop-simd ;
 
 ! Subtraction between two tensors or a tensor and a scalar
 multi-methods:GENERIC: t- ( x y -- tensor )
-METHOD: t- { tensor tensor } [ - ] t-bop ;
-METHOD: t- { tensor number } >float [ - ] curry t-uop ;
-METHOD: t- { number tensor } [ >float ] dip [ - ] with t-uop ;
+METHOD: t- { tensor tensor } [ v- ] [ - ] t-bop-simd ;
+METHOD: t- { tensor number } >float [ v- ] [ - ] t-uop-simd ;
+METHOD: t- { number tensor } swap >float [ swap v- ] [ swap - ] t-uop-simd ;
 
 ! Multiply a tensor with either another tensor or a scalar
 multi-methods:GENERIC: t* ( x y -- tensor )
-METHOD: t* { tensor tensor } [ * ] t-bop ;
-METHOD: t* { tensor number } >float [ * ] curry t-uop ;
-METHOD: t* { number tensor } [ >float ] dip [ * ] with t-uop ;
+METHOD: t* { tensor tensor } [ v* ] [ * ] t-bop-simd ;
+METHOD: t* { tensor number } >float [ v* ] [ * ] t-uop-simd ;
+METHOD: t* { number tensor } swap >float [ swap v* ] [ swap * ] t-uop-simd ;
 
 ! Divide two tensors or a tensor and a scalar
 multi-methods:GENERIC: t/ ( x y -- tensor )
-METHOD: t/ { tensor tensor } [ / ] t-bop ;
-METHOD: t/ { tensor number } >float [ / ] curry t-uop ;
-METHOD: t/ { number tensor } [ >float ] dip [ / ] with t-uop ;
+METHOD: t/ { tensor tensor } [ v/ ] [ / ] t-bop-simd ;
+METHOD: t/ { tensor number } >float [ v/ ] [ / ] t-uop-simd ;
+METHOD: t/ { number tensor } swap >float [ swap v/ ] [ swap / ] t-uop-simd ;
 
-! Divide two tensors or a tensor and a scalar
+! Mod two tensors or a tensor and a scalar
 multi-methods:GENERIC: t% ( x y -- tensor )
 METHOD: t% { tensor tensor } [ mod ] t-bop ;
 METHOD: t% { tensor number } >float [ mod ] curry t-uop ;
 METHOD: t% { number tensor } [ >float ] dip [ mod ] with t-uop ;
 
+! Sum together all elements in the tensor
+syntax:M: tensor sum vec>> 0 <simd-slice>
+    [ simd-slice>> 0 [ sum + ] reduce ]
+    [ end-slice>> sum ] bi + ;
+
+<PRIVATE
+
+! Also converts all elements of the sequence to tensors
+:: check-concat-shape ( seq -- seq )
+    ! Compute the bottom shape of the first element in the sequence
+    seq first { } >tensor dup :> empty-tensor
+    like shape>> dup :> first-shape rest :> rest-shape
+    seq [
+        ! Compute the bottom shape of this element
+        empty-tensor like dup shape>> rest
+        ! Compare; if they are different, throw an error
+        rest-shape = [ shape>> first-shape swap shape-mismatch-error ] unless
+    ] map ;
+
+! Also converts all elements of the sequence to tensors
+:: check-stack-shape ( seq -- seq )
+    ! Compute the bottom shape of the first element in the sequence
+    seq first { } >tensor dup :> empty-tensor
+    like shape>> :> first-shape
+    seq [
+        ! Compute the bottom shape of this element
+        empty-tensor like dup shape>>
+        ! Compare; if they are different, throw an error
+        first-shape = [ shape>> first-shape swap shape-mismatch-error ] unless
+    ] map ;
+
+! Also converts all elements of the sequence to tensors
+:: check-hstack-shape ( seq -- seq )
+    ! Compute the top shape of the first element in the sequence
+    seq first { } >tensor dup :> empty-tensor
+    like shape>> dup :> first-shape but-last :> but-last-shape
+    seq [
+        ! Compute the top shape of this element
+        empty-tensor like dup shape>> but-last
+        ! Compare; if they are different, throw an error
+        but-last-shape = [ shape>> first-shape swap shape-mismatch-error ] unless
+    ] map ;
+
+: final-hstack-shape ( seq -- shape )
+    ! Get the top part
+    dup first shape>> but-last swap
+    ! Compute the last part of the shape
+    [ shape>> last ] map sum 1array append ;
+
+! Returns a guide for hstacking where the index corresponds to the position
+! in the last dimension of the resulting tensor, and the elements are
+! { which tensor, len of tensor, index }
+:: hstack-guide ( seq -- guide )
+    ! Compute the list of last shape parts
+    seq [ shape>> last ] map :> last-dims
+    ! Curr tensor and index in tensor
+    0 0
+    last-dims sum [0,b) [
+        drop :> old-t-ind :> last-dims-i
+        last-dims-i last-dims nth
+        old-t-ind -
+        ! If we need to move onto the next tensor
+        [ last-dims-i 1 + 0 ]
+        ! Otherwise, stay with the current tensor
+        [ drop last-dims-i old-t-ind ] if-zero
+        2dup [ dup last-dims nth ] dip 3array
+        [ 1 + ] dip
+    ] map nip nip ;
+
+! Given a sequence of tensors, stack them across the last dimension
+:: hstack-unsafe ( tseq -- tensor )
+    ! Create the final tensor
+    tseq final-hstack-shape (tensor)
+    ! Compute the guide information
+    tseq hstack-guide dup length :> repeat :> guide
+    dup vec>> [
+        :> i drop
+        ! First get the correct tensor
+        i repeat /mod guide nth
+        dup first tseq nth
+        ! Now find the correct value within that tensor
+        [ [ second ] [ third ] bi -rot * + ] dip nth
+    ] map-index! drop ;
+
+! Also converts all elements of the sequence to tensors
+:: check-vstack-shape ( seq -- seq )
+    ! Compute the shape of the first sequence
+    seq first { } >tensor dup :> empty-tensor
+    like shape>> dup :> first-shape
+    ! Compute the index of the dimension to be stacked across
+    length 2 - :> vdim
+    seq [
+        ! Convert this element to a tensor
+        empty-tensor like dup
+        ! Compare the shapes
+        shape>> first-shape [ = ] 2map
+        vdim swap remove-nth
+        ! If the shapes differ in anything except the second-to-last dimension
+        ! this sequence cannot be vstacked
+        t [ = ] reduce [ shape>> first-shape swap shape-mismatch-error ] unless
+    ] map ;
+
+! Compute the shape after the vstack has been completed
+:: final-vstack-shape ( seq -- shape )
+    ! Compute the new second-to-last dimension
+    seq first dims 2 - :> vdim
+    seq 0 [ shape>> vdim swap nth + ] reduce
+    ! Combine it to create the new shape
+    seq first shape>> clone :> new-shape
+    vdim new-shape set-nth
+    new-shape ;
+
+! Combine the second-to-last and last dimensions of each tensor for stacking
+:: reshape-for-vstack ( seq -- seq )
+    seq first dims 2 - :> vdim
+    seq [
+        dup shape>> vdim cut product 1array append >>shape
+    ] map! ;
+
+
+PRIVATE>
+
+! Concatenation operations
+! Concatenate across the last dimension
+: t-concat ( seq -- tensor )
+    check-concat-shape
+    ! Compute the final shape
+    [
+        ! Compute the first dimension
+        [ 0 [ shape>> first + ] reduce 1array ]
+        ! Compute the other dimensions
+        [ first shape>> rest ] bi  append
+    ]
+    ! Concatenate all of the float-arrays
+    [ [ vec>> ] map concat ] bi <tensor> ;
+
+: stack ( seq -- tensor )
+    check-stack-shape
+    ! Compute the new shape
+    [ [ length 1array ] [ first shape>> ] bi append ]
+    ! Concatenate all of the tensors
+    [ [ vec>> ] map concat ] bi <tensor> ;
+
+: hstack ( seq -- tensor )
+    ! Check shape and convert everything to tensors
+    check-hstack-shape hstack-unsafe ;
+
+: vstack ( seq -- tensor )
+    ! Check shape and convert everything to tensors
+    check-vstack-shape
+    ! Find the final shape
+    [ final-vstack-shape ]
+    ! Reshape each of the tensors and stack
+    [ reshape-for-vstack hstack-unsafe ] bi
+    ! Finally reshape and return
+    swap >>shape ;
+
 <PRIVATE
 
 ! Check that the tensor has an acceptable shape for matrix multiplication
@@ -147,25 +470,101 @@ METHOD: t% { number tensor } [ >float ] dip [ mod ] with t-uop ;
     ! Take a slice
     rot <slice> ;
 
+! much quicker transpose for 2d tensors
+TYPED:: 2d-transpose ( tensor: tensor -- tensor': tensor )
+    tensor shape>> :> old-shape
+    tensor vec>> :> vec
+    old-shape first2 :> ( s1 s2 )
+    ! loop through new tensor
+    old-shape reverse dup product <iota> [
+        ! find y*b val in original tensor
+        s1 /mod s2 *
+        ! find x val in original tensor
+        [ s2 /mod ] dip + nip
+        ! get that index in original tensor
+        vec nth-unsafe
+    ] float-array{ } map-as <tensor> ;
+
 ! Perform matrix multiplication muliplying an
 ! mxn matrix with a nxp matrix
-TYPED:: 2d-matmul ( vec1: float-array start1: fixnum
-                    vec2: float-array start2: fixnum
-                    res: float-array start3: fixnum
+TYPED:: 2d-matmul ( vec1: float-array vec2: float-array res: float-array
                     m: fixnum n: fixnum p: fixnum -- )
     ! For each element in the range, we want to compute the dot product of the
     ! corresponding row and column
+    ! Transpose vec2 so that we are doing row * row (as opposed to row * col)
+    { n p } vec2 <tensor> 2d-transpose vec>> :> vec2
+
     m [ :> i
+        i n * :> in
+        i p * :> ip
+        vec1 in n make-subseq
         p [ :> j
-            0.0 ! This is the sum
-            n [ :> k
-                ! Add to the sum
-                i n * k + start1 + vec1 nth-unsafe
-                k p * j + start2 + vec2 nth-unsafe
-                * +
-            ] each-integer
-            i p * j + start3 + res set-nth-unsafe
+            dup
+            vec2 j n * n make-subseq
+            0.0 [ * + ] 2reduce
+            ip j + res set-nth-unsafe
         ] each-integer
+        drop
+    ] each-integer ;
+
+! Perform matrix multiplication multiplying an
+! mxn matrix with an nxp matrix
+TYPED:: 2d-matmul-mixed ( vec1: float-array vec2: float-array res: float-array
+                    m: fixnum n: fixnum p: fixnum start: fixnum -- )
+    ! For each element in the range, we want to compute the dot product of the
+    ! corresponding row and column
+    ! Transpose vec2 so that we are doing row * row (as opposed to row * col)
+    { n p } vec2 <tensor> 2d-transpose vec>> :> vec2
+
+    ! Compute the location in the float-array each 2D matrix will start at
+    start m n * * :> start1
+    start n p * * :> start2
+
+    m [ :> i
+        i n * :> in
+        4 4 in start1 + 4 mod - swap mod :> in4m
+        i p * :> ip
+        vec1 in n make-subseq :> sub1
+        sub1 in4m <simd-slice> :> slice1
+        p [ :> j
+            j n * :> jn
+            4 4 jn 4 mod - swap mod :> jn4m
+            vec2 jn n make-subseq
+            in4m jn4m = [
+                jn4m <simd-slice> slice1 swap
+                2dup [ first-slice>> ] bi@ 0.0 [ * + ] 2reduce
+                [ 2dup [ simd-slice>> ] bi@ ] dip [ vdot + ] 2reduce
+                [ [ end-slice>> ] bi@ ] dip [ * + ] 2reduce
+            ] [
+                sub1 swap
+                0.0 [ * + ] 2reduce
+            ] if
+            ip j + res set-nth-unsafe
+        ] each-integer
+    ] each-integer ;
+
+! Perform matrix multiplication multiplying an
+! mxn matrix with an nxp matrix
+! Should only be called when n is a multiple of 4
+TYPED:: 2d-matmul-simd ( vec1: float-array vec2: float-array
+                             res: float-array
+                             m: fixnum n: fixnum p: fixnum -- )
+    ! For each element in the range, we want to compute the dot product of the
+    ! corresponding row and column
+    ! Transpose vec2 so that we are doing row * row (as opposed to row * col)
+    { n p } vec2 <tensor> 2d-transpose vec>> :> vec2
+
+    m [ :> i
+        i n * :> in
+        i p * :> ip
+        vec1 in n make-subseq float-4 cast-array
+        p [ :> j
+            dup
+            vec2 j n * n make-subseq float-4 cast-array
+            0.0 [ vdot + ] 2reduce
+            ip j + res set-nth-unsafe
+        ] each-integer
+        drop
     ] each-integer ;
 
 PRIVATE>
@@ -187,47 +586,43 @@ TYPED:: matmul ( tensor1: tensor tensor2: tensor -- tensor3: tensor )
     top-shape { m p } append
 
     ! Now create the new float array to store the underlying result
-    dup product c:float (c-array) :> vec3
+    dup product (float-array) :> vec3
 
     ! Now update the tensor3 to contain the multiplied matricies
     top-prod [
         :> i
-        ! Compute vec1 and start1
-        tensor1 vec>> m n * i *
+
+        ! Compute vec1 using direct C arrays
+        tensor1 vec>> m n * i * m n * make-subseq
+
         ! Compute vec2 and start2
-        tensor2 vec>> n p * i *
+        tensor2 vec>> n p * i * n p * make-subseq
+
         ! Compute the result
-        vec3 m p * i *
+        vec3 m p * i * m p * make-subseq
         ! Push m, n, and p and multiply the arrays
-        m n p 2d-matmul
+        m n p
+        { { [ n 4 mod 0 = ] [ 2d-matmul-simd ] }
+          { [ n 4 < ] [ 2d-matmul ] }
+          [ i 2d-matmul-mixed ]
+        } cond
+
     ] each-integer
     vec3 <tensor> ;
 
-
-<PRIVATE
-! helper for transpose: turns a shape into a list of things
-! by which to multiply indices to get a full index
-: ind-mults ( shape -- seq )
-    <reversed> 1 swap [ swap [ * ] keep ] map nip ;
-
-! helper for transpose: given shape, flat index, & mults for
-! the shape, gives nd index
-: transpose-index ( i shape -- seq )
-    <reversed> [ /mod ] map reverse nip ;
-PRIVATE>
-
 ! Transpose an n-dimensional tensor by flipping the axes
 TYPED:: transpose ( tensor: tensor -- tensor': tensor )
-    tensor shape>> :> old-shape
-    tensor vec>> :> vec
-    old-shape reverse :> new-shape
-    ! check that the size is fine
-    new-shape product vec length assert=
-    old-shape ind-mults reverse :> mults
-    ! loop through new tensor
-    new-shape dup product <iota> [
-        ! find index in original tensor
-        old-shape mults [ [ /mod ] dip * ] 2map-sum nip
-        ! get that index in original tensor
-        vec nth-unsafe
-    ] float-array{ } map-as <tensor> ;
+    tensor shape>> length 2 =
+    [ tensor 2d-transpose ]
+    [ tensor shape>> :> old-shape
+        tensor vec>> :> vec
+        old-shape reverse :> new-shape
+        old-shape ind-mults :> mults
+        ! loop through new tensor
+        new-shape dup product <iota> [
+            ! find index in original tensor
+            old-shape mults [ [ /mod ] dip * ] 2map-sum nip
+            ! get that index in original tensor
+            vec nth-unsafe
+        ] float-array{ } map-as <tensor>
+    ] if ;