+
+
+
+ ml5.js Time Series Hand Gesture Train and Save
+
+
+
+
+
+
+
+
diff --git a/examples/timeSeries-hand-gestures/sketch.js b/examples/timeSeries-hand-gestures/sketch.js
new file mode 100644
index 00000000..c95392ff
--- /dev/null
+++ b/examples/timeSeries-hand-gestures/sketch.js
@@ -0,0 +1,192 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates training a Hand Gesture classifier through ml5.TimeSeries.
+ */
+
+let seqLength = 50;
+
+let handPose;
+let video;
+
+let hands = [];
+let sequence = [];
+
+let recordingFinished = false;
+let predictedWord = "";
+
+// UI variables
+let trainingWords = {};
+
+function preload() {
+ // Load the handPose model
+ handPose = ml5.handPose();
+
+ // setup the timeseries neural network
+ let options = {
+ outputs: ["label"],
+ task: "classification",
+ dataMode: "spatial",
+ debug: "true",
+ learningRate: 0.001,
+ };
+ model = ml5.timeSeries(options);
+}
+
+function setup() {
+ createCanvas(640, 480);
+
+ // setup video capture
+ video = createCapture(VIDEO);
+ video.size(640, 480);
+ video.hide();
+
+ // place UI elements
+ UI();
+
+ // use handpose model on video
+ handPose.detectStart(video, gotHands);
+}
+
+function draw() {
+ // draw video on frame
+ image(video, 0, 0, width, height);
+
+ drawPredictedWord();
+
+ // if hands are found then start recording
+ if (hands.length > 0 && recordingFinished == false) {
+ if (sequence.length <= seqLength) {
+ // get coordinates from hands (21 points)
+ handpoints = drawPoints();
+ sequence.push(handpoints);
+
+ // once sequence reaches the seqLength, add sequence as just one X value
+ } else if (sequence.length > 0) {
+ // get the training word from the input box
+ let train_word = nameField.value();
+
+ // if there is a word currently in the box then add data with that label
+ if (train_word.length > 0) {
+ // add data to the model
+ let target = { label: train_word };
+ model.addData(sequence, target);
+ trainingWordsUpdate();
+
+ // if there is no word in the box then classify instead
+ } else {
+ // classify the data
+ model.classify(sequence, gotResults);
+ }
+
+ // reset the sequence
+ sequence = [];
+ recordingFinished = true;
+ }
+
+ // can only record again when hand is out of frame
+ } else {
+ if (hands.length == 0) {
+ recordingFinished = false;
+ }
+ }
+}
+
+function drawPoints() {
+ let handpoints = [];
+ // iterate through both hands
+ for (let i = 0; i < hands.length; i++) {
+ let hand = hands[i];
+ for (let j = 0; j < hand.keypoints.length; j++) {
+ // access the keypoints in the hand
+ let keypoint = hand.keypoints[j];
+ handpoints.push(keypoint.x, keypoint.y);
+
+ fill(0, 255, 0);
+ noStroke();
+ circle(keypoint.x, keypoint.y, 5);
+ }
+ }
+ // assign to a different variable before clearing
+ let output = handpoints;
+ handpoints = [];
+
+ return output;
+}
+
+// Callback function for when handPose outputs data
+function gotHands(results) {
+ // save the output to the hands variable
+ hands = results;
+}
+
+function trainModelAndSave() {
+ model.normalizeData();
+ let options = {
+ epochs: 100,
+ };
+ model.train(options, whileTraining, finishedTraining);
+ nameField.value("");
+}
+
+function whileTraining(epoch) {
+ console.log(epoch);
+}
+
+function finishedTraining() {
+ console.log("finished training.");
+ model.save("model");
+}
+
+function gotResults(results) {
+ predictedWord = results[0].label;
+ console.log(predictedWord);
+ text(predictedWord, 200, 200);
+}
+
+function UI() {
+ nameField = createInput("");
+ nameField.attribute("placeholder", "Type the word to train");
+ nameField.position(110, 500);
+ nameField.size(250);
+
+ instructionP = createP(
+ 'I want to train:
1.) Type any word you want to pair with a gesture, e.g. "HELLO" 2.) Do the gesture associated to the word, make sure to do it until the points disappear. 3.) Move your hand out of the frame and repeat the gesture, do this multiple times 4.) Do the same for other words e.g. "BYE" 5.) Once all data is collected, press Train and Save
Tip: have at least 5 datasets for each word'
+ );
+ instructionP.style("width", "640px");
+ dataCountsP = createP("-> After the gesture a tally will appear here <-");
+
+ train_but = createButton("Train and Save");
+ train_but.mouseClicked(trainModelAndSave);
+ train_but.style("font-family", "Georgia");
+ train_but.style("font-size", "20px");
+ train_but.position(500, 490);
+}
+
+function drawPredictedWord() {
+ textSize(100);
+ fill(255);
+ text(predictedWord, 100, height / 2);
+}
+
+function trainingWordsUpdate() {
+ let tempWord = nameField.value();
+ console.log(Object.keys(trainingWords));
+ if (!(tempWord in trainingWords)) {
+ trainingWords[tempWord] = 1;
+ } else {
+ trainingWords[tempWord]++;
+ }
+
+ let counts = "";
+ let keys = Object.keys(trainingWords);
+ console.log("keys", keys);
+
+ for (let k of keys) {
+ counts += k + " : " + trainingWords[k] + " ";
+ }
+
+ dataCountsP.html(counts);
+}
diff --git a/examples/timeSeries-load-model-hand-gestures/index.html b/examples/timeSeries-load-model-hand-gestures/index.html
new file mode 100644
index 00000000..92363d69
--- /dev/null
+++ b/examples/timeSeries-load-model-hand-gestures/index.html
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+ ml5.js Time Series Hand Gesture load model
+
+
+
+
+
+
+
+
+ This example loads a model that is trained with ASL hand gestures for
+ Hello and Goodbye.
+
+
+ Instructions:
+ 1.) Use one hand to do a gesture in front of the camera
+ 2.) Wait for the points to disappear or the prediction appears on
+ screen
+ 3.) To predict again, remove your hand from the frame and do the gesture
+ again
+
+
diff --git a/examples/timeSeries-load-model-hand-gestures/model/model.json b/examples/timeSeries-load-model-hand-gestures/model/model.json
new file mode 100644
index 00000000..ad7c44f6
--- /dev/null
+++ b/examples/timeSeries-load-model-hand-gestures/model/model.json
@@ -0,0 +1 @@
+{"modelTopology":{"class_name":"Sequential","config":{"name":"sequential_1","layers":[{"class_name":"Conv1D","config":{"filters":8,"kernel_initializer":{"class_name":"VarianceScaling","config":{"scale":1,"mode":"fan_avg","distribution":"normal","seed":null}},"kernel_regularizer":null,"kernel_constraint":null,"kernel_size":[3],"strides":[1],"padding":"valid","dilation_rate":[1],"activation":"relu","use_bias":true,"bias_initializer":{"class_name":"Zeros","config":{}},"bias_regularizer":null,"activity_regularizer":null,"bias_constraint":null,"name":"conv1d_Conv1D1","trainable":true,"batch_input_shape":[null,51,42],"dtype":"float32"}},{"class_name":"MaxPooling1D","config":{"pool_size":[2],"padding":"valid","strides":[2],"name":"max_pooling1d_MaxPooling1D1","trainable":true}},{"class_name":"Conv1D","config":{"filters":16,"kernel_initializer":{"class_name":"VarianceScaling","config":{"scale":1,"mode":"fan_avg","distribution":"normal","seed":null}},"kernel_regularizer":null,"kernel_constraint":null,"kernel_size":[3],"strides":[1],"padding":"valid","dilation_rate":[1],"activation":"relu","use_bias":true,"bias_initializer":{"class_name":"Zeros","config":{}},"bias_regularizer":null,"activity_regularizer":null,"bias_constraint":null,"name":"conv1d_Conv1D2","trainable":true,"batch_input_shape":[null,51,42],"dtype":"float32"}},{"class_name":"MaxPooling1D","config":{"pool_size":[2],"padding":"valid","strides":[2],"name":"max_pooling1d_MaxPooling1D2","trainable":true}},{"class_name":"Flatten","config":{"name":"flatten_Flatten1","trainable":true}},{"class_name":"Dense","config":{"units":16,"activation":"relu","use_bias":true,"kernel_initializer":{"class_name":"VarianceScaling","config":{"scale":1,"mode":"fan_avg","distribution":"normal","seed":null}},"bias_initializer":{"class_name":"Zeros","config":{}},"kernel_regularizer":null,"bias_regularizer":null,"activity_regularizer":null,"kernel_constraint":null,"bias_constraint":null,"name":"dense_Dense1","trainable":true}},{"class_name"
:"Dense","config":{"units":2,"activation":"softmax","use_bias":true,"kernel_initializer":{"class_name":"VarianceScaling","config":{"scale":1,"mode":"fan_avg","distribution":"normal","seed":null}},"bias_initializer":{"class_name":"Zeros","config":{}},"kernel_regularizer":null,"bias_regularizer":null,"activity_regularizer":null,"kernel_constraint":null,"bias_constraint":null,"name":"dense_Dense2","trainable":true}}]},"keras_version":"tfjs-layers 4.8.0","backend":"tensor_flow.js"},"weightsManifest":[{"paths":["./hello.weights.bin"],"weights":[{"name":"conv1d_Conv1D1/kernel","shape":[3,42,8],"dtype":"float32"},{"name":"conv1d_Conv1D1/bias","shape":[8],"dtype":"float32"},{"name":"conv1d_Conv1D2/kernel","shape":[3,8,16],"dtype":"float32"},{"name":"conv1d_Conv1D2/bias","shape":[16],"dtype":"float32"},{"name":"dense_Dense1/kernel","shape":[176,16],"dtype":"float32"},{"name":"dense_Dense1/bias","shape":[16],"dtype":"float32"},{"name":"dense_Dense2/kernel","shape":[16,2],"dtype":"float32"},{"name":"dense_Dense2/bias","shape":[2],"dtype":"float32"}]}]}
\ No newline at end of file
diff --git a/examples/timeSeries-load-model-hand-gestures/model/model.weights.bin b/examples/timeSeries-load-model-hand-gestures/model/model.weights.bin
new file mode 100644
index 00000000..e57f1816
Binary files /dev/null and b/examples/timeSeries-load-model-hand-gestures/model/model.weights.bin differ
diff --git a/examples/timeSeries-load-model-hand-gestures/model/model_meta.json b/examples/timeSeries-load-model-hand-gestures/model/model_meta.json
new file mode 100644
index 00000000..1c0165c7
--- /dev/null
+++ b/examples/timeSeries-load-model-hand-gestures/model/model_meta.json
@@ -0,0 +1 @@
+{"inputUnits":[42],"outputUnits":2,"inputs":{"label_0":{"dtype":"number","min":4.151249399907168,"max":586.4725394909854},"label_1":{"dtype":"number","min":186.47223882383636,"max":496.34918695509003},"label_2":{"dtype":"number","min":12.818880217505907,"max":564.7860747522525},"label_3":{"dtype":"number","min":160.9460986889124,"max":478.89482602620234},"label_4":{"dtype":"number","min":20.681431005110262,"max":557.1173870582799},"label_5":{"dtype":"number","min":135.1274696802808,"max":454.0862355189599},"label_6":{"dtype":"number","min":29.375938053231934,"max":562.4826339023859},"label_7":{"dtype":"number","min":113.22511415628927,"max":455.15365538508894},"label_8":{"dtype":"number","min":37.27265551578051,"max":573.3838980891996},"label_9":{"dtype":"number","min":98.00531862273047,"max":473.4382341601794},"label_10":{"dtype":"number","min":2.706973037101564,"max":599.2858408346702},"label_11":{"dtype":"number","min":117.7350326456234,"max":453.76022921684716},"label_12":{"dtype":"number","min":11.635752695869659,"max":612.8243751678727},"label_13":{"dtype":"number","min":91.05094143918305,"max":481.6467136241304},"label_14":{"dtype":"number","min":22.9353041163117,"max":621.0127886598051},"label_15":{"dtype":"number","min":61.619264849841635,"max":499.63536096409143},"label_16":{"dtype":"number","min":33.53953084457643,"max":626.4181148091915},"label_17":{"dtype":"number","min":28.455718477478662,"max":512.7953875856006},"label_18":{"dtype":"number","min":-2.8065139589559984,"max":617.7828981986556},"label_19":{"dtype":"number","min":117.6886729722432,"max":459.5357193516273},"label_20":{"dtype":"number","min":3.7782929928570064,"max":633.7038985044576},"label_21":{"dtype":"number","min":86.77279076496669,"max":486.0751342925063},"label_22":{"dtype":"number","min":16.177018651157255,"max":642.8366376068107},"label_23":{"dtype":"number","min":51.687144639081325,"max":502.64037741142846},"label_24":{"dtype":"number","min":28.1461509145229,"max":650.241953637057
7},"label_25":{"dtype":"number","min":15.922382743702723,"max":516.9301399988833},"label_26":{"dtype":"number","min":-6.382516546058305,"max":630.7077663350849},"label_27":{"dtype":"number","min":120.16376158664924,"max":461.0881814514869},"label_28":{"dtype":"number","min":-1.4074379536407533,"max":647.5041251714117},"label_29":{"dtype":"number","min":90.58035685591811,"max":485.04491883378125},"label_30":{"dtype":"number","min":10.174906800459325,"max":658.4893875478738},"label_31":{"dtype":"number","min":71.76407331703523,"max":500.55112323964187},"label_32":{"dtype":"number","min":21.11718120932074,"max":668.566957655395},"label_33":{"dtype":"number","min":39.557348432978586,"max":514.4287318106208},"label_34":{"dtype":"number","min":-7.9534800405596595,"max":641.3232619371444},"label_35":{"dtype":"number","min":126.31599791044414,"max":465.6320514399833},"label_36":{"dtype":"number","min":-3.8369034650104927,"max":658.2044139172733},"label_37":{"dtype":"number","min":103.73604938021917,"max":481.03793223993495},"label_38":{"dtype":"number","min":3.7075645592075435,"max":668.8017566330357},"label_39":{"dtype":"number","min":88.76136006394765,"max":494.63688258092407},"label_40":{"dtype":"number","min":6.9609311353376135,"max":676.9525074586147},"label_41":{"dtype":"number","min":75.97401514052241,"max":506.7948506427954}},"outputs":{"label":{"dtype":"string","min":0,"max":1,"uniqueValues":["hello","bye"],"legend":{"hello":[1,0],"bye":[0,1]}}},"isNormalized":true,"seriesShape":[51,42]}
\ No newline at end of file
diff --git a/examples/timeSeries-load-model-hand-gestures/sketch.js b/examples/timeSeries-load-model-hand-gestures/sketch.js
new file mode 100644
index 00000000..e8e45085
--- /dev/null
+++ b/examples/timeSeries-load-model-hand-gestures/sketch.js
@@ -0,0 +1,132 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates loading a Hand Gesture classifier through ml5.TimeSeries.
+ * This example is trained with the ASL gestures for Hello and Goodbye
+ *
+ * Reference to sign hello and goodbye in ASL:
+ * Hello: https://babysignlanguage.com/dictionary/hello/
+ * Goodbye: https://babysignlanguage.com/dictionary/goodbye/
+ */
+
+// change this to make the recording longer
+let seqLength = 50;
+
+let handPose;
+let video;
+let hands = [];
+let sequence = [];
+let recordingFinished = false;
+let predictedWord = "";
+
+function preload() {
+ // Load the handPose model
+ handPose = ml5.handPose();
+
+ // setup the timeseries neural network
+ let options = {
+ task: "classification",
+ dataMode: "spatial",
+ spatialData: "true",
+ };
+
+ model = ml5.timeSeries(options);
+}
+
+function setup() {
+ let canvas = createCanvas(640, 480);
+ canvas.parent("canvasDiv");
+
+ // create video capture
+ video = createCapture(VIDEO);
+ video.size(640, 480);
+ video.hide();
+
+ handPose.detectStart(video, gotHands);
+
+ // setup the model files to load
+ let modelDetails = {
+ model: "model/model.json",
+ metadata: "model/model_meta.json",
+ weights: "model/model.weights.bin",
+ };
+
+ // load the model and call modelLoaded once finished
+ model.load(modelDetails, modelLoaded);
+}
+// call back for load model
+function modelLoaded() {
+ console.log("model loaded!");
+}
+
+function draw() {
+ // draw video on the canvas
+ image(video, 0, 0, width, height);
+
+ // put the text on screen after a prediction
+ placePredictedText();
+
+ // if hands are found then start recording
+ if (hands.length > 0 && recordingFinished == false) {
+ if (sequence.length <= seqLength) {
+ // get coordinates from hands (21 points)
+ handpoints = drawPoints();
+ sequence.push(handpoints);
+
+ // once sequence reaches the seqLength, add sequence as just one X value
+ } else if (sequence.length > 0) {
+ // classify based on the collected data
+ model.classify(sequence, gotResults);
+
+ // reset the sequence
+ sequence = [];
+ recordingFinished = true;
+ }
+
+ // can only record again when hand is out of frame
+ } else {
+ if (hands.length == 0) {
+ recordingFinished = false;
+ }
+ }
+}
+
+// draw the points on the hands
+function drawPoints() {
+ let handpoints = [];
+ for (let i = 0; i < hands.length; i++) {
+ let hand = hands[i];
+ for (let j = 0; j < hand.keypoints.length; j++) {
+ let keypoint = hand.keypoints[j];
+ fill(0, 255, 0);
+ noStroke();
+ circle(keypoint.x, keypoint.y, 5);
+ handpoints.push(keypoint.x, keypoint.y);
+ }
+ }
+ let output = handpoints;
+ handpoints = [];
+ return output;
+}
+
+// Callback function for when handPose outputs data
+function gotHands(results) {
+ // save the output to the hands variable
+ hands = results;
+}
+
+// call back for accessing the results
+function gotResults(results) {
+ predictedWord = results[0].label;
+ console.log(predictedWord);
+ text(predictedWord, 100, 100);
+}
+
+// for drawing text on screen
+function placePredictedText() {
+ textSize(100);
+ fill(255);
+ text(predictedWord, 100, height / 2);
+}
diff --git a/examples/timeSeries-train-mouse-gesture RDP/index.html b/examples/timeSeries-train-mouse-gesture RDP/index.html
new file mode 100644
index 00000000..6407c4eb
--- /dev/null
+++ b/examples/timeSeries-train-mouse-gesture RDP/index.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+ ml5.js Time Series Train Mouse Gesture classifier Example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/timeSeries-train-mouse-gesture RDP/sketch.js b/examples/timeSeries-train-mouse-gesture RDP/sketch.js
new file mode 100644
index 00000000..31b54d27
--- /dev/null
+++ b/examples/timeSeries-train-mouse-gesture RDP/sketch.js
@@ -0,0 +1,140 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates how to train your own mouse gesture classifier through ml5.TimeSeries.
+ */
+
+let model;
+
+let currShape = "circle";
+let state = "collection";
+
+let datapoints;
+let sequence = [];
+let targetSequence = 30;
+let recCircle, recSquare, trainBut;
+
+function preload() {
+ let options = {
+ inputs: ["x", "y"],
+ outputs: ["label"],
+ task: "classification",
+ dataMode: "spatial",
+ debug: "true",
+ learningRate: 0.005,
+ };
+
+ model = ml5.timeSeries(options);
+}
+
+function setup() {
+ // p5 js elements
+ let canvas = createCanvas(600, 400);
+ canvas.parent("canvasDiv");
+ background(220);
+ UI();
+}
+
+function draw() {
+ // record data when the mouse is pressed inside the canvas
+ if (mouseIsPressed && mouseY < height && mouseX < width) {
+ // draw lines through coordinates
+ line(pmouseX, pmouseY, mouseX, mouseY);
+ let inputs = { x: mouseX, y: mouseY };
+ sequence.push(inputs);
+ }
+}
+
+// code to signify drawing can be done again
+function mouseReleased() {
+ if (mouseY < height && mouseX < width) {
+ // if state is collection, add whole sequence as X, and shape as Y
+ if (state == "collection") {
+ let target = { label: currShape };
+ let paddedCoordinates = model.padCoordinates(sequence, targetSequence);
+ model.addData(paddedCoordinates, target);
+ clearScreen();
+ } else if (state == "prediction") {
+ let paddedCoordinates = model.padCoordinates(sequence, targetSequence);
+ model.classify(paddedCoordinates, gotResults);
+ clearScreen();
+ }
+ }
+ // reset the sequence
+ sequence = [];
+}
+
+// cleanup screen and removed drawn elements, add helpful text
+function clearScreen() {
+ background(220);
+ textSize(20);
+ fill(0);
+ text(state + " : " + currShape, 50, 50);
+}
+
+function trainModel() {
+ // normalize Data first before Training
+ model.normalizeData();
+
+ // set the number of epochs for training
+ let options = {
+ epochs: 40,
+ };
+ model.train(options, whileTraining, finishedTraining);
+
+ background(220);
+ state = "training";
+ text("Training...", 50, 50);
+
+ recCircle.attribute("disabled", true);
+ recSquare.attribute("disabled", true);
+ trainBut.attribute("disabled", true);
+}
+
+function whileTraining(epoch, loss) {
+ console.log(epoch);
+}
+
+function finishedTraining() {
+ background(220);
+ text("Training Finished, Draw again to predict", 50, 50);
+ state = "prediction";
+}
+
+function gotResults(results) {
+ // console.log("results", results);
+ let label = results[0].label;
+
+ currShape = label;
+}
+
+////////////// UI Elements ////////////
+function UI() {
+ textSize(20);
+
+ recCircle = select("#recCircle");
+ recSquare = select("#recSquare");
+ trainBut = select("#trainBut");
+
+ recCircle.mouseClicked(recordCircle);
+ recSquare.mouseClicked(recordSquare);
+ trainBut.mouseClicked(trainModel);
+
+ function recordCircle() {
+ state = "collection";
+ currShape = "circle";
+
+ background(220);
+ text(state + " : " + currShape, 50, 50);
+ }
+
+ function recordSquare() {
+ state = "collection";
+ currShape = "square";
+
+ background(220);
+ text(state + " : " + currShape, 50, 50);
+ }
+}
diff --git a/examples/timeSeries-train-mouse-gesture/index.html b/examples/timeSeries-train-mouse-gesture/index.html
new file mode 100644
index 00000000..acdfde38
--- /dev/null
+++ b/examples/timeSeries-train-mouse-gesture/index.html
@@ -0,0 +1,43 @@
+
+
+
+
+
+
+
+ ml5.js Time Series Train Mouse Gesture classifier Example
+
+
+
+
+
+
+
+
+
+
+
+
+ Instructions:
+ 1.) Press the "Record Circle" or "Record Square" and start drawing until
+ the ink runs out
+ 2.) Draw multiple times for each shape 3.) Press "Train" and wait for
+ training to finish
+ 4.) Draw again to predict drawn shape
+ Tip: Collect at least 5 drawings for each:
+
+
+
+
+
diff --git a/examples/timeSeries-train-mouse-gesture/sketch.js b/examples/timeSeries-train-mouse-gesture/sketch.js
new file mode 100644
index 00000000..c0139c65
--- /dev/null
+++ b/examples/timeSeries-train-mouse-gesture/sketch.js
@@ -0,0 +1,204 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates how to train your own mouse gesture classifier through ml5.TimeSeries.
+ */
+
+let model;
+let counts = {
+ circleDataCount: 0,
+ squareDataCount: 0,
+};
+let currShape = "circle";
+let state = "collection";
+
+let pressedOnce = true;
+let frameCount = 0;
+let datapoints;
+let sequence = [];
+let recCircle, recSquare, trainBut;
+
+// Training Data lengths
+let ink_multiplier = 3;
+let num_seq = 20;
+
+function preload() {
+ let options = {
+ inputs: ["x", "y"],
+ outputs: ["label"],
+ task: "classification",
+ spatialData: "true",
+ debug: "true",
+ learningRate: 0.005,
+ };
+
+ model = ml5.timeSeries(options);
+}
+
+function setup() {
+ // p5 js elements
+ let canvas = createCanvas(600, 400);
+ canvas.parent("canvasDiv");
+ background(220);
+ UI();
+
+ // set framerate to constant rate for constant data collection
+ frameRate(60);
+}
+
+function draw() {
+ // record data when the mouse is pressed inside the canvas
+ if (mouseIsPressed && pressedOnce && mouseY < 400 && mouseX < 600) {
+ // draw lines through coordinates
+ line(pmouseX, pmouseY, mouseX, mouseY);
+
+ frameCount++;
+
+ let inputs = { x: mouseX, y: mouseY };
+
+ sequence.push(inputs);
+
+ if (sequence.length == num_seq * ink_multiplier) {
+ pressedOnce = false;
+ frameCount = 0;
+
+ // if state is collection, add whole sequence as X, and shape as Y
+ if (state == "collection") {
+ let target = { label: currShape };
+ model.addData(sequence, target);
+
+ // add to the count for each
+ counts[currShape + "DataCount"] += 1;
+ console.log(counts);
+ updateDataCountUI();
+
+ // reset the screen
+ background(220);
+ textSize(20);
+ fill(0);
+ text("Recording: " + currShape, 50, 50);
+ // if prediction, classify using the whole sequence
+ } else if (state == "prediction") {
+ model.classify(sequence, gotResults);
+
+ background(220);
+ }
+
+ // reset the sequence
+ sequence = [];
+ }
+ }
+ inkBar();
+}
+
+function trainModel() {
+ // normalize Data first before Training
+ model.normalizeData();
+
+ // set the number of epochs for training
+ let options = {
+ epochs: 40,
+ };
+ model.train(options, whileTraining, finishedTraining);
+
+ background(220);
+ state = "training";
+ text("Training...", 50, 50);
+ recCircle.style("background-color", "");
+ recSquare.style("background-color", "");
+ trainBut.style("background-color", "#f0f0f0");
+}
+
+function whileTraining(epoch, loss) {
+ console.log(epoch);
+}
+
+function finishedTraining() {
+ background(220);
+ text("Training Finished, Draw again to predict", 50, 50);
+ state = "prediction";
+}
+
+function gotResults(results) {
+ let label = results[0].label;
+
+ fill(0);
+ text("Prediction: " + label, 50, 50);
+}
+
+// code to signify drawing can be done again
+function mouseReleased() {
+ pressedOnce = true;
+}
+
+////////////// UI Elements ////////////
+
+// code to visualize how much ink left
+function inkBar() {
+ datapoints = map(frameCount, 0, ink_multiplier * num_seq, 0, num_seq);
+
+ bar_height = 250;
+ height_miltiplier = bar_height / num_seq;
+ push();
+ fill(0);
+ textSize(15);
+ text("Ink:", 550, 90);
+ rect(550, 100, 25, num_seq * height_miltiplier);
+ fill(255);
+ rect(550, 100, 25, datapoints * height_miltiplier);
+ pop();
+}
+
+// code for UI elements such as buttons
+function UI() {
+ textSize(20);
+
+ recCircle = select("#recCircle");
+ recSquare = select("#recSquare");
+ trainBut = select("#trainBut");
+
+ recCircle.mouseClicked(recordCircle);
+ recCircle.style("background-color", "#f0f0f0");
+ recSquare.mouseClicked(recordSquare);
+ trainBut.mouseClicked(trainModel);
+
+ function recordCircle() {
+ state = "collection";
+ currShape = "circle";
+
+ background(220);
+ text("Recording: circle", 50, 50);
+ recCircle.style("background-color", "#f0f0f0");
+ recSquare.style("background-color", "");
+ trainBut.style("background-color", "");
+ }
+
+ function recordSquare() {
+ state = "collection";
+ currShape = "square";
+
+ background(220);
+ text("Recording: square", 50, 50);
+ recCircle.style("background-color", "");
+ recSquare.style("background-color", "#f0f0f0");
+ trainBut.style("background-color", "");
+ }
+ dataCountsP = createP(
+ "circle data: " +
+ counts.circleDataCount +
+ " square data: " +
+ counts.squareDataCount
+ );
+}
+
+// Update the HTML UI with the current data counts
+function updateDataCountUI() {
+ dataCountsP.html(
+ "circle data: " +
+ counts.circleDataCount +
+ " square data: " +
+ counts.squareDataCount
+ );
+}
diff --git a/examples/timeSeries-weather-prediction/index.html b/examples/timeSeries-weather-prediction/index.html
new file mode 100644
index 00000000..72a3ffdd
--- /dev/null
+++ b/examples/timeSeries-weather-prediction/index.html
@@ -0,0 +1,34 @@
+
+
+
+
+
+
+
+ ml5.js Time Series Weather Prediction Example
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/examples/timeSeries-weather-prediction/sketch.js b/examples/timeSeries-weather-prediction/sketch.js
new file mode 100644
index 00000000..a21e8754
--- /dev/null
+++ b/examples/timeSeries-weather-prediction/sketch.js
@@ -0,0 +1,183 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates training a weather predictor through ml5.TimeSeries.
+ */
+
+let model;
+let data;
+let data_index;
+
+let seq = [];
+let targetLength = 5;
+
+// load JSON data with same formatting from the internet, this means
+// loadData() cannot yet be used as it is formatted differently
+function preload() {
+ json_data = loadJSON("weather_data.json");
+
+ // set the options to initialize timeSeries Neural Network
+ let options = {
+ task: "regression",
+ dataMode: "linear",
+ debug: "true",
+ learningRate: 0.01,
+ output: ["label"],
+ };
+ model = ml5.timeSeries(options);
+}
+
+function setup() {
+ data = json_data.data;
+ createCanvas(640, 400);
+ background(220);
+
+ // iterate through data using simple sliding window algorithm
+ data_index = targetLength - 1;
+ while (data_index < data.length - 1) {
+ // get the values [targetLength] steps before current index, collect and add
+ for (let x = targetLength - 1; x >= 0; x--) {
+ let curr = data[data_index - x];
+ // choose from the raw data what you want to feed to the model
+ let inputs = {
+ temperature: curr.temperature,
+ humidity: curr.humidity,
+ windSpeed: curr.wind_speed,
+ pressure: curr.pressure,
+ precipitation: curr.precipitation,
+ };
+
+ // once collected all data into an array to make it into a sequence
+ // the format of the sequence is like this [{},{},...,{}]
+ // this is the X value
+ seq.push(inputs);
+ }
+
+ // the Y value to train is the value that comes after the sequence
+ let target = data[data_index + 1];
+
+ // select the outputs you want to get, multiple outputs are possible, we want to predict all values
+ let output = {
+ temperature: target.temperature,
+ humidity: target.humidity,
+ windSpeed: target.wind_speed,
+ pressure: target.pressure,
+ precipitation: target.precipitation,
+ };
+
+ // feed data into the model
+ model.addData(seq, output);
+
+ // reset the sequence so new values can be added
+ seq = [];
+
+ // iterate through the whole dataset moving the sliding window in each iteration
+ data_index++;
+ }
+ // normalize the data after adding everything
+ model.normalizeData();
+
+ // put a button to train and predict
+ trainAndPredictButtons();
+}
+
+// train data
+function trainData() {
+ model.normalizeData();
+ let options = {
+ epochs: 100,
+ };
+ model.train(options, finishedTraining);
+}
+
+function finishedTraining() {
+ console.log("Training Done!");
+}
+
+// predict data
+function predictData() {
+ // set the seq to empty
+ seq = [];
+
+ // choose the most recent sequences
+ let latest = data.slice(-targetLength);
+ for (let x = 0; x < targetLength; x++) {
+ let curr = latest[x];
+ // select the same properties for inputs
+ let inputs = {
+ temperature: curr.temperature,
+ humidity: curr.humidity,
+ windSpeed: curr.wind_speed,
+ pressure: curr.pressure,
+ precipitation: curr.precipitation,
+ };
+ // add them to one array to make them a sequence
+ seq.push(inputs);
+ }
+
+ // use the sequence to predict
+ model.predict(seq, gotResults);
+}
+
+// put the new data in the dataset so this will be considered for any new predictions
+function gotResults(results) {
+ console.log(results);
+ addNewData(results); //optional but will be helpful in using new prediction as part of dataset
+}
+
+// code for adding new data to the dataset to be used for future prediction
+function addNewData(results) {
+ (new_values = {
+ date: " for the next hour",
+ temperature: parseFloat(results[0].value.toFixed(2)), // get string convert to float and round to 2 decimal points
+ humidity: parseFloat(results[1].value.toFixed(2)),
+ wind_speed: parseFloat(results[2].value.toFixed(2)),
+ pressure: parseFloat(results[3].value.toFixed(2)),
+ precipitation: parseFloat(results[4].value.toFixed(2)),
+ }),
+ data.push(new_values);
+}
+
+function draw() {
+ background(220);
+ textAlign(CENTER, CENTER);
+ textSize(16);
+
+ // Draw the table headers
+ let headers = [
+ "Date",
+ "Temperature",
+ "Humidity",
+ "Wind Speed",
+ "Pressure",
+ "Precipitation",
+ ];
+ let xOffset = 70;
+ let yOffset = 100;
+ for (let i = 0; i < headers.length; i++) {
+ text(headers[i], xOffset + i * 100, yOffset);
+ }
+
+ // Display the last 5 entries from the dataset
+ let latest = data.slice(-targetLength);
+ for (let i = 0; i < latest.length; i++) {
+ let entry = latest[i];
+ text(entry.date.slice(5), xOffset, yOffset + (i + 1) * 30);
+ text(entry.temperature, xOffset + 100, yOffset + (i + 1) * 30);
+ text(entry.humidity, xOffset + 200, yOffset + (i + 1) * 30);
+ text(entry.wind_speed, xOffset + 300, yOffset + (i + 1) * 30);
+ text(entry.pressure, xOffset + 400, yOffset + (i + 1) * 30);
+ text(entry.precipitation, xOffset + 500, yOffset + (i + 1) * 30);
+ }
+}
+
+// get buttons and assign functions (UI)
+function trainAndPredictButtons() {
+ train_but = select("#train_but");
+ train_but.mouseClicked(trainData);
+
+ pred_but = select("#pred_but");
+ pred_but.mouseClicked(predictData);
+}
diff --git a/examples/timeSeries-weather-prediction/weather_data.json b/examples/timeSeries-weather-prediction/weather_data.json
new file mode 100644
index 00000000..a45429a3
--- /dev/null
+++ b/examples/timeSeries-weather-prediction/weather_data.json
@@ -0,0 +1,196 @@
+{
+ "data": [
+ {
+ "date": "2024-08-01T00:00:00Z",
+ "temperature": 28.0,
+ "humidity": 50,
+ "wind_speed": 3.0,
+ "pressure": 1015,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T01:00:00Z",
+ "temperature": 27.5,
+ "humidity": 52,
+ "wind_speed": 4.0,
+ "pressure": 1014,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T02:00:00Z",
+ "temperature": 27.0,
+ "humidity": 55,
+ "wind_speed": 5.0,
+ "pressure": 1013,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T03:00:00Z",
+ "temperature": 26.5,
+ "humidity": 60,
+ "wind_speed": 6.0,
+ "pressure": 1012,
+ "precipitation": 2.0
+ },
+ {
+ "date": "2024-08-01T04:00:00Z",
+ "temperature": 26.0,
+ "humidity": 65,
+ "wind_speed": 8.0,
+ "pressure": 1010,
+ "precipitation": 5.0
+ },
+ {
+ "date": "2024-08-01T05:00:00Z",
+ "temperature": 25.5,
+ "humidity": 70,
+ "wind_speed": 10.0,
+ "pressure": 1008,
+ "precipitation": 10.0
+ },
+ {
+ "date": "2024-08-01T06:00:00Z",
+ "temperature": 25.0,
+ "humidity": 75,
+ "wind_speed": 12.0,
+ "pressure": 1006,
+ "precipitation": 15.0
+ },
+ {
+ "date": "2024-08-01T07:00:00Z",
+ "temperature": 24.5,
+ "humidity": 80,
+ "wind_speed": 14.0,
+ "pressure": 1004,
+ "precipitation": 20.0
+ },
+ {
+ "date": "2024-08-01T08:00:00Z",
+ "temperature": 24.0,
+ "humidity": 85,
+ "wind_speed": 15.0,
+ "pressure": 1002,
+ "precipitation": 25.0
+ },
+ {
+ "date": "2024-08-01T09:00:00Z",
+ "temperature": 23.5,
+ "humidity": 90,
+ "wind_speed": 17.0,
+ "pressure": 1000,
+ "precipitation": 30.0
+ },
+ {
+ "date": "2024-08-01T10:00:00Z",
+ "temperature": 23.0,
+ "humidity": 95,
+ "wind_speed": 20.0,
+ "pressure": 998,
+ "precipitation": 35.0
+ },
+ {
+ "date": "2024-08-01T11:00:00Z",
+ "temperature": 24.0,
+ "humidity": 85,
+ "wind_speed": 10.0,
+ "pressure": 1005,
+ "precipitation": 10.0
+ },
+ {
+ "date": "2024-08-01T12:00:00Z",
+ "temperature": 25.0,
+ "humidity": 75,
+ "wind_speed": 7.0,
+ "pressure": 1010,
+ "precipitation": 5.0
+ },
+ {
+ "date": "2024-08-01T13:00:00Z",
+ "temperature": 26.0,
+ "humidity": 65,
+ "wind_speed": 5.0,
+ "pressure": 1013,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T14:00:00Z",
+ "temperature": 27.0,
+ "humidity": 60,
+ "wind_speed": 4.0,
+ "pressure": 1015,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T15:00:00Z",
+ "temperature": 28.0,
+ "humidity": 50,
+ "wind_speed": 3.0,
+ "pressure": 1018,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T16:00:00Z",
+ "temperature": 27.0,
+ "humidity": 55,
+ "wind_speed": 4.0,
+ "pressure": 1015,
+ "precipitation": 0.0
+ },
+ {
+ "date": "2024-08-01T17:00:00Z",
+ "temperature": 26.0,
+ "humidity": 60,
+ "wind_speed": 5.0,
+ "pressure": 1012,
+ "precipitation": 1.0
+ },
+ {
+ "date": "2024-08-01T18:00:00Z",
+ "temperature": 25.0,
+ "humidity": 70,
+ "wind_speed": 7.0,
+ "pressure": 1009,
+ "precipitation": 5.0
+ },
+ {
+ "date": "2024-08-01T19:00:00Z",
+ "temperature": 24.0,
+ "humidity": 80,
+ "wind_speed": 10.0,
+ "pressure": 1005,
+ "precipitation": 10.0
+ },
+ {
+ "date": "2024-08-01T20:00:00Z",
+ "temperature": 23.0,
+ "humidity": 90,
+ "wind_speed": 12.0,
+ "pressure": 1002,
+ "precipitation": 15.0
+ },
+ {
+ "date": "2024-08-01T21:00:00Z",
+ "temperature": 22.0,
+ "humidity": 95,
+ "wind_speed": 15.0,
+ "pressure": 999,
+ "precipitation": 20.0
+ },
+ {
+ "date": "2024-08-01T22:00:00Z",
+ "temperature": 21.0,
+ "humidity": 98,
+ "wind_speed": 18.0,
+ "pressure": 995,
+ "precipitation": 25.0
+ },
+ {
+ "date": "2024-08-01T23:00:00Z",
+ "temperature": 20.0,
+ "humidity": 100,
+ "wind_speed": 20.0,
+ "pressure": 992,
+ "precipitation": 30.0
+ }
+ ]
+}
diff --git a/package.json b/package.json
index 63b88db7..cd90354e 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "ml5",
- "version": "1.0.1",
+ "version": "1.0.2",
"description": "A friendly machine learning library for the web.",
"main": "dist/ml5.min.js",
"scripts": {
@@ -11,7 +11,8 @@
"postinstall": "patch-package",
"test": "jest --config tests/jest.config.js",
"upload-examples": "node scripts/uploadExamples.js",
- "update-p5-version": "node scripts/updateP5Version.js"
+ "update-p5-version": "node scripts/updateP5Version.js",
+ "update-readme": "node scripts/updateReadme.js"
},
"files": [
"dist"
@@ -71,5 +72,11 @@
"@babel/preset-env"
]
},
- "prettier": {}
+ "prettier": {},
+ "packageManager": "yarn@4.3.1",
+ "engines": {
+ "node": "^20.15.1",
+ "yarn": "^4.3.1",
+ "npm": "please-use-yarn"
+ }
}
diff --git a/src/TimeSeries/index.js b/src/TimeSeries/index.js
new file mode 100644
index 00000000..62af28fe
--- /dev/null
+++ b/src/TimeSeries/index.js
@@ -0,0 +1,733 @@
+import * as tf from "@tensorflow/tfjs";
+import callCallback from "../utils/callcallback";
+import handleArguments from "../utils/handleArguments";
+import NeuralNetwork from "./timeSeries";
+import NeuralNetworkData from "./timeSeriesData";
+import nnUtils from "../NeuralNetwork/NeuralNetworkUtils";
+import NeuralNetworkVis from "../NeuralNetwork/NeuralNetworkVis";
+
+import setBackend from "../utils/setBackend";
+
+import tsUtils from "./timeSeriesUtils";
+
// Default configuration for a TimeSeries model; user-supplied options
// are spread on top of these in the constructor.
const DEFAULTS = {
  inputs: [], // input labels, or a number of inputs
  outputs: [], // output labels
  dataUrl: null, // optional csv/json data to load at init
  modelUrl: null, // optional pre-trained model to load at init
  layers: [], // custom layer definitions; defaults used when empty
  task: null, // "classification" or "regression"
  dataMode: "linear", // "linear" (sequences) or "spatial" (coordinates)
  debug: false, // truthy shows tfvis training graphs / model summary
  learningRate: 0.2,
  hiddenUnits: 16,
};
+
+class timeSeries {
+ constructor(options, callback) {
+ this.options =
+ {
+ ...DEFAULTS,
+ ...options,
+ } || DEFAULTS;
+
+ this.neuralNetwork = new NeuralNetwork();
+ this.neuralNetworkData = new NeuralNetworkData();
+ this.neuralNetworkVis = new NeuralNetworkVis();
+
+ this.data = {
+ training: [],
+ };
+
+ this.init = this.init.bind(this);
+
+ this.ready = callCallback(this.init(), callback);
+ }
+
  /**
   * Async initialization: selects the TF backend, waits for TF to be
   * ready, then loads any data or model URL given in the options.
   * @returns {Promise<timeSeries>} this instance, once ready
   */
  async init() {
    // workaround for Error
    // NOTE(review): forces the webgl backend unconditionally — confirm
    // this is still required and doesn't override a user-chosen backend.
    setBackend("webgl");

    await tf.ready();
    if (this.options.dataUrl) {
      await this.loadDataFromUrl(this.options.dataUrl);
    } else if (this.options.modelUrl) {
      await this.load(this.options.modelUrl);
    }
    return this;
  }
+ /**
+ * ////////////////////////////////////////////////////////////
+ * Add and Format Data
+ * ////////////////////////////////////////////////////////////
+ */
+
+ /* adding data: can only accept the following formats:
+ - for xInputs:
+ 1. Sequence of objects (array of objects)
+ [{x: , y: },{x: , y: },{x: , y: },{x: , y: }]
+ 2. Sequence of arrays (array of array, order matters)
+ [[],[],[],[]]
+ 3. Sequence of values (inputlabels should be provided by user)
+ [[,,,,,]] e.g. shape = {inputLabels: ['x','y']} will become [{x: , y: },{x: , y: },{x: , y: },{x: , y: }]
+
+ - for yInputs:
+ 1. similar to neural network, so use same logic
+
+ - at the end of the adding data, the data is formatted to a sequence of objects similar to 1 of xinputs
+
+ - changed data Modality into spatialData so its a boolean, true if coordinate data and false if normal lstm
+ */
+
  /**
   * Adds one training sequence to the raw dataset.
   * @param {*} xInputs - a sequence: array of objects, array of arrays,
   *   or flat values (labels supplied via options / this.options)
   * @param {*} yInputs - target label/value(s); same formats as the
   *   NeuralNetwork class accepts
   * @param {Object} [options] - optional format hints (e.g. inputLabels)
   */
  addData(xInputs, yInputs, options = null) {
    // 1. verify format between the three possible types of xinputs
    const xs = tsUtils.verifyAndFormatInputs(xInputs, options, this.options);

    // 2. format the yInput - same logic as NN class
    const ys = tsUtils.verifyAndFormatOutputs(yInputs, options, this.options);

    // 3. add data to raw
    this.neuralNetworkData.addData(xs, ys);
  }
+
+ /**
+ * ////////////////////////////////////////////////////////////
+ * Train Data
+ * ////////////////////////////////////////////////////////////
+ */
+
  /**
   * Trains the model. Flexible signature:
   *   train(), train(cb), train(options, cb),
   *   train(options, whileTrainingCb, cb), train(whileTrainingCb, cb)
   * @returns {Promise} resolves when training finishes
   */
  async train(optionsOrCallback, optionsOrWhileTraining, callback) {
    let options = {};
    let whileTrainingCb = null;
    let finishedTrainingCb;

    if (typeof optionsOrCallback === "object") {
      options = optionsOrCallback;
      if (typeof optionsOrWhileTraining === "function") {
        // NOTE(review): the whileTraining callback is discarded (set to
        // null) when options are also supplied — confirm this is
        // intentional and not a lost feature.
        whileTrainingCb = null;
        finishedTrainingCb = callback || optionsOrWhileTraining;
      } else {
        finishedTrainingCb = optionsOrWhileTraining;
      }
    } else if (typeof optionsOrCallback === "function") {
      whileTrainingCb = optionsOrCallback;
      finishedTrainingCb = optionsOrWhileTraining;
    } else {
      finishedTrainingCb = optionsOrCallback;
    }

    return callCallback(
      this.trainInternal(options, whileTrainingCb),
      finishedTrainingCb
    );
  }
+
+ async trainInternal(_options, whileTrainingCb) {
+ const options = {
+ epochs: 10,
+ batchSize: 32,
+ validationSplit: 0.1,
+ whileTraining: null,
+ ..._options,
+ };
+
+ // if debug mode is true, then use tf vis
+ if (this.options.debug === true || this.options.debug === "true") {
+ options.whileTraining = [
+ this.neuralNetworkVis.trainingVis(),
+ {
+ onEpochEnd: whileTrainingCb,
+ },
+ ];
+ } else {
+ // if not use the default training
+ // options.whileTraining = whileTrainingCb === null ? [{
+ // onEpochEnd: (epoch, loss) => {
+ // console.log(epoch, loss.loss)
+ // }
+ // }] :
+ // [{
+ // onEpochEnd: whileTrainingCb
+ // }];
+ options.whileTraining = [
+ {
+ onEpochEnd: whileTrainingCb,
+ },
+ ];
+ }
+
+ // if metadata needs to be generated about the data
+ if (!this.neuralNetworkData.isMetadataReady) {
+ // if the inputs are defined as an array of [img_width, img_height, channels]
+ this.createMetaData();
+ }
+
+ // if the data still need to be summarized, onehotencoded, etc
+ if (!this.neuralNetworkData.isWarmedUp) {
+ this.prepareForTraining();
+ }
+
+ // if inputs and outputs are not specified
+ // in the options, then create the tensors
+ // from the this.neuralNetworkData.data.raws
+ if (!options.inputs && !options.outputs) {
+ const { inputs, outputs } = this.convertTrainingDataToTensors();
+ options.inputs = inputs;
+ options.outputs = outputs;
+ }
+
+ // check to see if layers are passed into the constructor
+ // then use those to create your architecture
+ if (!this.neuralNetwork.isLayered) {
+ // TODO: don't update this.options.layers - Linda
+ this.options.layers = this.createNetworkLayers(this.options.layers);
+ }
+
+ // if the model does not have any layers defined yet
+ // then use the default structure
+ if (!this.neuralNetwork.isLayered) {
+ // TODO: don't update this.options.layers - Linda
+ this.options.layers = this.addDefaultLayers();
+ }
+
+ if (!this.neuralNetwork.isCompiled) {
+ // compile the model with defaults
+ this.compile();
+ }
+
+ // train once the model is compiled
+ await this.neuralNetwork.train(options);
+ }
+
  /**
   * Builds dataset metadata (dtypes, min/max, one-hot legends, shapes).
   * Unlike the NeuralNetwork class, the shape here is derived from the
   * number of inputs/timesteps rather than image dimensions.
   */
  createMetaData() {
    // this method does not get shape for images but instead for timesteps
    const { inputs } = this.options;

    let inputShape;
    if (typeof inputs === "number") {
      inputShape = inputs;
    } else if (Array.isArray(inputs) && inputs.length > 0) {
      inputShape = inputs.length; //will be fed into the tensors later
    }
    // NOTE(review): inputShape remains undefined when inputs is neither
    // a number nor a non-empty array — createMetadata must handle that.

    this.neuralNetworkData.createMetadata(inputShape);
  }
+
  // Marks the raw data as warmed up (summarized/encoded) for training.
  // The one-hot encoding call is currently disabled and kept as a
  // reference for where that step would run.
  prepareForTraining() {
    // this.data.training = this.neuralNetworkData.applyOneHotEncodingsToDataRaw();
    this.neuralNetworkData.isWarmedUp = true;
  }
+
  // Converts the prepared training data into {inputs, outputs} tensors.
  convertTrainingDataToTensors() {
    return this.neuralNetworkData.convertRawToTensors(this.data.training);
  }
+
+ createNetworkLayers(layerJsonArray) {
+ const layers = [...layerJsonArray];
+
+ const { inputUnits, outputUnits } = this.neuralNetworkData.meta;
+ const layersLength = layers.length;
+
+ if (!(layers.length >= 2)) {
+ return false;
+ }
+
+ // set the inputShape
+ layers[0].inputShape = layers[0].inputShape
+ ? layers[0].inputShape
+ : inputUnits;
+ // set the output units
+ const lastIndex = layersLength - 1;
+ const lastLayer = layers[lastIndex];
+ lastLayer.units = lastLayer.units ? lastLayer.units : outputUnits;
+
+ layers.forEach((layer) => {
+ this.addLayer(tf.layers[layer.type](layer));
+ });
+
+ return layers;
+ }
+
+ addDefaultLayers() {
+ let layers;
+ const task = this.options.task;
+ const dataMode = this.options.dataMode;
+ let taskConditions = `${task}_${dataMode}`;
+ switch (taskConditions.toLowerCase()) {
+ // if the task is classification and spatial modality
+ case "classification_spatial":
+ layers = [
+ {
+ type: "conv1d",
+ filters: 8,
+ kernelSize: 3,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ },
+ {
+ type: "maxPooling1d",
+ poolSize: 2,
+ },
+ {
+ type: "conv1d",
+ filters: 16,
+ kernelSize: 3,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ },
+ {
+ type: "maxPooling1d",
+ poolSize: 2,
+ },
+ {
+ type: "flatten",
+ },
+ {
+ type: "dense",
+ units: this.options.hiddenUnits,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ activation: "softmax",
+ },
+ ];
+
+ return this.createNetworkLayers(layers);
+ // if the task is classification and sequential modality
+ case "classification_linear":
+ layers = [
+ {
+ type: "lstm",
+ units: 16,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ returnSequences: true,
+ },
+ {
+ type: "lstm",
+ units: 8,
+ activation: "relu",
+ returnSequences: false,
+ },
+ {
+ type: "dense",
+ units: this.options.hiddenUnits,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ activation: "softmax",
+ },
+ ];
+
+ return this.createNetworkLayers(layers);
+
+ // if the task is regression
+ case "regression_spatial":
+ layers = [
+ {
+ type: "conv1d",
+ filters: 8,
+ kernelSize: 3,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ },
+ {
+ type: "maxPooling1d",
+ poolSize: 2,
+ },
+ {
+ type: "conv1d",
+ filters: 16,
+ kernelSize: 3,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ },
+ {
+ type: "maxPooling1d",
+ poolSize: 2,
+ },
+ {
+ type: "flatten",
+ },
+ {
+ type: "dense",
+ units: this.options.hiddenUnits,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ activation: "sigmoid",
+ },
+ ];
+
+ return this.createNetworkLayers(layers);
+
+ case "regression_linear":
+ layers = [
+ {
+ type: "lstm",
+ units: 16,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ returnSequences: true,
+ },
+ {
+ type: "lstm",
+ units: 8,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ units: this.options.hiddenUnits,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ activation: "sigmoid",
+ },
+ ];
+
+ return this.createNetworkLayers(layers);
+
+ default:
+ console.log("no inputUnits or outputUnits defined");
+ layers = [
+ {
+ type: "lstm",
+ units: 16,
+ activation: "relu",
+ inputShape: this.neuralNetworkData.meta.seriesShape,
+ },
+ {
+ type: "lstm",
+ units: 8,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ units: this.options.hiddenUnits,
+ activation: "relu",
+ },
+ {
+ type: "dense",
+ activation: "sigmoid",
+ },
+ ];
+ return this.createNetworkLayers(layers);
+ }
+ }
+
  // Proxy: adds a layer to the underlying sequential model.
  addLayer(layer) {
    this.neuralNetwork.addLayer(layer);
  }
+
+ compile() {
+ const LEARNING_RATE = this.options.learningRate;
+
+ let options = {};
+
+ if (
+ this.options.task === "classification" ||
+ this.options.task === "imageClassification"
+ ) {
+ options = {
+ loss: "categoricalCrossentropy",
+ optimizer: tf.train.adam,
+ metrics: ["accuracy"],
+ };
+ } else if (this.options.task === "regression") {
+ options = {
+ loss: "meanSquaredError",
+ optimizer: tf.train.adam,
+ metrics: ["accuracy"],
+ };
+ }
+
+ options.optimizer = options.optimizer
+ ? this.neuralNetwork.setOptimizerFunction(
+ LEARNING_RATE,
+ options.optimizer
+ )
+ : this.neuralNetwork.setOptimizerFunction(LEARNING_RATE, tf.train.sgd);
+
+ this.neuralNetwork.compile(options);
+
+ // if debug mode is true, then show the model summary
+ if (this.options.debug) {
+ this.neuralNetworkVis.modelSummary(
+ {
+ name: "Model Summary",
+ },
+ this.neuralNetwork.model
+ );
+ }
+ }
+
+ async normalizeData() {
+ if (!this.neuralNetworkData.data.raw.length > 0) {
+ throw new Error(
+ "Empty Data Error: You Cannot Normalize/Train without adding any data! Please add data first"
+ );
+ }
+ if (!this.neuralNetworkData.isMetadataReady) {
+ this.createMetaData();
+ }
+
+ if (!this.neuralNetworkData.isWarmedUp) {
+ this.prepareForTraining();
+ }
+
+ const trainingData = this.neuralNetworkData.normalizeDataRaw();
+
+ // set this equal to the training data
+ this.data.training = trainingData;
+
+ // set isNormalized to true
+ this.neuralNetworkData.meta.isNormalized = true;
+ }
+
+ // ////////
+
  // Public classify API: runs classifyInternal and resolves via the
  // callback and/or the returned Promise.
  classify(_input, _cb) {
    return callCallback(this.classifyInternal(_input), _cb);
  }
+
+ async classifyInternal(_input) {
+ const { meta } = this.neuralNetworkData;
+ const headers = Object.keys(meta.inputs);
+
+ let inputData;
+
+ inputData = this.formatInputsForPredictionAll(_input);
+
+ const unformattedResults = await this.neuralNetwork.classify(inputData);
+ inputData.dispose();
+
+ if (meta !== null) {
+ const label = Object.keys(meta.outputs)[0];
+ const vals = Object.entries(meta.outputs[label].legend);
+
+ const formattedResults = unformattedResults.map((unformattedResult) => {
+ return vals
+ .map((item, idx) => {
+ return {
+ [item[0]]: unformattedResult[idx],
+ label: item[0],
+ confidence: unformattedResult[idx],
+ };
+ })
+ .sort((a, b) => b.confidence - a.confidence);
+ });
+
+ // return single array if the length is less than 2,
+ // otherwise return array of arrays
+ if (formattedResults.length < 2) {
+ return formattedResults[0];
+ }
+ return formattedResults;
+ }
+
+ return unformattedResults;
+ }
+
+ formatInputsForPredictionAll(_input) {
+ const { meta } = this.neuralNetworkData;
+ const inputHeaders = Object.keys(meta.inputs);
+
+ const formatted_inputs = tsUtils.verifyAndFormatInputs(
+ _input,
+ null,
+ this.options
+ );
+ const normalized_inputs = this.neuralNetworkData.normalizePredictData(
+ formatted_inputs,
+ meta.inputs
+ );
+ const output = tf.tensor(normalized_inputs);
+
+ return output;
+ }
+
  // Public predict API: runs predictInternal and resolves via the
  // callback and/or the returned Promise.
  predict(_input, _cb) {
    return callCallback(this.predictInternal(_input), _cb);
  }
+
+ async predictInternal(_input) {
+ const { meta } = this.neuralNetworkData;
+
+ const inputData = this.formatInputsForPredictionAll(_input);
+
+ const unformattedResults = await this.neuralNetwork.predict(inputData);
+ inputData.dispose();
+
+ if (meta !== null) {
+ const labels = Object.keys(meta.outputs);
+
+ const formattedResults = unformattedResults.map((unformattedResult) => {
+ return labels.map((item, idx) => {
+ // check to see if the data were normalized
+ // if not, then send back the values, otherwise
+ // unnormalize then return
+ let val;
+ let unNormalized;
+ if (meta.isNormalized) {
+ const { min, max } = meta.outputs[item];
+ val = nnUtils.unnormalizeValue(unformattedResult[idx], min, max);
+ unNormalized = unformattedResult[idx];
+ } else {
+ val = unformattedResult[idx];
+ }
+
+ const d = {
+ [labels[idx]]: val,
+ label: item,
+ value: val,
+ };
+
+ // if unNormalized is not undefined, then
+ // add that to the output
+ if (unNormalized) {
+ d.unNormalizedValue = unNormalized;
+ }
+
+ return d;
+ });
+ });
+
+ // return single array if the length is less than 2,
+ // otherwise return array of arrays
+ if (formattedResults.length < 2) {
+ return formattedResults[0];
+ }
+ return formattedResults;
+ }
+
+ // if no meta exists, then return unformatted results;
+ return unformattedResults;
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////////
+ * Save / Load Data
+ * ////////////////////////////////////////////////////////////
+ */
+
  /**
   * Saves the raw dataset to a file.
   * @param {string} [name] - file name (without extension)
   * @param {Function} [callback]
   */
  saveData(name, callback) {
    const args = handleArguments(name, callback);
    return callCallback(
      this.neuralNetworkData.saveData(args.name),
      args.callback
    );
  }
+
  /**
   * Loads a previously saved dataset.
   * @param {string | FileList | Object} filesOrPath
   * @param {Function} [callback]
   */
  async loadData(filesOrPath, callback) {
    return callCallback(this.neuralNetworkData.loadData(filesOrPath), callback);
  }
+
+ async loadDataFromUrl(dataUrl, inputs, outputs) {
+ let json;
+ let dataFromUrl;
+ try {
+ if (dataUrl.endsWith(".csv")) {
+ dataFromUrl = await this.neuralNetworkData.loadCSV(
+ dataUrl,
+ inputs,
+ outputs
+ );
+ } else if (dataUrl.endsWith(".json")) {
+ dataFromUrl = await this.neuralNetworkData.loadJSON(
+ dataUrl,
+ inputs,
+ outputs
+ );
+ } else if (dataUrl.includes("blob")) {
+ dataFromUrl = await this.loadBlob(dataUrl, inputs, outputs);
+ } else {
+ throw new Error("Not a valid data format. Must be csv or json");
+ }
+ } catch (error) {
+ console.error(error);
+ throw new Error(error);
+ }
+
+ dataFromUrl.map((item) => {
+ this.addData(item.xs, item.ys);
+ });
+
+ this.createMetaData();
+
+ this.prepareForTraining();
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////////
+ * Save / Load Model
+ * ////////////////////////////////////////////////////////////
+ */
+
+ async save(name, callback) {
+ const args = handleArguments(name, callback);
+ const modelName = args.string || "model";
+ // save the model
+ return callCallback(
+ Promise.all([
+ this.neuralNetwork.save(modelName),
+ this.neuralNetworkData.saveMeta(modelName),
+ ]),
+ args.callback
+ );
+ }
+
+ /**
+ * @public - also called internally by init() when there is a modelUrl in the options
+ * load a model and metadata
+ * @param {string | FileList | Object} filesOrPath - The URL of the file to load,
+ * or a FileList object (.files) from an HTML element .
+ * @param {ML5Callback} [callback] Optional - A function to call when the loading is complete.
+ * @return {Promise}
+ */
+ async load(filesOrPath, callback) {
+ return callCallback(
+ Promise.all([
+ this.neuralNetwork.load(filesOrPath),
+ this.neuralNetworkData.loadMeta(filesOrPath),
+ ]),
+ callback
+ );
+ }
+
+ /**
+ * dispose and release memory for a model
+ */
+ dispose() {
+ this.neuralNetwork.dispose();
+ }
+
+ padCoordinates(coordinates, targetPointCount) {
+ const maxEpsilon = int(coordinates.length / 2);
+ return tsUtils.padCoordinates(coordinates, targetPointCount, maxEpsilon);
+ }
+}
+
/**
 * Factory for the ml5 timeSeries model. Accepts either an options
 * object (plus optional callback) or legacy positional arguments
 * (inputs, outputs, callback).
 */
const TimeSeries = (inputsOrOptions, outputsOrCallback, callback) => {
  const hasOptionsObject = inputsOrOptions instanceof Object;

  const options = hasOptionsObject
    ? inputsOrOptions
    : { inputs: inputsOrOptions, outputs: outputsOrCallback };
  const cb = hasOptionsObject ? outputsOrCallback : callback;

  return new timeSeries(options, cb);
};
+
+export default TimeSeries;
diff --git a/src/TimeSeries/timeSeries.js b/src/TimeSeries/timeSeries.js
new file mode 100644
index 00000000..fc560ecf
--- /dev/null
+++ b/src/TimeSeries/timeSeries.js
@@ -0,0 +1,251 @@
+import * as tf from "@tensorflow/tfjs";
+import { saveBlob } from "../utils/io";
+import { randomGaussian } from "../utils/random";
+
+/*
+
+Things changed from neural network class:
+
+1. No neuro evolution
+
+*/
+
+class NeuralNetwork {
+ constructor() {
+ // flags
+ this.isTrained = false;
+ this.isCompiled = false;
+ this.isLayered = false;
+ /**
+ * @type {tf.Sequential | null} - the TensorFlow model
+ */
+ this.model = null;
+
+ // methods
+ this.init = this.init.bind(this);
+ this.createModel = this.createModel.bind(this);
+ this.addLayer = this.addLayer.bind(this);
+ this.compile = this.compile.bind(this);
+ this.setOptimizerFunction = this.setOptimizerFunction.bind(this);
+ this.train = this.train.bind(this);
+ this.predict = this.predict.bind(this);
+ this.classify = this.classify.bind(this);
+ this.save = this.save.bind(this);
+ this.load = this.load.bind(this);
+
+ // initialize
+ this.init();
+ }
+
+ /**
+ * initialize with create model
+ */
+ init() {
+ this.createModel();
+ }
+
+ /**
+ * creates a sequential model
+ * uses switch/case for potential future where different formats are supported
+ * @param {*} _type
+ */
+ createModel(_type = "sequential") {
+ switch (_type.toLowerCase()) {
+ case "sequential":
+ this.model = tf.sequential();
+ return this.model;
+ default:
+ this.model = tf.sequential();
+ return this.model;
+ }
+ }
+
+ /**
+ * add layer to the model
+ * if the model has 2 or more layers switch the isLayered flag
+ * @param {tf.Layer} layer
+ * @void
+ */
+ addLayer(layer) {
+ this.model.add(layer);
+
+ // check if it has at least an input and output layer
+ if (this.model.layers.length >= 2) {
+ this.isLayered = true;
+ }
+ }
+
+ /**
+ * Compile the model
+ * if the model is compiled, set the isCompiled flag to true
+ * @param {*} _modelOptions
+ */
+ compile(_modelOptions) {
+ this.model.compile(_modelOptions);
+ this.isCompiled = true;
+ }
+
+ /**
+ * Set the optimizer function given the learning rate
+ * as a parameter
+ * @param {*} learningRate
+ * @param {*} optimizer
+ */
+ setOptimizerFunction(learningRate, optimizer) {
+ return optimizer.call(this, learningRate);
+ }
+
+ /**
+ * Train the model
+ * @param {Object} _options
+ */
+ async train(_options) {
+ const TRAINING_OPTIONS = _options;
+
+ const xs = TRAINING_OPTIONS.inputs;
+ const ys = TRAINING_OPTIONS.outputs;
+ console.log("train", xs, ys);
+ const { batchSize, epochs, shuffle, validationSplit, whileTraining } =
+ TRAINING_OPTIONS;
+
+ await this.model.fit(xs, ys, {
+ batchSize,
+ epochs,
+ shuffle,
+ validationSplit,
+ callbacks: whileTraining,
+ });
+
+ xs.dispose();
+ ys.dispose();
+
+ this.isTrained = true;
+ }
+
+ /**
+ * returns the prediction as an array synchronously
+ * @param {*} _inputs
+ */
+ predictSync(_inputs) {
+ const output = tf.tidy(() => {
+ return this.model.predict(_inputs);
+ });
+ const result = output.arraySync();
+
+ output.dispose();
+ _inputs.dispose();
+
+ return result;
+ }
+
+ /**
+ * returns the prediction as an array
+ * @param {*} _inputs
+ */
+ async predict(_inputs) {
+ const output = tf.tidy(() => {
+ return this.model.predict(_inputs);
+ });
+ const result = await output.array();
+
+ output.dispose();
+ _inputs.dispose();
+
+ return result;
+ }
+
+ /**
+ * classify is the same as .predict()
+ * @param {*} _inputs
+ */
+ async classify(_inputs) {
+ return this.predict(_inputs);
+ }
+
+ /**
+ * classify is the same as .predict()
+ * @param {*} _inputs
+ */
+ classifySync(_inputs) {
+ return this.predictSync(_inputs);
+ }
+
+ // predictMultiple
+ // classifyMultiple
+ // are the same as .predict()
+
+ /**
+ * save the model.json and the weights.bin files
+ * @param {string} modelName
+ * @return {Promise}
+ */
+ async save(modelName = "model") {
+ await this.model.save(
+ tf.io.withSaveHandler(async (data) => {
+ this.weightsManifest = {
+ modelTopology: data.modelTopology,
+ weightsManifest: [
+ {
+ paths: [`./${modelName}.weights.bin`],
+ weights: data.weightSpecs,
+ },
+ ],
+ };
+ console.log("data.weightData", data.weightData);
+ await saveBlob(
+ data.weightData,
+ `${modelName}.weights.bin`,
+ "application/octet-stream"
+ );
+ console.log("this.weightsManifest", this.weightsManifest);
+ await saveBlob(
+ JSON.stringify(this.weightsManifest),
+ `${modelName}.json`,
+ "text/plain"
+ );
+ })
+ );
+ }
+
+ /**
+ * loads the model and weights
+ * @param {string | FileList | Object} filesOrPath
+ */
+ async load(filesOrPath) {
+ if (filesOrPath instanceof FileList) {
+ const files = Array.from(filesOrPath);
+ // find the correct files
+ const model = files.find(
+ (file) => file.name.includes(".json") && !file.name.includes("_meta")
+ );
+ const weights = files.find((file) => file.name.includes(".bin"));
+ // load the model
+ this.model = await tf.loadLayersModel(
+ tf.io.browserFiles([model, weights])
+ );
+ } else if (filesOrPath instanceof Object) {
+ this.model = await tf.loadLayersModel(
+ tf.io.http(filesOrPath.model, {
+ // Override the weights path from the JSON weightsManifest
+ weightUrlConverter: (weightFileName) => {
+ return filesOrPath.weights || weightFileName;
+ },
+ })
+ );
+ } else {
+ this.model = await tf.loadLayersModel(filesOrPath);
+ }
+
+ this.isCompiled = true;
+ this.isLayered = true;
+ this.isTrained = true;
+ }
+
+ /**
+ * dispose and release the memory for the model
+ */
+ dispose() {
+ this.model.dispose();
+ }
+}
+export default NeuralNetwork;
diff --git a/src/TimeSeries/timeSeriesData.js b/src/TimeSeries/timeSeriesData.js
new file mode 100644
index 00000000..ab5f10a8
--- /dev/null
+++ b/src/TimeSeries/timeSeriesData.js
@@ -0,0 +1,943 @@
+import * as tf from "@tensorflow/tfjs";
+import axios from "axios";
+import { saveBlob } from "../utils/io";
+import modelLoader from "../utils/modelLoader";
+import nnUtils from "../NeuralNetwork/NeuralNetworkUtils";
+
+import tsUtils from "./timeSeriesUtils";
+
+class NeuralNetworkData {
+ constructor() {
+ this.meta = {
+ inputUnits: null, // Number
+ outputUnits: null, // Number
+ // objects describing input/output data by property name
+ inputs: {}, // { name1: {dtype}, name2: {dtype} }
+ outputs: {}, // { name1: {dtype} }
+ isNormalized: false, // Boolean - keep this in meta for model saving/loading
+ };
+
+ this.isMetadataReady = false;
+ this.isWarmedUp = false;
+
+ this.data = {
+ raw: [], // array of {xs:[{},{}], ys:{}}
+ };
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////
+ * Add Data
+ * ////////////////////////////////////////////////////////
+ */
+
+ /**
+ * Add Data
+ * @param {object} xInputObj, {key: value}, key must be the name of the property value must be a String, Number, or Array
+ * @param {*} yInputObj, {key: value}, key must be the name of the property value must be a String, Number, or Array
+ * @void - updates this.data
+ */
+ addData(xInputObj, yInputObj) {
+ this.data.raw.push({
+ xs: xInputObj,
+ ys: yInputObj,
+ });
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////
+ * Summarize Data
+ * ////////////////////////////////////////////////////////
+ */
+
+ /**
+ * create the metadata from the data
+ * this covers:
+ * 1. getting the datatype from the data
+ * 2. getting the min and max from the data
+ * 3. getting the oneHot encoded values
+ * 4. getting the inputShape and outputUnits from the data
+ * @param {Array} [inputShape]
+ * @void
+ */
+ createMetadata(inputShape = null) {
+ // get the data type for each property
+ this.getDTypesFromSeriesData();
+ // get the stats - min, max
+ this.getDataStats();
+ // onehot encode
+ this.getDataOneHot();
+ // calculate the input units from the data
+ this.getDataUnits(inputShape);
+ // get the shape of batch
+
+ this.isMetadataReady = true;
+ }
+
+ /**
+ * getDTypesFromData
+ * gets the data types of the data we're using
+ * important for handling oneHot
+ * @private
+ * @void - updates this.meta
+ */
+ getDTypesFromSeriesData() {
+ const meta = {
+ ...this.meta,
+ inputs: {},
+ outputs: {},
+ };
+
+ const sample = this.data.raw[0];
+
+ //consistent dTypes have already been checked at add data
+ const xs = Object.keys(sample.xs[0]); //since time series data is in form of array
+ const ys = Object.keys(sample.ys);
+ xs.forEach((prop) => {
+ meta.inputs[prop] = {
+ dtype: nnUtils.getDataType(sample.xs[0][prop]),
+ };
+ });
+
+ ys.forEach((prop) => {
+ meta.outputs[prop] = {
+ dtype: nnUtils.getDataType(sample.ys[prop]),
+ };
+ });
+
+ this.meta = meta;
+ }
+
+ /**
+ * get stats about the data
+ * @private
+ * @void
+ */
+ getDataStats() {
+ this.meta.inputs = this.getInputMetaStats(this.meta.inputs, "xs");
+ this.meta.outputs = this.getInputMetaStats(this.meta.outputs, "ys");
+ }
+
+ /**
+ * get back the min and max of each label
+ * @private
+ * @param {Object} inputOrOutputMeta
+ * @param {"xs" | "ys"} xsOrYs
+ * @return {Object}
+ */
+ getInputMetaStats(inputOrOutputMeta, xsOrYs) {
+ const inputMeta = Object.assign({}, inputOrOutputMeta);
+
+ Object.keys(inputMeta).forEach((k) => {
+ if (inputMeta[k].dtype === "string") {
+ inputMeta[k].min = 0;
+ inputMeta[k].max = 1;
+ } else if (inputMeta[k].dtype === "number") {
+ let dataAsArray;
+ if (xsOrYs === "ys") {
+ dataAsArray = this.data.raw.map((item) => item[xsOrYs][k]);
+ } else if (xsOrYs === "xs") {
+ dataAsArray = this.data.raw.flatMap((item) =>
+ item[xsOrYs].map((obj) => obj[k])
+ );
+ }
+ inputMeta[k].min = nnUtils.getMin(dataAsArray);
+ inputMeta[k].max = nnUtils.getMax(dataAsArray);
+ } else if (inputMeta[k].dtype === "array") {
+ const dataAsArray = this.data.raw.map((item) => item[xsOrYs][k]).flat();
+ inputMeta[k].min = nnUtils.getMin(dataAsArray);
+ inputMeta[k].max = nnUtils.getMax(dataAsArray);
+ }
+ });
+
+ return inputMeta;
+ }
+
+ /**
+ * getDataOneHot
+ * creates onehot encodings for the input and outputs
+ * and adds them to the meta info
+ * @private
+ * @void
+ */
+ getDataOneHot() {
+ this.meta.inputs = this.getInputMetaOneHot(this.meta.inputs, "xs");
+ this.meta.outputs = this.getInputMetaOneHot(this.meta.outputs, "ys");
+ }
+
+ /**
+ * getOneHotMeta
+ * @param {Object} _inputsMeta
+ * @param {"xs" | "ys"} xsOrYs
+ * @return {Object}
+ */
+ getInputMetaOneHot(_inputsMeta, xsOrYs) {
+ const inputsMeta = Object.assign({}, _inputsMeta);
+
+ Object.entries(inputsMeta).forEach((arr) => {
+ // the key
+ const key = arr[0];
+ // the value
+ const { dtype } = arr[1];
+
+ if (dtype === "string") {
+ const uniqueVals = [
+ ...new Set(this.data.raw.map((obj) => obj[xsOrYs][key])),
+ ];
+ const oneHotMeta = this.createOneHotEncodings(uniqueVals);
+ inputsMeta[key] = {
+ ...inputsMeta[key],
+ ...oneHotMeta,
+ };
+ }
+ });
+ return inputsMeta;
+ }
+
+ /**
+ * get the data units, inputshape and output units
+ * @private
+ * @param {Array} arrayShape
+ * @void
+ */
+ getDataUnits(arrayShape = null) {
+ // if the data has a shape pass it in
+ if (arrayShape) {
+ this.meta.inputUnits = arrayShape;
+ } else {
+ this.meta.inputUnits = [this.getInputMetaUnits(this.meta.inputs)].flat();
+ }
+
+ this.meta.outputUnits = this.getInputMetaUnits(this.meta.outputs);
+ }
+
+ /**
+ * @private
+ * @param {Object} inputsMeta
+ * @return {number | Array}
+ */
+ // eslint-disable-next-line class-methods-use-this
+ getInputMetaUnits(inputsMeta) {
+ let units = 0;
+
+ Object.entries(inputsMeta).forEach((arr) => {
+ const { dtype } = arr[1];
+ if (dtype === "number") {
+ units += 1;
+ } else if (dtype === "string") {
+ const { uniqueValues } = arr[1];
+
+ const uniqueCount = uniqueValues.length;
+ units += uniqueCount;
+ } else if (dtype === "array") {
+ // TODO: User must input the shape of the
+ // image size correctly.
+ units = [];
+ }
+ });
+
+ return units;
+ }
+
+ /**
+ * Returns a legend mapping the
+ * data values to oneHot encoded values
+ * @private
+ * @param {Array} _uniqueValuesArray
+ * @return {Object}
+ */
+ // eslint-disable-next-line class-methods-use-this, no-unused-vars
+ createOneHotEncodings(_uniqueValuesArray) {
+ return tf.tidy(() => {
+ const output = {
+ uniqueValues: _uniqueValuesArray,
+ legend: {},
+ };
+
+ const uniqueVals = _uniqueValuesArray; // [...new Set(this.data.raw.map(obj => obj.xs[prop]))]
+ // get back values from 0 to the length of the uniqueVals array
+ const onehotValues = uniqueVals.map((item, idx) => idx);
+ // oneHot encode the values in the 1d tensor
+ const oneHotEncodedValues = tf.oneHot(
+ tf.tensor1d(onehotValues, "int32"),
+ uniqueVals.length
+ );
+ // convert them from tensors back out to an array
+ const oneHotEncodedValuesArray = oneHotEncodedValues.arraySync();
+
+ // populate the legend with the key/values
+ uniqueVals.forEach((uVal, uIdx) => {
+ output.legend[uVal] = oneHotEncodedValuesArray[uIdx];
+ });
+
+ return output;
+ });
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////
+ * Tensor handling
+ * ////////////////////////////////////////////////////////
+ */
+
+ /**
+ * convertRawToTensors
+ * converts array of {xs, ys} to tensors
+ * @param {*} dataRaw
+ *
+ * @return {{ inputs: tf.Tensor, outputs: tf.Tensor }}
+ */
+ // eslint-disable-next-line class-methods-use-this, no-unused-vars
+ convertRawToTensors(dataRaw) {
+ const meta = Object.assign({}, this.meta);
+ const dataLength = dataRaw.length;
+
+ return tf.tidy(() => {
+ const inputArr = [];
+ const outputArr = [];
+
+ dataRaw.forEach((row) => {
+ // get xs
+ // const xs = Object.keys(meta.inputs)
+ // .map((k) => {
+ // return row.xs[k];
+ // })
+ // .flat();
+
+ // inputArr.push(xs);
+
+ const xs = row.xs;
+ inputArr.push(xs);
+
+ // get ys
+ const ys = Object.keys(meta.outputs)
+ .map((k) => {
+ return row.ys[k];
+ })
+ .flat();
+
+ outputArr.push(ys);
+ });
+
+ // const inputs = tf.tensor(inputArr.flat(), [
+ // dataLength,
+ // ...meta.inputUnits,
+ // ]);
+ const inputs = tf.tensor(inputArr);
+
+ const outputs = tf.tensor(outputArr.flat(), [
+ dataLength,
+ meta.outputUnits,
+ ]);
+
+ return {
+ inputs,
+ outputs,
+ };
+ });
+ }
+
+ /**
+ * ////////////////////////////////////////////////////////
+ * data normalization / unnormalization
+ * ////////////////////////////////////////////////////////
+ */
+
+ /**
+ * normalize the dataRaw input
+ * @return {Array