pdfcoffee
}Chapter 13const container = {name: 'Accuracy', tab: 'Evaluation'};tfvis.show.perClassAccuracy(container, classAccuracy, classNames);labels.dispose();async function showConfusion(model, data) {const [preds, labels] = doPrediction(model, data);const confusionMatrix = await tfvis.metrics.confusionMatrix(labels, preds);const container = {name: 'Confusion Matrix', tab: 'Evaluation'};tfvis.render.confusionMatrix(container, {values: confusionMatrix}, classNames);labels.dispose();}Finally, the run() function will call all these functions in sequence to build an endto-endML pipeline:import {MnistData} from './data.js';async function run() {const data = new MnistData();await data.load();await showExamples(data);const model = getModel();tfvis.show.modelSummary({name: 'Model Architecture'}, model);await train(model, data);await showAccuracy(model, data);await showConfusion(model, data);}document.addEventListener('DOMContentLoaded', run);Refreshing the browser location http://localhost:8000/index.html will invokethe run() method above. The table below shows the model architecture, and theplots below that show the progress of the training.[ 483 ]
TensorFlow for Mobile and IoT and TensorFlow.jsOn the left are the loss and accuracy values on the validation dataset observedat the end of each batch, and on the right are the same loss and accuracy valuesobserved on the training dataset (blue) and validation dataset (red) at the end ofeach epoch:In addition, the following figure shows the accuracies across different classes forpredictions from our trained model on the test dataset, as well as the confusionmatrix of predicted versus actual classes for test dataset samples:[ 484 ]
- Page 468 and 469: Chapter 11Image source: https://arx
- Page 470 and 471: Chapter 11A neural network is used
- Page 472: Chapter 1111. Details regarding ins
- Page 475 and 476: TensorFlow and Cloud• Scalability
- Page 477 and 478: TensorFlow and Cloud• Azure DevOp
- Page 479 and 480: TensorFlow and Cloud• Lambda: The
- Page 481 and 482: TensorFlow and Cloud• Deep Learni
- Page 483 and 484: TensorFlow and CloudEC2 on AmazonTo
- Page 485 and 486: TensorFlow and CloudCompute Instanc
- Page 487 and 488: TensorFlow and CloudYou just share
- Page 489 and 490: TensorFlow and CloudIn case you req
- Page 491 and 492: TensorFlow and CloudIt starts with
- Page 493 and 494: TensorFlow and CloudTFX librariesTF
- Page 495 and 496: TensorFlow and CloudReferences1. To
- Page 497 and 498: TensorFlow for Mobile and IoT and T
- Page 499 and 500: TensorFlow for Mobile and IoT and T
- Page 501 and 502: TensorFlow for Mobile and IoT and T
- Page 503 and 504: TensorFlow for Mobile and IoT and T
- Page 505 and 506: TensorFlow for Mobile and IoT and T
- Page 507 and 508: TensorFlow for Mobile and IoT and T
- Page 509 and 510: TensorFlow for Mobile and IoT and T
- Page 511 and 512: TensorFlow for Mobile and IoT and T
- Page 513 and 514: TensorFlow for Mobile and IoT and T
- Page 515 and 516: TensorFlow for Mobile and IoT and T
- Page 517: TensorFlow for Mobile and IoT and T
- Page 521 and 522: TensorFlow for Mobile and IoT and T
- Page 523 and 524: TensorFlow for Mobile and IoT and T
- Page 525 and 526: TensorFlow for Mobile and IoT and T
- Page 527 and 528: An introduction to AutoMLThat is pr
- Page 529 and 530: An introduction to AutoMLFeature co
- Page 531 and 532: An introduction to AutoMLThis Effic
- Page 533 and 534: An introduction to AutoMLGoogle Clo
- Page 535 and 536: An introduction to AutoMLThen, we c
- Page 537 and 538: An introduction to AutoMLOnce the d
- Page 539 and 540: An introduction to AutoMLIf your mo
- Page 541 and 542: An introduction to AutoMLClicking o
- Page 543 and 544: An introduction to AutoMLFigure 16:
- Page 545 and 546: An introduction to AutoMLYou can al
- Page 547 and 548: An introduction to AutoMLPut simply
- Page 549 and 550: An introduction to AutoMLLet's star
- Page 551 and 552: An introduction to AutoMLThe token
- Page 553 and 554: An introduction to AutoMLThis will
- Page 555 and 556: An introduction to AutoMLFigure 37:
- Page 557 and 558: An introduction to AutoMLAt the end
- Page 559 and 560: An introduction to AutoMLUsing Clou
- Page 561 and 562: An introduction to AutoMLOnce the d
- Page 563 and 564: An introduction to AutoMLAt the end
- Page 565 and 566: An introduction to AutoMLAs the nex
- Page 567 and 568: An introduction to AutoMLOnce the m
}
Chapter 13
const container = {name: 'Accuracy', tab: 'Evaluation'};
tfvis.show.perClassAccuracy(container, classAccuracy, classNames);
labels.dispose();
/**
 * Computes and renders a confusion matrix (predicted vs. actual class)
 * for the test set, shown under the 'Evaluation' tab of the tfvis visor.
 *
 * @param {tf.LayersModel} model - the trained MNIST classifier.
 * @param {MnistData} data - dataset wrapper providing test batches.
 */
async function showConfusion(model, data) {
  const [preds, labels] = doPrediction(model, data);
  const confusionMatrix = await tfvis.metrics.confusionMatrix(
      labels, preds);
  const container = {name: 'Confusion Matrix', tab: 'Evaluation'};
  tfvis.render.confusionMatrix(
      container, {values: confusionMatrix}, classNames);
  // Dispose BOTH tensors returned by doPrediction. The original only
  // disposed `labels`, leaking the `preds` tensor on every call —
  // tfjs tensors are not garbage-collected and must be freed explicitly.
  preds.dispose();
  labels.dispose();
}
Finally, the run() function will call all these functions in sequence to build an end-to-end
ML pipeline:
import {MnistData} from './data.js';
/**
 * End-to-end ML pipeline: load data, preview it, build the model,
 * train, and render the evaluation visualizations in sequence.
 */
async function run() {
  // Load the MNIST dataset and render a few sample digits.
  const data = new MnistData();
  await data.load();
  await showExamples(data);

  // Build the model and show its layer-by-layer summary in the visor.
  const model = getModel();
  const summaryContainer = {name: 'Model Architecture'};
  tfvis.show.modelSummary(summaryContainer, model);

  // Train, then evaluate: per-class accuracy and confusion matrix.
  await train(model, data);
  await showAccuracy(model, data);
  await showConfusion(model, data);
}
// Kick off the pipeline once the page's DOM is ready. Note: run is an
// async function, so the returned Promise is intentionally unawaited here;
// any rejection surfaces as an unhandled-rejection console error.
document.addEventListener('DOMContentLoaded', run);
Refreshing the browser location http://localhost:8000/index.html will invoke
the run() function above. The table below shows the model architecture, and the
plots below that show the progress of the training.
[ 483 ]