|
61 | 61 | }, |
62 | 62 | "outputs": [], |
63 | 63 | "source": [ |
64 | | - "# Import Tensorflow 2.0\n", |
| 64 | + "# Import Comet\n", |
65 | 65 | "%pip install comet_ml\n", |
66 | 66 | "import comet_ml\n", |
67 | 67 | "comet_ml.init(project_name=\"6.s191lab2.1.1\")\n", |
68 | 68 | "comet_model_1 = comet_ml.Experiment()\n", |
69 | 69 | "\n", |
| 70 | + "# Import Tensorflow 2.0\n", |
70 | 71 | "%tensorflow_version 2.x\n", |
71 | 72 | "import tensorflow as tf \n", |
72 | 73 | "\n", |
|
276 | 277 | "BATCH_SIZE = 64\n", |
277 | 278 | "EPOCHS = 5\n", |
278 | 279 | "\n", |
279 | | - "model.fit(train_images, train_labels, batch_size=BATCH_SIZE, epochs=EPOCHS)" |
| 280 | + "model.fit(train_images, train_labels, batch_size=BATCH_SIZE, epochs=EPOCHS)\n", |
| 281 | + "comet_model_1.end()" |
280 | 282 | ] |
281 | 283 | }, |
282 | 284 | { |
|
419 | 421 | }, |
420 | 422 | "outputs": [], |
421 | 423 | "source": [ |
| 424 | + "comet_ml.init(project_name=\"6.s191lab2.1.2\")\n", |
| 425 | + "comet_model_2 = comet_ml.Experiment()\n", |
| 426 | + "\n", |
422 | 427 | "'''TODO: Define the compile operation with your optimizer and learning rate of choice'''\n", |
423 | 428 | "cnn_model.compile(optimizer='''TODO''', loss='''TODO''', metrics=['accuracy']) # TODO" |
424 | 429 | ] |
|
441 | 446 | "outputs": [], |
442 | 447 | "source": [ |
443 | 448 | "'''TODO: Use model.fit to train the CNN model, with the same batch_size and number of epochs previously used.'''\n", |
444 | | - "cnn_model.fit('''TODO''')" |
| 449 | + "cnn_model.fit('''TODO''')\n", |
| 450 | + "comet_model_2.end()" |
445 | 451 | ] |
446 | 452 | }, |
447 | 453 | { |
|
652 | 658 | "plotter = mdl.util.PeriodicPlotter(sec=2, xlabel='Iterations', ylabel='Loss', scale='semilogy')\n", |
653 | 659 | "optimizer = tf.keras.optimizers.SGD(learning_rate=1e-2) # define our optimizer\n", |
654 | 660 | "\n", |
| 661 | + "comet_ml.init(project_name=\"6.s191lab2.1.3\")\n", |
| 662 | + "comet_model_3 = comet_ml.Experiment()\n", |
| 663 | + "\n", |
655 | 664 | "if hasattr(tqdm, '_instances'): tqdm._instances.clear() # clear if it exists\n", |
656 | 665 | "\n", |
657 | 666 | "for idx in tqdm(range(0, train_images.shape[0], batch_size)):\n", |
|
666 | 675 | "\n", |
667 | 676 | " #'''TODO: compute the categorical cross entropy loss\n", |
668 | 677 | " loss_value = tf.keras.backend.sparse_categorical_crossentropy('''TODO''', '''TODO''') # TODO\n", |
| 678 | + " comet_model_3.log_metric(\"loss\", loss_value.numpy().mean(), step=idx)\n", |
669 | 679 | "\n", |
670 | 680 | " loss_history.append(loss_value.numpy().mean()) # append the loss to the loss_history record\n", |
671 | 681 | " plotter.plot(loss_history.get())\n", |
|
674 | 684 | " '''TODO: Use the tape to compute the gradient against all parameters in the CNN model.\n", |
675 | 685 | " Use cnn_model.trainable_variables to access these parameters.''' \n", |
676 | 686 | " grads = # TODO\n", |
677 | | - " optimizer.apply_gradients(zip(grads, cnn_model.trainable_variables))\n" |
| 687 | + " optimizer.apply_gradients(zip(grads, cnn_model.trainable_variables))\n", |
| 688 | + "\n", |
| 689 | + "comet_model_3.end()\n" |
678 | 690 | ] |
679 | 691 | }, |
680 | 692 | { |
|
0 commit comments