{ "cells": [ { "cell_type": "code", "execution_count": 15, "id": "ab7d7dbd-f2e0-4384-8c55-d4aeee74dd7c", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "The tensorboard extension is already loaded. To reload it, use:\n", " %reload_ext tensorboard\n", "WARNING:tensorflow:5 out of the last 5 calls to .predict_function at 0x7fe3e8248550> triggered tf.function retracing. Tracing is expensive and the excessive number of tracings could be due to (1) creating @tf.function repeatedly in a loop, (2) passing tensors with different shapes, (3) passing Python objects instead of tensors. For (1), please define your @tf.function outside of the loop. For (2), @tf.function has reduce_retracing=True option that can avoid unnecessary retracing. For (3), please refer to https://www.tensorflow.org/guide/function#controlling_retracing and https://www.tensorflow.org/api_docs/python/tf/function for more details.\n", "1/1 [==============================] - 0s 33ms/step\n", "[[0.03452728]\n", " [0.9867295 ]\n", " [0.9883936 ]\n", " [0.01205833]]\n" ] }, { "data": { "text/plain": [ "Reusing TensorBoard on port 6008 (pid 911540), started 0:22:54 ago. (Use '!kill 911540' to kill it.)" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/html": [ "\n", " \n", " \n", " " ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "%load_ext tensorboard\n", "\n", "import tensorflow as tf\n", "import numpy as np \n", "import datetime, os\n", "\n", "tf.config.experimental.set_visible_devices([], \"GPU\") \n", "\n", "X = np.array([[0,0],[0,1],[1,0],[1,1]])\n", "y = np.array([[0],[1],[1],[0]])\n", " \n", "model = tf.keras.models.Sequential([\n", " tf.keras.layers.Dense(8, activation='relu', name='layers_dense_1'),\n", " tf.keras.layers.Dense(8, activation='relu', name='layers_dense_2'),\n", " tf.keras.layers.Dense(1, activation='sigmoid', name='layers_dense_3')\n", "])\n", " \n", "loss_fn = tf.keras.losses.binary_crossentropy\n", "\n", "simple = True\n", "\n", "if simple == True:\n", " model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n", " \n", " logdir = os.path.join(\"logs\", datetime.datetime.now().strftime(\"%Y%m%d-%H%M%S\"))\n", " tensorboard_callback = tf.keras.callbacks.TensorBoard(logdir, histogram_freq=1)\n", " model.fit(X, y, batch_size=4, epochs=1000, verbose=0, callbacks = [tensorboard_callback])\n", "else:\n", " for i in range(100):\n", " with tf.GradientTape() as tape:\n", " # Forward pass.\n", " predictions = model(X)\n", " # Compute the loss value for this batch.\n", " loss_value = loss_fn(y, predictions)\n", "\n", " # Get gradients of loss wrt the weights.\n", " gradients = tape.gradient(loss_value, model.trainable_weights)\n", " # Update the weights of the model.\n", " optimizer.apply_gradients(zip(gradients, model.trainable_weights))\n", "\n", "print(model.predict(X))\n", "\n", "%tensorboard --logdir logs" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.16" } }, "nbformat": 4, "nbformat_minor": 5 }