{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "00cd1404-2739-46f8-9751-1fb5828cc878", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2022-12-04 13:57:49.124769: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory\n", "2022-12-04 13:57:49.124815: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.\n", "2022-12-04 13:57:49.168190: E tensorflow/stream_executor/cuda/cuda_blas.cc:2981] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", "2022-12-04 13:57:50.158649: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory\n", "2022-12-04 13:57:50.158771: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory\n", "2022-12-04 13:57:50.158788: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "(60000, 784)\n", "(10000, 784)\n", "60000 train pictures, each has 784 values stored in a simple array\n" ] } ], "source": [ "# Goal: Test the Autoencoder on the MNIST Dataset, investigate influence of different ativation functions on predictions, use combinations\n", "# relu-relu, relu-sigmoid, sigmoid-relu and sigmoid-sigmoid\n", "\n", "# Import libraries\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import keras \n", "from keras.datasets import mnist\n", "from keras import layers\n", "\n", "# Load the data\n", "(X_train, Y_train), (X_test, Y_test) = mnist.load_data()\n", "\n", "# Normalise data\n", "X_train = X_train.astype('float32') / 255.\n", "X_test = X_test.astype('float32') / 255.\n", "\n", "X_train = X_train.reshape((len(X_train), np.prod(X_train.shape[1:])))\n", "X_test = X_test.reshape((len(X_test), np.prod(X_test.shape[1:])))\n", "\n", "print(X_train.shape)\n", "print(X_test.shape)\n", "print(\"60000 train pictures, each has 784 values stored in a simple array\")" ] }, { "cell_type": "code", "execution_count": 2, "id": "d0dfe895-ec94-4b8f-8a0c-720cb5f3bbd1", "metadata": {}, "outputs": [ { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAB6oAAAChCAYAAAB+ijHCAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAouklEQVR4nO3de9yVc774/3VXNCR0cmhTUZtBkoTGMcZj0ImIGjGzHSKEQTlUU6Yc9jAYYyjZjg0mIhOZNJFDTjNFjAqj2UkKnVVqOtz374/fdz/2vntf3Ferta57Vc/nf9frcV3XemeurnX4tGaVVVRUVOQAAAAAAAAAICM1qnsAAAAAAAAAALYuFqoBAAAAAAAAyJSFagAAAAAAAAAyZaEaAAAAAAAAgExZqAYAAAAAAAAgUxaqAQAAAAAAAMiUhWoAAAAAAAAAMmWhGgAAAAAAAIBM1cr3wPLy8ty8efNydevWzZWVlRVyJrYAFRUVueXLl+caN26cq1Gj+P8ewvXI98nyenQtUhXXI6XE9Uip8NqRUuLeSClxPVIqPFdTStwbKSWuR0qJ65FSsTHXYt4L1fPmzcvtueee+R7OVuLzzz/P7bHHHkV/HNcjaWRxPboWScv1SClxPVIqvHaklLg3Ukpcj5QKz9WUEvdGSonrkVLieqRUpLkW8/4nFXXr1s33ULYiWV0nrkfSyOI6cS2SluuRUuJ6pFR47UgpcW+klLgeKRWeqykl7o2UEtcjpcT1SKlIc53kvVDtq/ykkdV14nokjSyuE9ciabkeKSWuR0qF146UEvdGSonrkVLhuZpS4t5IKXE9Ukpcj5SKNNdJ8X9UBgAAAAAAAAD+DwvVAAAAAAAAAGTKQjUAAAAAAAAAmbJQDQAAAAAAAECmLFQDAAAAAAAAkCkL1QAAAAAAAABkykI1AAAAAAAAAJmyUA0AAAAAAABApixUAwAAAAAAAJApC9UAAAAAAAAAZMpCNQAAAAAAAACZqlXdAwD/q2/fvqFtt912obVq1Sq0bt26VXn+YcOGhfbWW2+FNnLkyCrPBQAAAAAAsLmqXbt2aG+88UZoBx98cGjPPfdcpe1TTz21YHNtTXyjGgAAAAAAAIBMWagGAAAAAAAAIFMWqgEAAAAAAADIlIVqAAAAAAAAADJVq7oHgK3VqFGjQuvWrVve5ysvL69yn4suuii0E044IbRXX301tDlz5uQ3GKS0zz77hPbRRx+FdsUVV4R29913F2UmNi916tQJ7bbbbqu0nXQfnDp1amhnnHFGaJ999tkmTAcAAFA96tWrF1qTJk3yPl/Se6Mrr7wytA8//LDS9ieffBL2ef/99/OeA9hyHHXUUaG99dZboe27776Vtjt16hT26dixY2jjxo1LNcebb74Z2uTJk1MdS+mrXbt2aHfeeWdorVu3Dq2ioiK0pM8U2Xi+UQ0AAAAAAABApixUAwAAAAAAAJApC9UAAAAAAAAAZMpvVEMGCv171Em/2/viiy9W2t57773DPp07dw6tefPmofXs2TO0W265ZWNGhI128MEHh5b02+tz587NYhw2Q7vvvntovXr1qrSddE0dcsghoSX9xtE999yzCdOxpWjTpk1ozzzzTGjNmjXLYJqq/eQnPwlt5syZoX3++edZjMMWIOn15NixY0Pr06dPaMOHDw9t/fr1hRmMzOyyyy6hPfnkk6El/b7fiBEjQps9e3ZB5iqGnXbaKbRjjjkmtPHjx1faXrt2bdFmAvi/kn6HtUuXLqG1b98+tBYtWuT9uEm/Nd20adPQkn4LdEM1a9bMew6g9O24446hPfbYY6Edf/zxoa1atSq0bbfdttL2DjvskGqOo48+OtV+SY/57bffhnbxxReHNnr06FSPQfW5/PLLQ7vwwgtDe/nll0MbNGhQaG+//XZhBtvK+UY1AAAAAAAAAJmyUA0AAAAAAABApixUAwAAAAAAAJApC9UAAAAAAAAAZKpWdQ8AW5q2bduG1rVr11THTp8+PbQuXbqEtnDhwtBWrFhRaXvbbbcN+7z99tuhHXTQQaE1aNDge+eEYmjdunVoK1euDG3MmDEZTEOpa9SoUWiPPPJINUzC1ubEE08MrXbt2tUwSTqdO3cO7bzzzgutR48eWYzDZibpNeG9996b6tjf//73oT344IOhrVq1auMHIzP16tULLek9y0477RTaV199Fdrs2bMLMlcxJP0Zpk6dGlrSa5BDDjmk0vann35auMEoiB133DG0W265JbSWLVtW2j7hhBPCPmvXri3cYPD/NG/ePLRLL7200navXr3CPtttt11oZWVlhRvsO+yzzz5Ffwxgy/DrX/86tI4dO6Y6NukeN3PmzErbCxYsCPt88803qc6fdL9Mmi1pjgceeCC0Tz75JLQPPvgg1SxkY7fddku138SJE0NLWluhMHyjGgAAAAAAAIBMWagGAAAAAAAAIFMWqgEAAAAAAADIlIVqAAAAAAAAADJVq7oH2FjdunULrVevXqHNmzcvtNWrV4f22GOPhfbll1+G9umnn6Ydka3c7rvvHlpZWVlo06dPD+3EE08Mbf78+XnNcfXVV4e2//77pzp23LhxeT0mpNWyZcvQ+vTpE9rIkSOzGIcSd/nll4d26qmnhnbYYYcV7DGPOeaY0GrUiP++7/333w/ttddeK9gcVK9ateJL5Q4dOlTDJPmbOnVqaFdddVVoderUCW3lypVFmYnNR9K9cI899kh17BNPPBFa0vsxSkfDhg1DGzVqVGj169cP7d577w3tsssuK8xgGRk4cGBoe+21V2gXXXRRaD4vKC09e/YM7aabbgptzz33rPJcO+64Y2iLFi3KbzD4HknPr1dccUU1TBJ99NFHoSV9psWWq0WLFpW2k14zdO3aNbT27duHVl5eHtrw4cNDe+ONN0LzfFv6DjjggNCS1nOSzJ07N7Sf/exnoW14HSxdujTss2LFilSPmfQ5z6BBg0JLep2Y9Bph8ODBoV1wwQWhLVmyJNV8FF7dunVDW7t2bWgTJ07MYhz+H9+oBgAAAAAAACBTFqoBAAAAAAAAyJSFagAAAAAAAAAyZaEaAAAAAAAAgEzVqu4BNtatt94aWrNmzfI+30UXXRTa8uXLQ5s+fXrej1Fsc+fODS3pv9OUKVOyGGer99xzz4XWokWL0JKus8WLFxdsjh49eoS2zTbbFOz8sCl++MMfhlanTp3QRo0alcU4lLg777wztPLy8qI+5mmnnZaqffbZZ6F17949tKlTpxZmMDJ13HHHhfajH/0otKTXXaWiXr16oe2///6hbb/99qGtXLmyKDNRmmrXrh3agAED8j7fyJEjQ6uoqMj7fBRfmzZtQmvfvn2qY4cMGVLgaYrrgAMOCO3qq68ObcyYMaF5fVpa9thjj9B++9vfhtagQYPQ0tyT7r777tD69OkTWiHfy7N5aNiwYWhXXHFFaG+88UZo48ePD+1f//pXaMuWLau0nfTaLOl99IQJE0L78MMPQ3vnnXdCe++990JbtWpVaF4nbhlatmwZWtI9bsP3wknX/6Y4/PDDQ1u3bl1oH3
/8cWiTJ08OLenv4po1a/Kcjo1Rt27d0NI+B//6178O7ZVXXinIXN8l6bOlG264IbRtt902tL59+4bWtWvX0B588MHQxo0bl3JCNkXjxo1DO//880N78803Q3v33XeLMhPJfKMaAAAAAAAAgExZqAYAAAAAAAAgUxaqAQAAAAAAAMiUhWoAAAAAAAAAMlWrugfYWL169QqtVatWoc2cOTO0/fbbL7Q2bdqE1r59+9DatWsX2ueff15pe8899wz7pLVu3brQFixYENruu++e6nxz5swJbcqUKRs/GAXx2WefFf0x+vXrV2l7n332SXXcO++8k6pBIV1zzTWhJf09cd/a+rzwwguh1ahR3H9Xt2jRotBWrFgRWtOmTUPba6+9QvvrX/8aWs2aNfOcjqy0bNkytCeeeCK0WbNmhXbzzTcXZaZCOOWUU6p7BDYTBx54YGiHHHJIqmOT3sv8+c9/3uSZKJ5ddtkltNNPPz3Vseeff35oSe9dS8UBBxwQ2sSJE1MdO2bMmNCWL1++yTNROH379g2tfv36BTt/9+7dQzvppJNCu+mmm0K7++67Q1uzZk1hBiNTderUCW3ChAmhHXTQQaF17do11WO8/fbboW34meXs2bPDPk2aNAlt7ty5oZWXl6eagy1D0mfll156aWhJ97gdd9yxyvN/8cUXob3++uuh/fd//3doSZ8HTZ06NbTDDjsstKT7e4cOHUJ7//33Qxs+fHhoFF7t2rVT7ffII4+Eds899xR6nILp379/aEl/f5I+IzrttNNCGzduXGEG43sNHDiwukfYZEnrkmnXIZPuhZ988skmz1QMvlENAAAAAAAAQKYsVAMAAAAAAACQKQvVAAAAAAAAAGTKQjUAAAAAAAAAmapV3QNsrJdeeilVSzJ+/PhU+9WrVy+01q1bhzZ16tRK24ceemiq8ydZvXp1aEk/bD5z5szQ6tevH9qsWbPynoXS16lTp9CGDBlSaXvbbbcN+3z99dehXX/99aF9++23mzAdVNasWbPQ2rZtG1rSPW/lypXFGIkSceyxx4a27777hlZeXp6qpTF8+PDQJkyYENqyZctCO/7440MbMGBAqse9+OKLQxs2bFiqY8nGwIEDQ6tTp05oJ510UmgrVqwoykwbK+k1YdLfs3z//rBlO/300/M+Nuk+Smm7/fbbQzv77LND2/A9by6Xyz311FNFmalYjj766NB23XXX0B5++OHQ/vCHPxRjJPLUtGnT0M4999xUx37wwQehffXVV6GdcMIJVZ5rp512Cq1v376hPfbYY6F9+eWXVZ6f6pX0Wcrjjz8e2kEHHRTazTffHNrEiRPznmX27NlV7jNnzpy8z8+W4b777guta9euoTVs2DDV+ZI+Z//73/9eabt///5hn6TPtpMcccQRoSW9X37wwQdDS/p8Pulefs8994T29NNPV9pesGDB941JnoYOHZpqv3feeafIkxTfiy++GFrv3r1Da9euXRbjkKBjx46p9nvggQeKPEmU9Jlg0rxJa5Xbbbddqsf45ptvQrvzzjtDS/v3tph8oxoAAAAAAACATFmoBgAAAAAAACBTFqoBAAAAAAAAyJSFagAAAAAAAAAyVau6ByhFS5YsCW3SpElVHvfSSy8VdI7TTz89tKQfT//73/8e2qhRowo6C6Wlbdu2oW277bZVHpd0Xbz66qsFmQm+y7HHHptqvwULFhR5EqpTs2bNQvvjH/8YWsOGDfN+jM8++yy0p59+utL2r371q7DPt99+m/f5L7zwwtAaNWoU2q233hraD37wg9B+//vfh7Z27dpU85Fet27dQuvQoUNon376aWhTpkwpykyFMGDAgNDKy8tDe+WVV0JbunRpESZic3LMMcek2m/NmjWhJV17lLaKiorQku4X8+bNCy3pGqgO2223XWj9+/cP7ZJLLgkt6c9/3nnnFWYwiqZ169ah1a1bN7TXX389tKT3JEmvxX76059W2k66ppo3bx7abrvtFtqf/vSn0E4++eTQFi9eHBrZ2GGHHUK7/vrrQ+vUqVNoCxcuDO03v/lNaGnfa8CGku5R11xzTWgXXHBBaGVlZaElfeYybNiw0G677bbQVq5c+Z1zbqwGDRqEVrNmzdBuuOGG0MaPHx9a06ZNCzIXG2/vvfcOrXHjxqEtW7YstKT1jM3Nyy+/HFrv3r2rYRJyuVxu++23D61Wrbj8+cUXX4T28MMP5/24Gz5GmzZtwj5jxowJLem1Y40a8XvFSffuiRMnhpb0uE2aNAkt6XPMRx99tNJ20uefxeYb1QAAAAAAAABkykI1AAAAAAAAAJmyUA0AAAAAAABApixUAwAAAAAAAJCp+GviVItddtkltHvvvTe0pB9UHzJkSGiLFy8uzGBUu2effTa0n/zkJ1Ue9+ijj4Y2cODAQowEG+XAAw9Mtd+tt95a5EmoTrVqxZccDRs2zPt8r776amg9evQIbeHChXk/xoY+++yz0G655ZbQ7rjjjtC233770JKu+bFjx4Y2a9astCOS0hlnnBFa0v9GSa/FSkWzZs1C69mzZ2jr168P7cYbbwxt7dq1BZmLzccRRxzxvdvfZeXKlaFNmzatECNRgjp27BjahAkTQlu6dGlow4YNK9gcxx57bGjt27cPrV27dqnON3r06E0diWpQu3bt0CoqKkK78847U51v9erVoT300EOVtpNeM+y9996pzv/tt9+GtmbNmlTHko1TTz01tOuuuy60OXPmhHb00UeHtmzZsoLMBblc8vNcv379QisrKwvtiy++CO30008P7a9//Wt+wyWoWbNmaHvuuWdoSZ9XvvDCC6HVq1cv1eMm/flHjhwZWtJrFTbN2WefHVrSc+TTTz8d2ptvvlmUmdh6XXDBBaHtuuuuoY0YMSLvx2jcuHFoF154YaXttOsv8+bNCy3p3pX0udTcuXNTPUbSZ4wdOnQIbffdd6+0nfT5Z7H5RjUAAAAAAAAAmbJQDQAAAAAAAECmLFQDAAAAAAAAkCkL1QAAAAAAAABkqlZ1D8D/79JLLw2tUaNGoS1ZsiS0jz/+uCgzkb0Nf7g+l8vljjjiiNBq164d2sKFCytt33jjjWGfFStWbMJ0kE67du0qbZ977rlhn/feey+0v/zlL0Wbic3blClTQjvvvPNC2/A+mIWxY8eG1rNnz9AOPfTQLMYhwU477RTahvep7zJs2LBCj1MwF154YWgNGzYMbebMmaFNmjSpKDOxecn3vlTKfy9I76677grtuOOOC61x48ahHXPMMaGVlZWF1qVLlzyni5LOX1FRkerYf/7zn6H1799/k2ciez/96U9T7dexY8fQnn322bwes23btnkdl8vlcm+//XZo3pOXlqTPW5IkvX+dO3duoceBSmrWrBna+vXrUx27bt260A4//PDQunXrFtoPf/jDKs+/atWq0Pbbb79ULel9+6677lrlY36Xr776KrSkz0TXrl2b92OQrEePHqEtW7YstKTXnVBoBx98cKr9/vGPf+T9GAMHDgztoosuqrSd9B7l5ZdfDu3KK68Mbfr06XnPlmRT/qxZ8
41qAAAAAAAAADJloRoAAAAAAACATFmoBgAAAAAAACBTfqO6Ghx55JGhXXfddamOPfXUU0P78MMPN3UkSsTTTz8dWoMGDVId+4c//KHS9qxZswoyE2ysE044odJ2/fr1wz7jx48PbfXq1UWbidJUo0a6fy+X9FtapSLpNzOT/lxp/6w33HBDaOecc85Gz8X/ql27dmj/9m//FtoTTzyRxTgF07x581T7eZ3Id0nzu6tLly4NzW9UbxmmTp0aWqtWrUJr3bp1aCeddFJo/fr1C23BggWhPfLIIyknrGzkyJGhvf/++6mOffPNN0PzXmnzlPRcnfRb6IceemhoSb+5euCBB4bWtWvXStv16tUL+yTdG5P269WrV2hJ1/KMGTNCIxtJv8+bJOm+N3jw4ND+9Kc/hTZt2rSNngtyueTfNJ00aVJoG34Gk8vlck2aNAntd7/7XWhJv6WaZMPfxk76/ey00v4edXl5eWhjxowJ7fLLLw9t/vz5Gz8YBfHRRx+FNnny5GqYhK1N48aNC3q+ffbZJ7Tu3btXedz9998f2hVXXBHamjVr8htsE7377rupWtZ8oxoAAAAAAACATFmoBgAAAAAAACBTFqoBAAAAAAAAyJSFagAAAAAAAAAyVau6B9gadejQIbRtttkmtJdeeim0t956qygzkb0uXbqE1qZNm1THvvLKK6ENHjx4U0eCgjjooIMqbVdUVIR9Ro8endU4lIjevXuHVl5eXg2TFFbnzp1DO/jgg0NL+rMmtRtuuKEgc/G/li9fHtq0adNCa9WqVWj169cPbfHixQWZa2PssssuoXXr1i3VsZMnTy70OGyGjjrqqNDOOuusKo9btmxZaHPnzi3ITJSeJUuWhDZp0qRU7dprry3KTP9j7733Dq2srCy0pPt73759izES1WDixImhJd2nDjzwwNBmzJgRWtL7lDSPeemll4b2/PPPh/bv//7voV1++eWhJb1OJhuNGjUKLek1eu3atUMbNGhQaAMHDgxt+PDhob399tuhNWnSJLRPP/200vb06dPDPkkOOOCA0JI+T/ScXtpWrVoVWteuXUPbeeedQ7vuuutCO/LII0NbtGhRaHPmzAltw78DG37uk8vlcocddlhom2LEiBGh9e/fP7SlS5cW9HFJVqdOndCS1jOgutStWze0pPcLaV122WWhJd1vH3/88UrbF198cd6PWWhJ/03Wrl0b2po1a7IY53v5RjUAAAAAAAAAmbJQDQAAAAAAAECmLFQDAAAAAAAAkCkL1QAAAAAAAABkqlZ1D7Cl22677UI76aSTQkv6wfLBgweHlvRj55S+Bg0ahNa/f//Qttlmm1TnmzZtWmgrVqzY6LlgU+22226hHX300ZW2P/7447DPmDFjijYTpalz587VPcJGadSoUWj7779/aEn38rQWLFgQmuf5wlu1alVos2bNCu30008Pbdy4caHdcccdhRksl8u1bNkytL333ju0Zs2ahVZRUZHqMcrLyzd6LrY8Sa9Fa9So+t8s/+UvfynGOLDRBg0aFFrSffDaa68NLen5ls3T4sWLQzvzzDNDGz16dGg77bRTqse4++67K20nXVOrV68O7ZlnngntuuuuC+3EE08MrXnz5qElvVah8H7zm9+EdtVVV+V9vqTn1ksuuSRVK7ake+Err7wSWo8ePTKYhkJaunRpaEn3n0J69NFHQzvssMNSHbt8+fLQkv7ePfzww6GtX78+1WNQeEnPt0nPXwsXLsxinJLQpUuXVPutW7euyJOQyyW/N0j7uUmS3XffPdX5kvarDo0bNw7t/PPPDy3pNWsp8I1qAAAAAAAAADJloRoAAAAAAACATFmoBgAAAAAAACBTFqoBAAAAAAAAyFSt6h5gS9evX7/QDj744NDGjx8f2ptvvlmUmcje1VdfHdqhhx6a6thnn302tMGDB2/qSFAQ//Ef/xHaLrvsUmn7z3/+c0bTQOEMGDAgtEsvvTTv882ePTu0n//856HNmTMn78cgvaTn0bKystA6duwY2hNPPFGwORYuXBhaRUVFaA0bNsz7MR5++OG8j2XL0a1btyr3Wbp0aWj33XdfEaaB73fGGWeE9rOf/Sy05cuXh7Zo0aKizETpmjhxYmhJ97yzzjortKT73qBBgyptr169OtUcQ4cODW2//fYLrUuXLlU+Zi6X/DqRwrvuuutCGzVqVGiPP/54aLVqxY9U99xzz9Bq1CiN7wg1atQotKS/KwMHDgztxhtvLMpMbD6uueaaSts9evTI+1y9e/cOrZDvsaAYDjnkkNA6deqU6tj+/fsXehwycNFFF4V25JFHVtmuv/76sM+IESNCK/T7lmeeeSa0b7/9NrTbb7+9oI9bKKXxagkAAAAAAACArYaFagAAAAAAAAAyZaEaAAAAAAAAgExZqAYAAAAAAAAgU7Wqe4AtSceOHUP75S9/Gdo333wT2pAhQ4oyE6XhqquuyvvYPn36hLZixYpNGQcKpmnTplXus2TJkgwmgfy98MILoe27774FfYwZM2aENnny5II+Bul99NFHoZ155pmhtW7dOrQWLVoUbI7Ro0en2u+RRx4JrWfPnqmOXbVq1UbNxOZvjz32CO2ss86q8ri5c+eGNmXKlILMBBvj5JNPTrXf888/H9q7775b6HHYDE2cODFVK6Sk59tRo0aF1qVLl9COO+640OrXrx/a4sWL85yO77J+/frQkp779tlnn1Tn+/GPfxzaNttsE9oNN9wQ2qGHHprqMQqprKwstEMOOSTzOSgtF1xwQWgDBw6stF2rVrolhenTp4f2zDPP5DcYZCTpPpj02f7OO+8c2htvvBHaiy++WJC5+F+NGzcObffddy/oYyxatCi0Nm3ahDZ27NhK20OHDg37nHTSSaF16tQptOXLl6fab8N7ci6Xyx188MGh3XjjjaG9/fbboZUC36gGAAAAAAAAIFMWqgEAAAAAAADIlIVqAAAAAAAAADJloRoAAAAAAACATNWq7gE2Zw0aNKi0/bvf/S7sU7NmzdBeeOGF0Er1R8ypfvXr1w9t7dq1BTv/smXLUp1/m222CW2nnXZK9Rg777xzaFdddVWqYze0fv360K699trQvv3227zOz8bp1KlTlfs899xzGUxCqSsrKwutRo10/17u5JNPTrXfiBEjQmvcuHGVxyXNUV5enuox0+rcuXNBz0c2pk2blqoV2z//+c+8j23ZsmVoH3744aaMQ4k74ogjQktzv3322WeLMA1svKTn/ZUrV4Z2++23ZzEO5O3JJ58MrUuXLqF17949tD59+oQ2ZMiQwgxG0bz00kup9mvdunVohx56aGjr1q2rtP3QQw+Ffe6///7QfvGLX4R21llnpZqNrcthhx0WWtLz6w477FDluVasWBFa7969Q/vXv/6VcjpKyezZs0Nbvnx59oMUWNLaTd++fUNLeq7+4osvUh274b2cTTdv3rzQ/vGPf4TWtGnT0I4//vjQ7rvvvtCS1hbmz58f2obP30mflc+cOTO0pPWSpPvv+eefn2q2G2+8MbShQ4eGVqp8oxoAAAAAAACA
TFmoBgAAAAAAACBTFqoBAAAAAAAAyJSFagAAAAAAAAAyVau6B9hc1KxZM7Tx48dX2t5rr73CPrNmzQrtl7/8ZeEGY4v3wQcfFPX8Tz31VGjz588Pbddddw2te/fuRZlpY3355Zeh3XTTTdUwyZbtqKOOCm233XarhknYHA0bNiy0W2+9NdWxzz//fGjl5eWpjk27X6GOy+VyueHDh+d9LCQpKytL1ZJ8+OGHhR6HEtegQYNU+y1cuLDS9l133VWMcaBKvXv3rrSd9L7j66+/Du3dd98t2kxQCEmvJ5Ne/55yyimhDR48OLQ//vGPoX3yySd5Tkd1mjBhQmhJn2HUqlX5Y9tevXqFfVq0aBFa+/bt855t7ty5eR/L5qdz586h1a1bt8rjVq5cGVqXLl1Ce+ONN/IbjJIzadKk0L744ovQdtxxx9AaNmwY2obvRQqtVatWoV1yySWhtWnTJrS2bdumeoyzzz47tHfeeSfVsRTe+eefH9q4ceNC69ChQ2gvvvhiaHfccUdoSWsmGzr88MNDu/7661Ptl/Q5z8cffxzagAEDQhszZkyVs5Uy36gGAAAAAAAAIFMWqgEAAAAAAADIlIVqAAAAAAAAADJloRoAAAAAAACATNWq7gE2F82bNw/tkEMOqfK4q666KrRZs2YVZCY2Hy+88EJop5xySjVMEp1xxhkFPd+6detCKy8vT3Xs2LFjK21PmTIl1XGvv/56qv3YNF27dg2tZs2aob333nuVtl977bWizcTm45lnngmtX79+oTVq1CiLcaq0YMGC0GbOnBnahRdeGNr8+fOLMhNbr4qKilQNcrlc7sQTT0y135w5cyptL1u2rBjjQJV69+5daTvp/jZu3LhU56pbt25o9erVC23D6x+yMm3atNAGDRoU2m233RbazTffHNo555wT2qpVq/Ibjswkva948sknQzvzzDOrPNdxxx2X6jHXr18fWtK99brrrkt1PjY/Sc+R11xzTV7neuyxx0J75ZVX8joXW5b99tsvtPHjx4dW7M9N2rVrF1qDBg1SHbtw4cLQNvzMOpfL5f72t79t/GAUzdy5c0M76aSTQps0aVJoP/rRj0J76qmnUj1uWVlZpe1N+azmoYceCu3aa68NbdGiRXk/RqnyjWoAAAAAAAAAMmWhGgAAAAAAAIBMWagGAAAAAAAAIFMWqgEAAAAAAADIVK3qHqAUNW3aNLQJEyZUeVy/fv1Ce/755wsyE5u30047LbRrrrkmtG222SbvxzjggAMqbXfv3j3vcz344IOhzZ49O9WxTz/9dGgfffRR3rNQPbbffvvQOnTokOrY0aNHV9pev359QWZi8/bZZ5+F1qNHj9BOPfXU0K644opijPS9brrpptDuueeezOeAXC6X+8EPfpBqv1WrVhV5EkpN0mvH5s2bpzp29erVlbbXrl1bkJmgGJJeT/bs2TO0K6+8MrTp06eH9vOf/7wwg0EBPProo6FddNFFoSV9rjBkyJDQPvjgg8IMRtEkvWb7xS9+EdoOO+xQabtt27Zhn1122SW0pM9vRo4cGdoNN9zw3UOyWdvw2snlcrkZM2aElvZzyA3vK0nXK1ufAQMGhDZw4MDQ2rRpk8U4VSovLw9t8eLFod1xxx2h/ed//mdRZqK45s+fH1q7du1CS1pHadGiRWi9evUK7b/+678qbVdUVKSa7YEHHghta15D8Y1qAAAAAAAAADJloRoAAAAAAACATFmoBgAAAAAAACBTFqoBAAAAAAAAyFSt6h6gFF144YWhNWnSpMrjXn311dDS/ng6W59bb721qOc/66yzinp+tmxr164NbcmSJaGNHTs2tLvuuqsoM7Hlee2111K1CRMmhJb0XN25c+fQNrxGR4wYEfYpKysLbcaMGaFBdTn33HNDW7p0aWhDhw7NYBpKSXl5eWhTpkwJrWXLlqF9+umnRZkJiuGCCy4I7fzzzw/tgQceCM29kVK3YMGC0E444YTQZs+eHdq1114bWs+ePQsyF9n66quvQtvw/c0555wT9mnXrl1ov/rVr0L7+uuvN2E6NjfHH398aHvssUdoaT+3vvLKKyttr169Or/B2KKMGTMmtHfeeSe08ePHh5b0/qSQ7r///tDee++90IYPH17UOSg9SZ+l3HfffamO7devX4Gn4X/4RjUAAAAAAAAAmbJQDQAAAAAAAECmLFQDAAAAAAAAkKmt/jeqjzrqqNAuu+yyapgEoHQk/Ub1EUccUQ2TQPLvGSU12FL97W9/C+2OO+4IbdKkSVmMQwlZv359aAMGDAgt6fcHp06dWpSZYGP16dOn0vaQIUPCPq+99lpow4YNC23JkiWhrVmzZhOmg+oxZ86c0CZOnBhaly5dQtt///0rbc+YMaNwg1GtRo4cmarB0KFDQ0v7e9S33XZbaN5nkNa8efNCa9WqVTVMAmxOfKMaAAAAAAAAgExZqAYAAAAAAAAgUxaqAQAAAAAAAMiUhWoAAAAAAAAAMlWrugeobkcffXRoO+ywQ6pjZ82aVWl7xYoVBZkJAAD+R+fOnat7BDYj8+bNC+28886rhkkgncmTJ1faPv7446tpEiht3bp1C+39998PrUWLFpW2Z8yYUbSZgNJUv3790MrKykL7+uuvQ/vtb39bjJEA4Dv5RjUAAAAAAAAAmbJQDQAAAAAAAECmLFQDAAAAAAAAkCkL1QAAAAAAAABkqlZ1D7C5eP/990P78Y9/XGl78eLFWY0DAAAAwFbim2++CW2vvfaqhkmAUnfHHXekakOHDg1t/vz5RZkJAL6Lb1QDAAAAAAAAkCkL1QAAAAAAAABkykI1AAAAAAAAAJmyUA0AAAAAAABApmpV9wDV7ZZbbknVAAAAAACglN15552pGgCUAt+oBgAAAAAAACBTFqoBAAAAAAAAyJSFagAAAAAAAAAylfdCdUVFRSHnYAuV1XXieiSNLK4T1yJpuR4pJa5HSoXXjpQS90ZKieuRUuG5mlLi3kgpcT1SSlyPlIo010neC9XLly/P91C2IlldJ65H0sjiOnEtkpbrkVLieqRUeO1IKXFvpJS4HikVnqspJe6NlBLXI6XE9UipSHOdlFXk+c8eysvLc/PmzcvVrVs3V1ZWls8p2IJVVFTkli9fnmvcuHGuRo3i/z/Mux75Pllej65FquJ6pJS4HikVXjtSStwbKSWuR0qF52pKiXsjpcT1SClxPVIqNuZazHuhGgAAAAAAAADyUfx/AgkAAAAAAAAA/4eFagAAAAAAAAAyZaEaAAAAAAAAgExZqAYAAAAAAAAgUxaqAQAAAAAAAMiUhWoAAAAAAAAAMmWhGgAAAAAAAIBMWagGAAAAAAAAIFMWqgEAAAAAAADIlIVqAAAAAAAAADJloRoAAAAAAACATFmoBgAAAAAAACBT/x8+E4s1YPbI7gAAAABJRU5ErkJggg==\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "# Plot input data\n", "number_digits = 12 # Plot first 12 digits\n", "plt.figure(figsize=(25, 4))\n", "for i in range(number_digits):\n", " # Display original\n", " ax = plt.subplot(2, number_digits, i + 1)\n", " plt.imshow(X_test[i].reshape(28, 28)) # Reshape necessary because raw data is an array of size 784\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)" ] }, { "cell_type": "code", "execution_count": 3, "id": "64358920-17b5-4dd2-b99b-06a71f1ceff3", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"model\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " input_1 (InputLayer) [(None, 784)] 0 \n", " \n", " dense (Dense) (None, 784) 615440 \n", " \n", " dense_2 (Dense) (None, 784) 615440 \n", " \n", "=================================================================\n", "Total params: 1,230,880\n", "Trainable params: 1,230,880\n", "Non-trainable params: 0\n", "_________________________________________________________________\n", "Model: \"model_1\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " input_1 (InputLayer) [(None, 784)] 0 \n", " \n", " dense (Dense) (None, 784) 615440 \n", " \n", " dense_3 (Dense) (None, 784) 615440 \n", " \n", "=================================================================\n", "Total params: 1,230,880\n", "Trainable params: 1,230,880\n", "Non-trainable params: 0\n", "_________________________________________________________________\n", "Model: \"model_2\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " input_1 (InputLayer) [(None, 784)] 0 \n", " \n", " dense_1 (Dense) (None, 784) 615440 \n", " \n", " dense_4 (Dense) (None, 784) 615440 \n", " \n", "=================================================================\n", "Total params: 1,230,880\n", "Trainable params: 1,230,880\n", "Non-trainable params: 0\n", "_________________________________________________________________\n", "Model: \"model_3\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " input_1 (InputLayer) [(None, 784)] 0 \n", " \n", " dense_1 (Dense) (None, 784) 615440 \n", " \n", " dense_5 (Dense) (None, 784) 615440 \n", " \n", "=================================================================\n", "Total params: 1,230,880\n", "Trainable params: 1,230,880\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2022-12-04 13:57:53.418991: W tensorflow/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory\n", "2022-12-04 13:57:53.419033: W tensorflow/stream_executor/cuda/cuda_driver.cc:263] failed call to cuInit: UNKNOWN ERROR (303)\n", "2022-12-04 13:57:53.419069: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be 
running on this host (26920c06eb75): /proc/driver/nvidia/version does not exist\n" ] } ], "source": [ "# Prepare encoder and decoder layers, using 'relu', 'sigmoid' or a mixture of the two as activation functions.\n", "\n", "digits_size = X_train.shape[1] # Number of values per digit (784)\n", "input_encoder = keras.Input(shape = (digits_size,)) # Input layer for one flattened digit\n", "\n", "encoder_relu = layers.Dense(digits_size, activation = 'relu')(input_encoder) # Encoder layers: same width as the input, so no compression\n", "encoder_sigmoid = layers.Dense(digits_size, activation = 'sigmoid')(input_encoder)\n", "\n", "decoder_relu_relu = layers.Dense(digits_size, activation = 'relu')(encoder_relu)\n", "decoder_relu_sigmoid = layers.Dense(digits_size, activation = 'sigmoid')(encoder_relu)\n", "decoder_sigmoid_relu = layers.Dense(digits_size, activation = 'relu')(encoder_sigmoid)\n", "decoder_sigmoid_sigmoid = layers.Dense(digits_size, activation = 'sigmoid')(encoder_sigmoid)\n", "\n", "# Build the four autoencoder models; the relu-based pair shares one encoder layer, the sigmoid-based pair the other\n", "autoencoder_relu_relu = keras.Model(input_encoder, decoder_relu_relu)\n", "autoencoder_relu_sigmoid = keras.Model(input_encoder, decoder_relu_sigmoid)\n", "autoencoder_sigmoid_relu = keras.Model(input_encoder, decoder_sigmoid_relu)\n", "autoencoder_sigmoid_sigmoid = keras.Model(input_encoder, decoder_sigmoid_sigmoid)\n", "\n", "# Print information about the autoencoders\n", "autoencoder_relu_relu.summary()\n", "autoencoder_relu_sigmoid.summary()\n", "autoencoder_sigmoid_relu.summary()\n", "autoencoder_sigmoid_sigmoid.summary()\n" ] }, { "cell_type": "code", "execution_count": null, "id": "f30c5487-3c4b-47c8-a3a6-272f3a692e8b", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/10\n", "938/938 [==============================] - 12s 12ms/step - loss: 0.6315 - val_loss: 1.1206\n", "Epoch 2/10\n", "938/938 [==============================] - 11s 12ms/step - loss: 1.0697 - val_loss: 1.2907\n", "Epoch 3/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 1.3335 - val_loss: 1.3233\n", "Epoch 4/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 1.4539 - val_loss: 1.5141\n", "Epoch 5/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 1.5719 - val_loss: 1.6318\n", "Epoch 6/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 1.5981 - val_loss: 1.6568\n", "Epoch 7/10\n", "938/938 [==============================] - 15s 16ms/step - loss: 1.6723 - val_loss: 1.7463\n", "Epoch 8/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 1.7097 - val_loss: 1.7265\n", "Epoch 9/10\n", "938/938 [==============================] - 14s 14ms/step - loss: 1.7227 - val_loss: 1.7702\n", "Epoch 10/10\n", "938/938 [==============================] - 18s 20ms/step - loss: 1.7541 - val_loss: 1.7797\n", "Epoch 1/10\n", "938/938 [==============================] - 15s 15ms/step - loss: 0.0959 - val_loss: 0.0704\n", "Epoch 2/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 0.0686 - val_loss: 0.0669\n", "Epoch 3/10\n", "938/938 [==============================] - 15s 16ms/step - loss: 0.0663 - val_loss: 0.0655\n", "Epoch 4/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 0.0653 - val_loss: 0.0648\n", "Epoch 5/10\n", "938/938 [==============================] - 14s 14ms/step - loss: 0.0648 - val_loss: 0.0645\n", "Epoch 6/10\n", "938/938 [==============================] - 15s 16ms/step - loss: 0.0644 - val_loss: 0.0641\n", "Epoch 7/10\n", "938/938 [==============================] 
- 13s 14ms/step - loss: 0.0641 - val_loss: 0.0639\n", "Epoch 8/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 0.0639 - val_loss: 0.0637\n", "Epoch 9/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 0.0637 - val_loss: 0.0636\n", "Epoch 10/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 0.0636 - val_loss: 0.0634\n", "Epoch 1/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 1.3946 - val_loss: 1.6848\n", "Epoch 2/10\n", "938/938 [==============================] - 15s 15ms/step - loss: 1.9113 - val_loss: 2.0953\n", "Epoch 3/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.1973 - val_loss: 2.3097\n", "Epoch 4/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3203 - val_loss: 2.3437\n", "Epoch 5/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3379 - val_loss: 2.3411\n", "Epoch 6/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3402 - val_loss: 2.3462\n", "Epoch 7/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3468 - val_loss: 2.3640\n", "Epoch 8/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3600 - val_loss: 2.3628\n", "Epoch 9/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3474 - val_loss: 2.3403\n", "Epoch 10/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 2.3355 - val_loss: 2.3433\n", "Epoch 1/10\n", "938/938 [==============================] - 12s 12ms/step - loss: 0.1132 - val_loss: 0.0785\n", "Epoch 2/10\n", "938/938 [==============================] - 12s 13ms/step - loss: 0.0742 - val_loss: 0.0704\n", "Epoch 3/10\n", "938/938 [==============================] - 12s 13ms/step - loss: 0.0693 - val_loss: 0.0678\n", "Epoch 4/10\n", "938/938 [==============================] - 11s 12ms/step - loss: 0.0674 - val_loss: 0.0665\n", "Epoch 5/10\n", "938/938 [==============================] - 11s 12ms/step - loss: 0.0663 - val_loss: 0.0658\n", "Epoch 6/10\n", "938/938 [==============================] - 13s 14ms/step - loss: 0.0656 - val_loss: 0.0653\n", "Epoch 7/10\n", "938/938 [==============================] - 14s 14ms/step - loss: 0.0652 - val_loss: 0.0649\n", "Epoch 8/10\n", "938/938 [==============================] - 14s 15ms/step - loss: 0.0648 - val_loss: 0.0646\n", "Epoch 9/10\n", "130/938 [===>..........................] 
- ETA: 12s - loss: 0.0646" ] } ], "source": [ "# Compile the autoencoders; binary_crossentropy treats the outputs as per-pixel probabilities in [0, 1],\n", "# which sigmoid outputs satisfy but relu outputs need not\n", "autoencoder_relu_relu.compile(optimizer='adam', loss='binary_crossentropy')\n", "autoencoder_relu_sigmoid.compile(optimizer='adam', loss='binary_crossentropy')\n", "autoencoder_sigmoid_relu.compile(optimizer='adam', loss='binary_crossentropy')\n", "autoencoder_sigmoid_sigmoid.compile(optimizer='adam', loss='binary_crossentropy')\n", "\n", "# Train the autoencoders to reconstruct their input (input and target are both the images)\n", "autoencoder_relu_relu.fit(X_train, X_train, epochs = 10, batch_size = 64, shuffle=True, validation_data = (X_test, X_test))\n", "autoencoder_relu_sigmoid.fit(X_train, X_train, epochs = 10, batch_size = 64, shuffle=True, validation_data = (X_test, X_test))\n", "autoencoder_sigmoid_relu.fit(X_train, X_train, epochs = 10, batch_size = 64, shuffle=True, validation_data = (X_test, X_test))\n", "autoencoder_sigmoid_sigmoid.fit(X_train, X_train, epochs = 10, batch_size = 64, shuffle=True, validation_data = (X_test, X_test))" ] }, { "cell_type": "code", "execution_count": null, "id": "7ed5b847-34ce-442e-b150-815e456dd00d", "metadata": {}, "outputs": [], "source": [ "# Set up validation: build standalone encoder and decoder models so encoding and decoding can be run separately\n", "output_layer_relu_relu = autoencoder_relu_relu.layers[-1] # The last layer of each autoencoder is its decoder\n", "output_layer_relu_sigmoid = autoencoder_relu_sigmoid.layers[-1]\n", "output_layer_sigmoid_relu = autoencoder_sigmoid_relu.layers[-1]\n", "output_layer_sigmoid_sigmoid = autoencoder_sigmoid_sigmoid.layers[-1]\n", "\n", "encoder_validate_relu = keras.Model(input_encoder, encoder_relu)\n", "encoder_validate_sigmoid = keras.Model(input_encoder, encoder_sigmoid)\n", "\n", "encoded_input = keras.Input(shape = (digits_size,)) # Input layer for a code vector fed to the decoders\n", "\n", "decoder_validate_relu_relu = keras.Model(encoded_input, output_layer_relu_relu(encoded_input))\n", "decoder_validate_relu_sigmoid = keras.Model(encoded_input, output_layer_relu_sigmoid(encoded_input))\n", "decoder_validate_sigmoid_relu = keras.Model(encoded_input, output_layer_sigmoid_relu(encoded_input))\n", "decoder_validate_sigmoid_sigmoid = keras.Model(encoded_input, output_layer_sigmoid_sigmoid(encoded_input))\n", "\n", "# Perform predictions: encode the test images, then decode them with each decoder\n", "encoded_imgs_relu = encoder_validate_relu.predict(X_test)\n", "encoded_imgs_sigmoid = encoder_validate_sigmoid.predict(X_test)\n", "\n", "decoded_imgs_relu_relu = decoder_validate_relu_relu.predict(encoded_imgs_relu)\n", "decoded_imgs_relu_sigmoid = decoder_validate_relu_sigmoid.predict(encoded_imgs_relu)\n", "decoded_imgs_sigmoid_relu = decoder_validate_sigmoid_relu.predict(encoded_imgs_sigmoid)\n", "decoded_imgs_sigmoid_sigmoid = decoder_validate_sigmoid_sigmoid.predict(encoded_imgs_sigmoid)" ] }, { "cell_type": "code", "execution_count": null, "id": "5562bc93-7e4b-4364-884f-b6e1c0f873a8", "metadata": {}, "outputs": [], "source": [ "# Plot relu relu results\n", "plt.figure(figsize=(25, 4))\n", "for i in range(number_digits):\n", " # Plot original\n", " ax = plt.subplot(2, number_digits, i + 1)\n", " plt.imshow(X_test[i].reshape(28, 28)) # Reshape necessary because raw data is an array of size 784\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", " \n", " # Plot reconstruction\n", " ax = plt.subplot(2, number_digits, i + 1 + number_digits)\n", " plt.imshow(decoded_imgs_relu_relu[i].reshape(28, 28))\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "id": "9d2ee3f5-da66-40e3-9cb9-b8a1f7905a3f", "metadata": {}, "outputs": [], "source": [ "# Plot relu sigmoid results\n", 
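"\n", "# A quick numeric check alongside the visual comparisons (a minimal sketch): print a rough\n", "# reconstruction error per activation combination. This assumes the decoded_imgs_* arrays from the\n", "# prediction cell above are still in memory; plain mean squared error is used only as a summary number.\n", "reconstructions = {'relu-relu': decoded_imgs_relu_relu, 'relu-sigmoid': decoded_imgs_relu_sigmoid, 'sigmoid-relu': decoded_imgs_sigmoid_relu, 'sigmoid-sigmoid': decoded_imgs_sigmoid_sigmoid}\n", "for name, reconstruction in reconstructions.items():\n", "    print(name, 'test MSE:', np.mean((X_test - reconstruction) ** 2)) # average over all pixels and test images\n", "\n", 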
"plt.figure(figsize=(25, 4))\n", "for i in range(number_digits):\n", " # Plot original\n", " ax = plt.subplot(2, number_digits, i + 1)\n", " plt.imshow(X_test[i].reshape(28, 28)) # Reshape necessary because raw data is an array of size 784\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", " \n", " # Plot reconstruction\n", " ax = plt.subplot(2, number_digits, i + 1 + number_digits)\n", " plt.imshow(decoded_imgs_relu_sigmoid[i].reshape(28, 28))\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "id": "e79482b5-3695-4980-9f62-32b048e63848", "metadata": {}, "outputs": [], "source": [ "# Plot sigmoid relu results\n", "plt.figure(figsize=(25, 4))\n", "for i in range(number_digits):\n", " # Plot original\n", " ax = plt.subplot(2, number_digits, i + 1)\n", " plt.imshow(X_test[i].reshape(28, 28)) # Reshape necessary because raw data is an array of size 784\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", " \n", " # Plot reconstruction\n", " ax = plt.subplot(2, number_digits, i + 1 + number_digits)\n", " plt.imshow(decoded_imgs_sigmoid_relu[i].reshape(28, 28))\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "id": "d0256b9d-d840-4d1b-94b8-8a29419c04a6", "metadata": {}, "outputs": [], "source": [ "# Plot sigmoid sigmoid results\n", "plt.figure(figsize=(25, 4))\n", "for i in range(number_digits):\n", " # Plot original\n", " ax = plt.subplot(2, number_digits, i + 1)\n", " plt.imshow(X_test[i].reshape(28, 28)) # Reshape necessary because raw data is an array of size 784\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", " \n", " # Plot reconstruction\n", " ax = plt.subplot(2, number_digits, i + 1 + number_digits)\n", " plt.imshow(decoded_imgs_sigmoid_sigmoid[i].reshape(28, 28))\n", " plt.gray()\n", " ax.get_xaxis().set_visible(False)\n", " ax.get_yaxis().set_visible(False)\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "id": "821d10ed-5d5e-4d3b-aa6c-8cf6be2eb5d7", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.6" } }, "nbformat": 4, "nbformat_minor": 5 }