From 0c33e7fe472f00d0093d3bf42a30961ef11cd811 Mon Sep 17 00:00:00 2001 From: paumann Date: Sun, 18 Jun 2023 17:13:15 +0200 Subject: [PATCH] Final submission --- Aufgabe 4/aufgabe04.ipynb | 1292 +++++++------------------------------ 1 file changed, 222 insertions(+), 1070 deletions(-) diff --git a/Aufgabe 4/aufgabe04.ipynb b/Aufgabe 4/aufgabe04.ipynb index b5f962c..a97f4d1 100644 --- a/Aufgabe 4/aufgabe04.ipynb +++ b/Aufgabe 4/aufgabe04.ipynb @@ -1,20 +1,30 @@ { "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Aufgabenblatt 3" + ] + }, { "cell_type": "code", - "execution_count": 107, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "from tqdm import tqdm\n", + "from random import sample\n", + "from typing import Literal\n", "\n", "rng = np.random.default_rng()" ] }, { "cell_type": "code", - "execution_count": 108, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -44,7 +54,7 @@ }, { "cell_type": "code", - "execution_count": 136, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -53,7 +63,7 @@ "\n", "\n", "class NeuralNet:\n", - " def __init__(self, inputs: int = 2, hidden_layers: list[tuple[int, Callable]] = None, w: list = []):\n", + " def __init__(self, inputs: int = 2, hidden_layers: list[tuple[int, Callable]] = None, weights: list = None):\n", " \"\"\"\n", " Initializes the neural network.\n", " Hidden layers can be specified with the 'hidden_layers' parameter,\n", @@ -65,18 +75,25 @@ " self.input_shape = (-1, inputs)\n", " self.layers = [] if hidden_layers is None else hidden_layers\n", " self.layers.append((1, sigmoid)) # Add output layer\n", - " self.weights = w # TODO <-- das klappt hier nicht, wenn man da [] übergibt geht wieder. 
Ich wollte aber weights setzen können um die fitness function zu machen\n", "\n", - " # Construct weights for hidden layer\n", - " self.activation_functions = []\n", - " for index, (num_neurons, activation_function) in enumerate(self.layers):\n", - " self.activation_functions.append(activation_function)\n", + " self.activation_functions = [activation_function for _, activation_function in self.layers]\n", "\n", - " num_layer_inputs = inputs if index == 0 else self.layers[index - 1][0]\n", - " #self.weights.append(rng.uniform(low=-1.0, high=1.0, size=(num_layer_inputs, num_neurons)))\n", - " if not self.weights: # only initialize random weights if weights is empty\n", + " # Randomly create weights if not supplied\n", + " if weights is None:\n", + " self.weights = []\n", + " for index, (num_neurons, _) in enumerate(self.layers):\n", + " num_layer_inputs = inputs if index == 0 else self.layers[index - 1][0]\n", " self.weights.append(rng.uniform(low=-1.0, high=1.0, size=(num_layer_inputs, num_neurons)))\n", "\n", + " # Check if weight shapes are correct\n", + " else:\n", + " assert len(weights) == len(self.layers)\n", + " for index, (weight, layer) in enumerate(zip(weights, self.layers)):\n", + " num_neurons, _ = layer\n", + " num_layer_inputs = inputs if index == 0 else self.layers[index - 1][0]\n", + " assert weight.shape == (num_layer_inputs, num_neurons)\n", + " self.weights = weights\n", + "\n", " def forward_pass(self, x: np.array) -> tuple:\n", " \"\"\"\n", " Do a forward pass through the neural net.\n", @@ -99,7 +116,7 @@ " Executes a forward pass,\n", " and returns the classification result ŷ.\n", " \"\"\"\n", - " \n", + "\n", " _, F = self.forward_pass(x)\n", " ŷ = F[-1].reshape(x.shape[0])\n", " return ŷ\n", @@ -129,56 +146,36 @@ " Δweights = [np.dot(F[i].T, error) / batch_size for i, error in enumerate(layer_errors)]\n", " return Δweights\n", "\n", - " \n", - "\n", " # Aufgabe 5\n", - " def train(self, data: list, batch_size: int = 10, epochs: int = 50, learning_rate: float = 0.5):\n", + " def train(self, data: list, batch_size: int = 10, epochs: int = 50, learning_rate: float = 0.5, verbose: bool = True):\n", " \"\"\"\n", " Train the neural network with the given input data.\n", " \"\"\"\n", "\n", - " #Old\n", - " \n", " x, y = data\n", - " for epoch in tqdm(range(epochs)):\n", + " for epoch in tqdm(range(epochs), disable=not verbose):\n", " for i in range(0, len(data), batch_size):\n", " x_batch = x[i : i + batch_size]\n", " y_batch = y[i : i + batch_size]\n", " Δweights = self.backward_pass(x_batch, y_batch)\n", "\n", - " self.weights = [w - learning_rate * Δw for w, Δw in zip(self.weights, Δweights, strict=True)]\n", - " \n", - "\n", - " #Evolutionary algorithm\n", - " f = fitness(np.array([[0.179, -0.186, -0.008, -0.048], [0.044, -0.028, -0.063, -0.131], [0.01, -0.035, -0.004, 0.088]]), np.array([[0.088], [0.171], [0.005], [-0.04]]))\n", - " print(f)\n", - " \n", - "def fitness(weights):\n", - " test_data = samples(100)\n", - "\n", - " # Updating weights after every backward pass\n", - " nn = NeuralNet(hidden_layers=[(8, relu)], w=weights)\n", - " \n", - " correct = sum(np.around(nn.classify(test_data[0])) == test_data[1])\n", - " return correct\n", - " " + " self.weights = [w - learning_rate * Δw for w, Δw in zip(self.weights, Δweights, strict=True)]" ] }, { "cell_type": "code", - "execution_count": 137, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ "# Unit test to make sure neural net behaves as expected\n", "# Uses example values from 
https://towardsdatascience.com/how-does-back-propagation-work-in-neural-networks-with-worked-example-bc59dfb97f48.\n", "\n", - "nn = NeuralNet(inputs=3, hidden_layers=[(4, sigmoid)])\n", - "\n", - "nn.weights = [\n", + "nn = NeuralNet(inputs=3, hidden_layers=[(4, sigmoid)], weights=[\n", " np.array([[0.179, -0.186, -0.008, -0.048], [0.044, -0.028, -0.063, -0.131], [0.01, -0.035, -0.004, 0.088]]),\n", " np.array([[0.088], [0.171], [0.005], [-0.04]]),\n", - "]\n", + "])\n", + "\n", "\n", "# Make sure forward pass is correct\n", "Z, F = nn.forward_pass(np.array([[7, 8, 10]]))\n", @@ -199,29 +196,28 @@ }, { "cell_type": "code", - "execution_count": 138, + "execution_count": 24, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Correct classifications before training: 0.509\n" + "Correct classifications before training: 0.479\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "100%|██████████| 80/80 [00:00<00:00, 9763.28it/s]" + "100%|██████████| 80/80 [00:00<00:00, 3999.19it/s]" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "56\n", - "Correct classifications after training: 0.733\n" + "Correct classifications after training: 1.0\n" ] }, { @@ -253,17 +249,19 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbAAAAGiCAYAAACGUJO6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAApW0lEQVR4nO3dfXBU5fn/8U8SyQYqSVAkPDSaqvVZQUHSaB1rJzUz+sXym3Gk4gBlfKgtdZRMK0SE+NAaapUyVZQRpTpTLaij1hEGq6mMVekwBTJjK+ooItSvCfClm2DQBJLz+8O6Gtlcd/acnOze5P2a2T849173ufdk2Ws3ua6984IgCAQAgGfys70AAADCIIEBALxEAgMAeIkEBgDwEgkMAOAlEhgAwEskMACAl0hgAAAvkcAAAF4igQEAvJRxAnv11Vc1ZcoUjR07Vnl5eXruueecMevXr9c555yjRCKhE088UY8++miIpQIA8KWME1h7e7vGjx+vZcuW9en+H3zwgS699FJddNFFampq0k033aRrrrlGL774YsaLBQDgC3lRvsw3Ly9Pzz77rKZOndrrfebNm6c1a9bon//8Z+rYj370IyWTSa1bty7sqQEAg9wRcZ9gw4YNqq6u7nGspqZGN910U68xHR0d6ujoSP27u7tbe/fu1dFHH628vLy4lgoAiEEQBNq3b5/Gjh2r/Pz+K72IPYE1NzerrKysx7GysjK1tbXp008/1dChQw+JaWho0O233x730gAAA2jnzp365je/2W/zxZ7Awqirq1NtbW3q362trTr22GO184dS8ZA0AcWOCUuMsRERYsOOSdJwY8z1eKzYI0PGSVKedTFcD+hoY8x6QN9wzGud13qwrrmHRZg3EfKcklQUcsw6p2vc9d88bKxr3oIIsWHPG2VNLmFfLqO8zMb1Ej2wL/1tbW0qLy/X8OGuF6HMxP4oRo8erZaWlh7HWlpaVFxcnPbTlyQlEgklEof+pyoe0ksCK3Qswvr/ab1mSFL6JX7Oeg10vY5Z467XT+s5EHZMksxfz7o+9od9sUr3A/0q64cb1w/e9aSwxq0nRZTzutYUJYFZc0dJFtmIjTKvCwksqv7+E1DsfWBVVVVqbGzsceyll15SVVVV3KcGABzGMk5gn3zyiZqamtTU1CTp8zL5pqYm7dixQ9Lnv/6bOXNm6v7XX3+9tm3bpptvvllvv/22HnjgAT355JOaO3du/zwCAMCglHEC+8c//qGzzz5bZ599tiSptrZWZ599thYtWiRJ+vjjj1PJTJK+9a1vac2aNXrppZc0fvx43XvvvXr44YdVU1PTTw8BADAYReoDGyhtbW0qKSlR6+W9/A0s6Zig1Bhz1SZYsdbYURHmdf2tyoq1Hk+U4hDn38+sB1wackyyH5Ar1hq3/tAYpTjEFWv9vcmKjfJ3OVes9ffLsH8fixqbjb+BxfW3qmz9DSx3/n6Weg1vbVVxseuFqO/4LkQAgJdIYAAAL5HAAABeIoEBALxEAgMAeIkEBgDwEgkMAOClnPwy33633RiL0nNltT4lI8wbV2+aNSbZ18IZuzfcmNk/5jpxqyM2GXJea0yy+7XaHbFWD9lnxpirlyuuHrIuY8z1xbgHjbHD7fsZLdZ1iDJvnKw1uR5Pf8W48QkMAOAlEhgAwEskMACAl0hgAAAvkcAAAF4igQEAvJSL9ZuZi7Lb/f86YpMhx0od81rjrspyq3o8GfKcrvF9jtiwJfhWib1rPM+1LcNIYyzsRZTCb9PiirVK8K3yeyk7JfiuebNRgu9aUzZK8KO8zB5uJfj9i09gAAAvkcAAAF4igQEAvEQCAwB4iQQGAPASCQwA4CUSGADAS7nYRJBbrD4xq4Wp1DFvNrZiOdoxr3Ve65yu8SjzWv1lw9scscb4EVYPmdU/Jtk9ZK69cD4xxqx+rFLHvNnoIYtzi5ewPWRR+qbi6iGLc4sXS5TtbPob26kAAJBCAgMAeIkEBgDwEgkMAOAlEhgAwEskMACAlyijj2K3MZZ0xFrjpY5Ya9wqz7eqvyW7Atw6p+u8VmzSMa8Va41J4UvwXeX5Zgl+qR1rluhbPwCr/F7KTgm+q+w8G1u8dDjmTRhjcZXgx1lG70sJPmX0AACkkMAAAF4igQEAvEQCAwB4iQQGAPASCQwA4CXK6GMS7LHH86
... [base64-encoded PNG data omitted here: the removed and the regenerated "image/png" output of the cell whose source ends in visualize(nn), together with the surrounding output metadata of that cell] ...
" ] }, - "metadata": {}, + "metadata": { + "needs_background": "light" + }, "output_type": "display_data" } ], @@ -290,1050 +288,204 @@ "visualize(nn)\n" ] }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Aufgabenblatt 4" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Probleme beim Crossover\n", + "\n", + "Das _Austauschen einzelner Gewichte_ macht hier bei neuronalen Netzwerken wenig Sinn, da die Gewichte nur in der Gesamtheit Sinn ergeben. Ersetze ich ein einzelnen Gewicht in einem Elternteil, wird dies höchstwahrscheinlich die Fitness nicht verbessern, da dieses Gewicht nur im Zusammenspiel der Gewichte des Elternteil von dem es kommt gut funktioniert." + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "def mutate(nn: NeuralNet) -> None:\n", + " nn.weights = [w + rng.normal(scale=0.1, size=w.shape) for w in nn.weights]\n", + "\n", + "def recombine(mother: NeuralNet, father: NeuralNet) -> NeuralNet:\n", + " weights = [(wm + wf) / 2.0 for wm, wf in zip(mother.weights, father.weights)]\n", + " return NeuralNet(hidden_layers=[(8, relu)], weights=weights)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [], + "source": [ + "def fitness(nn: NeuralNet) -> int:\n", + " test_data = samples(100)\n", + "\n", + " correct = sum(np.around(nn.classify(test_data[0])) == test_data[1])\n", + " return correct\n", + "\n", + "\n", + "def evolution(\n", + " generations: int = 50,\n", + " population_size: int = 20,\n", + " selection_size: int = 5,\n", + " selection_type: Literal[\"elitist\"] | Literal[\"proportional\"] = \"elitist\",\n", + "):\n", + " assert selection_type in (\"elitist\", \"proportional\")\n", + " population = [NeuralNet(hidden_layers=[(8, relu)]) for _ in range(population_size)]\n", + "\n", + " for _ in range(generations):\n", + " # Select individuals with highest fitness for reproduction\n", + " population = sorted(population, key=lambda nn: fitness(nn), reverse=True)\n", + "\n", + " if selection_type == \"elitist\":\n", + " selection = population[:selection_size]\n", + " elif selection_type == \"proportional\":\n", + " population_fitness = [fitness(nn) for nn in population]\n", + " selection = np.random.choice(\n", + " population,\n", + " selection_size,\n", + " p=[f / sum(population_fitness) for f in population_fitness],\n", + " ).tolist()\n", + "\n", + " # Reproduce\n", + " offsprings = []\n", + " for _ in range(population_size - selection_size):\n", + " mother, father = sample(selection, 2)\n", + "\n", + " offspring = recombine(mother, father)\n", + " mutate(offspring)\n", + "\n", + " offsprings.append(offspring)\n", + "\n", + " # Create new population\n", + " population = selection + offsprings\n", + "\n", + " # Return best individual of final population\n", + " return max(population, key=lambda nn: fitness(nn))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 20/20 [00:51<00:00, 2.58s/it]\n" + ] + } + ], + "source": [ + "test_runs = 10\n", + "\n", + "tests = {}\n", + "\n", + "with tqdm(total=2 * test_runs) as progressbar:\n", + " for test in range(test_runs):\n", + " test_data = samples(500)\n", + "\n", + " for selection_type in (\"elitist\", \"proportional\"):\n", + " nn = evolution(generations=100, selection_type=selection_type)\n", 
+ " correct = sum(np.around(nn.classify(test_data[0])) == test_data[1])\n", + "\n", + " test_results = tests.setdefault(selection_type, [])\n", + " test_results.append(correct / test_data[0].shape[0])\n", + "\n", + " progressbar.update(1)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Epoch 1/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2701 - binary_accuracy: 0.5000\n", - "Epoch 2/500\n", - "4/4 [==============================] - 0s 970us/step - loss: 0.2566 - binary_accuracy: 0.5000\n", - "Epoch 3/500\n", - "4/4 [==============================] - 0s 911us/step - loss: 0.2585 - binary_accuracy: 0.5000\n", - "Epoch 4/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2541 - binary_accuracy: 0.5000\n", - "Epoch 5/500\n", - "4/4 [==============================] - 0s 908us/step - loss: 0.2521 - binary_accuracy: 0.5000\n", - "Epoch 6/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2521 - binary_accuracy: 0.2500\n", - "Epoch 7/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2515 - binary_accuracy: 0.5000\n", - "Epoch 8/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2545 - binary_accuracy: 0.5000\n", - "Epoch 9/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2520 - binary_accuracy: 0.5000\n", - "Epoch 10/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2514 - binary_accuracy: 0.5000\n", - "Epoch 11/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2511 - binary_accuracy: 0.5000\n", - "Epoch 12/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2512 - binary_accuracy: 0.5000\n", - "Epoch 13/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2508 - binary_accuracy: 0.5000\n", - "Epoch 14/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2528 - binary_accuracy: 0.5000\n", - "Epoch 15/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2516 - binary_accuracy: 0.5000\n", - "Epoch 16/500\n", - "4/4 [==============================] - 0s 977us/step - loss: 0.2510 - binary_accuracy: 0.2500\n", - "Epoch 17/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2506 - binary_accuracy: 0.5000\n", - "Epoch 18/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2528 - binary_accuracy: 0.5000\n", - "Epoch 19/500\n", - "4/4 [==============================] - 0s 989us/step - loss: 0.2556 - binary_accuracy: 0.2500\n", - "Epoch 20/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2501 - binary_accuracy: 0.5000\n", - "Epoch 21/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.2498 - binary_accuracy: 0.5000\n", - "Epoch 22/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2502 - binary_accuracy: 0.2500\n", - "Epoch 23/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.2502 - binary_accuracy: 0.5000\n", - "Epoch 24/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2528 - binary_accuracy: 0.5000\n", - "Epoch 25/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2498 - binary_accuracy: 0.5000\n", - "Epoch 26/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.2500 - binary_accuracy: 0.5000\n", - "Epoch 27/500\n", - "4/4 [==============================] - 0s 2ms/step 
- loss: 0.2493 - binary_accuracy: 0.7500\n",
-     "[... remaining lines of this removed Keras training log omitted; over the logged epochs shown, the loss falls steadily from ~0.27 to ~0.04 and binary_accuracy rises from 0.5000 to 1.0000 ...]\n",
0s 1ms/step - loss: 0.0353 - binary_accuracy: 1.0000\n", - "Epoch 262/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0349 - binary_accuracy: 1.0000\n", - "Epoch 263/500\n", - "4/4 [==============================] - 0s 859us/step - loss: 0.0343 - binary_accuracy: 1.0000\n", - "Epoch 264/500\n", - "4/4 [==============================] - 0s 951us/step - loss: 0.0341 - binary_accuracy: 1.0000\n", - "Epoch 265/500\n", - "4/4 [==============================] - 0s 836us/step - loss: 0.0339 - binary_accuracy: 1.0000\n", - "Epoch 266/500\n", - "4/4 [==============================] - 0s 922us/step - loss: 0.0335 - binary_accuracy: 1.0000\n", - "Epoch 267/500\n", - "4/4 [==============================] - 0s 813us/step - loss: 0.0326 - binary_accuracy: 1.0000\n", - "Epoch 268/500\n", - "4/4 [==============================] - 0s 880us/step - loss: 0.0322 - binary_accuracy: 1.0000\n", - "Epoch 269/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0322 - binary_accuracy: 1.0000\n", - "Epoch 270/500\n", - "4/4 [==============================] - 0s 961us/step - loss: 0.0314 - binary_accuracy: 1.0000\n", - "Epoch 271/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0310 - binary_accuracy: 1.0000\n", - "Epoch 272/500\n", - "4/4 [==============================] - 0s 845us/step - loss: 0.0307 - binary_accuracy: 1.0000\n", - "Epoch 273/500\n", - "4/4 [==============================] - 0s 915us/step - loss: 0.0302 - binary_accuracy: 1.0000\n", - "Epoch 274/500\n", - "4/4 [==============================] - 0s 706us/step - loss: 0.0299 - binary_accuracy: 1.0000\n", - "Epoch 275/500\n", - "4/4 [==============================] - 0s 820us/step - loss: 0.0298 - binary_accuracy: 1.0000\n", - "Epoch 276/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0291 - binary_accuracy: 1.0000\n", - "Epoch 277/500\n", - "4/4 [==============================] - 0s 841us/step - loss: 0.0290 - binary_accuracy: 1.0000\n", - "Epoch 278/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0285 - binary_accuracy: 1.0000\n", - "Epoch 279/500\n", - "4/4 [==============================] - 0s 947us/step - loss: 0.0280 - binary_accuracy: 1.0000\n", - "Epoch 280/500\n", - "4/4 [==============================] - 0s 878us/step - loss: 0.0277 - binary_accuracy: 1.0000\n", - "Epoch 281/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0273 - binary_accuracy: 1.0000\n", - "Epoch 282/500\n", - "4/4 [==============================] - 0s 899us/step - loss: 0.0271 - binary_accuracy: 1.0000\n", - "Epoch 283/500\n", - "4/4 [==============================] - 0s 974us/step - loss: 0.0269 - binary_accuracy: 1.0000\n", - "Epoch 284/500\n", - "4/4 [==============================] - 0s 835us/step - loss: 0.0264 - binary_accuracy: 1.0000\n", - "Epoch 285/500\n", - "4/4 [==============================] - 0s 806us/step - loss: 0.0265 - binary_accuracy: 1.0000\n", - "Epoch 286/500\n", - "4/4 [==============================] - 0s 814us/step - loss: 0.0260 - binary_accuracy: 1.0000\n", - "Epoch 287/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0256 - binary_accuracy: 1.0000\n", - "Epoch 288/500\n", - "4/4 [==============================] - 0s 859us/step - loss: 0.0253 - binary_accuracy: 1.0000\n", - "Epoch 289/500\n", - "4/4 [==============================] - 0s 979us/step - loss: 0.0252 - binary_accuracy: 1.0000\n", - "Epoch 290/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0248 - 
binary_accuracy: 1.0000\n", - "Epoch 291/500\n", - "4/4 [==============================] - 0s 914us/step - loss: 0.0245 - binary_accuracy: 1.0000\n", - "Epoch 292/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0241 - binary_accuracy: 1.0000\n", - "Epoch 293/500\n", - "4/4 [==============================] - 0s 841us/step - loss: 0.0239 - binary_accuracy: 1.0000\n", - "Epoch 294/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0237 - binary_accuracy: 1.0000\n", - "Epoch 295/500\n", - "4/4 [==============================] - 0s 861us/step - loss: 0.0235 - binary_accuracy: 1.0000\n", - "Epoch 296/500\n", - "4/4 [==============================] - 0s 815us/step - loss: 0.0231 - binary_accuracy: 1.0000\n", - "Epoch 297/500\n", - "4/4 [==============================] - 0s 753us/step - loss: 0.0230 - binary_accuracy: 1.0000\n", - "Epoch 298/500\n", - "4/4 [==============================] - 0s 843us/step - loss: 0.0225 - binary_accuracy: 1.0000\n", - "Epoch 299/500\n", - "4/4 [==============================] - 0s 979us/step - loss: 0.0224 - binary_accuracy: 1.0000\n", - "Epoch 300/500\n", - "4/4 [==============================] - 0s 636us/step - loss: 0.0221 - binary_accuracy: 1.0000\n", - "Epoch 301/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0219 - binary_accuracy: 1.0000\n", - "Epoch 302/500\n", - "4/4 [==============================] - 0s 748us/step - loss: 0.0216 - binary_accuracy: 1.0000\n", - "Epoch 303/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0214 - binary_accuracy: 1.0000\n", - "Epoch 304/500\n", - "4/4 [==============================] - 0s 818us/step - loss: 0.0212 - binary_accuracy: 1.0000\n", - "Epoch 305/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0210 - binary_accuracy: 1.0000\n", - "Epoch 306/500\n", - "4/4 [==============================] - 0s 874us/step - loss: 0.0207 - binary_accuracy: 1.0000\n", - "Epoch 307/500\n", - "4/4 [==============================] - 0s 913us/step - loss: 0.0206 - binary_accuracy: 1.0000\n", - "Epoch 308/500\n", - "4/4 [==============================] - 0s 906us/step - loss: 0.0203 - binary_accuracy: 1.0000\n", - "Epoch 309/500\n", - "4/4 [==============================] - 0s 904us/step - loss: 0.0201 - binary_accuracy: 1.0000\n", - "Epoch 310/500\n", - "4/4 [==============================] - 0s 831us/step - loss: 0.0199 - binary_accuracy: 1.0000\n", - "Epoch 311/500\n", - "4/4 [==============================] - 0s 704us/step - loss: 0.0197 - binary_accuracy: 1.0000\n", - "Epoch 312/500\n", - "4/4 [==============================] - 0s 928us/step - loss: 0.0195 - binary_accuracy: 1.0000\n", - "Epoch 313/500\n", - "4/4 [==============================] - 0s 790us/step - loss: 0.0193 - binary_accuracy: 1.0000\n", - "Epoch 314/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0191 - binary_accuracy: 1.0000\n", - "Epoch 315/500\n", - "4/4 [==============================] - 0s 896us/step - loss: 0.0190 - binary_accuracy: 1.0000\n", - "Epoch 316/500\n", - "4/4 [==============================] - 0s 794us/step - loss: 0.0188 - binary_accuracy: 1.0000\n", - "Epoch 317/500\n", - "4/4 [==============================] - 0s 811us/step - loss: 0.0186 - binary_accuracy: 1.0000\n", - "Epoch 318/500\n", - "4/4 [==============================] - 0s 792us/step - loss: 0.0185 - binary_accuracy: 1.0000\n", - "Epoch 319/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0183 - binary_accuracy: 1.0000\n", - 
"Epoch 320/500\n", - "4/4 [==============================] - 0s 929us/step - loss: 0.0180 - binary_accuracy: 1.0000\n", - "Epoch 321/500\n", - "4/4 [==============================] - 0s 864us/step - loss: 0.0179 - binary_accuracy: 1.0000\n", - "Epoch 322/500\n", - "4/4 [==============================] - 0s 950us/step - loss: 0.0177 - binary_accuracy: 1.0000\n", - "Epoch 323/500\n", - "4/4 [==============================] - 0s 768us/step - loss: 0.0175 - binary_accuracy: 1.0000\n", - "Epoch 324/500\n", - "4/4 [==============================] - 0s 787us/step - loss: 0.0174 - binary_accuracy: 1.0000\n", - "Epoch 325/500\n", - "4/4 [==============================] - 0s 919us/step - loss: 0.0172 - binary_accuracy: 1.0000\n", - "Epoch 326/500\n", - "4/4 [==============================] - 0s 858us/step - loss: 0.0170 - binary_accuracy: 1.0000\n", - "Epoch 327/500\n", - "4/4 [==============================] - 0s 850us/step - loss: 0.0170 - binary_accuracy: 1.0000\n", - "Epoch 328/500\n", - "4/4 [==============================] - 0s 634us/step - loss: 0.0168 - binary_accuracy: 1.0000\n", - "Epoch 329/500\n", - "4/4 [==============================] - 0s 983us/step - loss: 0.0166 - binary_accuracy: 1.0000\n", - "Epoch 330/500\n", - "4/4 [==============================] - 0s 856us/step - loss: 0.0164 - binary_accuracy: 1.0000\n", - "Epoch 331/500\n", - "4/4 [==============================] - 0s 796us/step - loss: 0.0163 - binary_accuracy: 1.0000\n", - "Epoch 332/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0162 - binary_accuracy: 1.0000\n", - "Epoch 333/500\n", - "4/4 [==============================] - 0s 860us/step - loss: 0.0160 - binary_accuracy: 1.0000\n", - "Epoch 334/500\n", - "4/4 [==============================] - 0s 973us/step - loss: 0.0158 - binary_accuracy: 1.0000\n", - "Epoch 335/500\n", - "4/4 [==============================] - 0s 897us/step - loss: 0.0157 - binary_accuracy: 1.0000\n", - "Epoch 336/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0156 - binary_accuracy: 1.0000\n", - "Epoch 337/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0154 - binary_accuracy: 1.0000\n", - "Epoch 338/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0153 - binary_accuracy: 1.0000\n", - "Epoch 339/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0152 - binary_accuracy: 1.0000\n", - "Epoch 340/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0151 - binary_accuracy: 1.0000\n", - "Epoch 341/500\n", - "4/4 [==============================] - 0s 897us/step - loss: 0.0149 - binary_accuracy: 1.0000\n", - "Epoch 342/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0148 - binary_accuracy: 1.0000\n", - "Epoch 343/500\n", - "4/4 [==============================] - 0s 872us/step - loss: 0.0147 - binary_accuracy: 1.0000\n", - "Epoch 344/500\n", - "4/4 [==============================] - 0s 828us/step - loss: 0.0145 - binary_accuracy: 1.0000\n", - "Epoch 345/500\n", - "4/4 [==============================] - 0s 898us/step - loss: 0.0144 - binary_accuracy: 1.0000\n", - "Epoch 346/500\n", - "4/4 [==============================] - 0s 757us/step - loss: 0.0143 - binary_accuracy: 1.0000\n", - "Epoch 347/500\n", - "4/4 [==============================] - 0s 910us/step - loss: 0.0142 - binary_accuracy: 1.0000\n", - "Epoch 348/500\n", - "4/4 [==============================] - 0s 854us/step - loss: 0.0140 - binary_accuracy: 1.0000\n", - "Epoch 349/500\n", - "4/4 
[==============================] - 0s 949us/step - loss: 0.0140 - binary_accuracy: 1.0000\n", - "Epoch 350/500\n", - "4/4 [==============================] - 0s 783us/step - loss: 0.0138 - binary_accuracy: 1.0000\n", - "Epoch 351/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0138 - binary_accuracy: 1.0000\n", - "Epoch 352/500\n", - "4/4 [==============================] - 0s 883us/step - loss: 0.0137 - binary_accuracy: 1.0000\n", - "Epoch 353/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0134 - binary_accuracy: 1.0000\n", - "Epoch 354/500\n", - "4/4 [==============================] - 0s 925us/step - loss: 0.0134 - binary_accuracy: 1.0000\n", - "Epoch 355/500\n", - "4/4 [==============================] - 0s 960us/step - loss: 0.0133 - binary_accuracy: 1.0000\n", - "Epoch 356/500\n", - "4/4 [==============================] - 0s 879us/step - loss: 0.0132 - binary_accuracy: 1.0000\n", - "Epoch 357/500\n", - "4/4 [==============================] - 0s 925us/step - loss: 0.0130 - binary_accuracy: 1.0000\n", - "Epoch 358/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0130 - binary_accuracy: 1.0000\n", - "Epoch 359/500\n", - "4/4 [==============================] - 0s 779us/step - loss: 0.0128 - binary_accuracy: 1.0000\n", - "Epoch 360/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0128 - binary_accuracy: 1.0000\n", - "Epoch 361/500\n", - "4/4 [==============================] - 0s 815us/step - loss: 0.0126 - binary_accuracy: 1.0000\n", - "Epoch 362/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0125 - binary_accuracy: 1.0000\n", - "Epoch 363/500\n", - "4/4 [==============================] - 0s 947us/step - loss: 0.0124 - binary_accuracy: 1.0000\n", - "Epoch 364/500\n", - "4/4 [==============================] - 0s 792us/step - loss: 0.0124 - binary_accuracy: 1.0000\n", - "Epoch 365/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0122 - binary_accuracy: 1.0000\n", - "Epoch 366/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0122 - binary_accuracy: 1.0000\n", - "Epoch 367/500\n", - "4/4 [==============================] - 0s 970us/step - loss: 0.0120 - binary_accuracy: 1.0000\n", - "Epoch 368/500\n", - "4/4 [==============================] - 0s 981us/step - loss: 0.0119 - binary_accuracy: 1.0000\n", - "Epoch 369/500\n", - "4/4 [==============================] - 0s 965us/step - loss: 0.0119 - binary_accuracy: 1.0000\n", - "Epoch 370/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0118 - binary_accuracy: 1.0000\n", - "Epoch 371/500\n", - "4/4 [==============================] - 0s 853us/step - loss: 0.0117 - binary_accuracy: 1.0000\n", - "Epoch 372/500\n", - "4/4 [==============================] - 0s 886us/step - loss: 0.0116 - binary_accuracy: 1.0000\n", - "Epoch 373/500\n", - "4/4 [==============================] - 0s 908us/step - loss: 0.0115 - binary_accuracy: 1.0000\n", - "Epoch 374/500\n", - "4/4 [==============================] - 0s 868us/step - loss: 0.0114 - binary_accuracy: 1.0000\n", - "Epoch 375/500\n", - "4/4 [==============================] - 0s 899us/step - loss: 0.0113 - binary_accuracy: 1.0000\n", - "Epoch 376/500\n", - "4/4 [==============================] - 0s 874us/step - loss: 0.0112 - binary_accuracy: 1.0000\n", - "Epoch 377/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0112 - binary_accuracy: 1.0000\n", - "Epoch 378/500\n", - "4/4 [==============================] - 
0s 911us/step - loss: 0.0111 - binary_accuracy: 1.0000\n", - "Epoch 379/500\n", - "4/4 [==============================] - 0s 844us/step - loss: 0.0110 - binary_accuracy: 1.0000\n", - "Epoch 380/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0109 - binary_accuracy: 1.0000\n", - "Epoch 381/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0108 - binary_accuracy: 1.0000\n", - "Epoch 382/500\n", - "4/4 [==============================] - 0s 935us/step - loss: 0.0107 - binary_accuracy: 1.0000\n", - "Epoch 383/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0107 - binary_accuracy: 1.0000\n", - "Epoch 384/500\n", - "4/4 [==============================] - 0s 760us/step - loss: 0.0106 - binary_accuracy: 1.0000\n", - "Epoch 385/500\n", - "4/4 [==============================] - 0s 961us/step - loss: 0.0105 - binary_accuracy: 1.0000\n", - "Epoch 386/500\n", - "4/4 [==============================] - 0s 865us/step - loss: 0.0104 - binary_accuracy: 1.0000\n", - "Epoch 387/500\n", - "4/4 [==============================] - 0s 723us/step - loss: 0.0104 - binary_accuracy: 1.0000\n", - "Epoch 388/500\n", - "4/4 [==============================] - 0s 986us/step - loss: 0.0103 - binary_accuracy: 1.0000\n", - "Epoch 389/500\n", - "4/4 [==============================] - 0s 751us/step - loss: 0.0102 - binary_accuracy: 1.0000\n", - "Epoch 390/500\n", - "4/4 [==============================] - 0s 960us/step - loss: 0.0101 - binary_accuracy: 1.0000\n", - "Epoch 391/500\n", - "4/4 [==============================] - 0s 880us/step - loss: 0.0101 - binary_accuracy: 1.0000\n", - "Epoch 392/500\n", - "4/4 [==============================] - 0s 673us/step - loss: 0.0100 - binary_accuracy: 1.0000\n", - "Epoch 393/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0099 - binary_accuracy: 1.0000\n", - "Epoch 394/500\n", - "4/4 [==============================] - 0s 912us/step - loss: 0.0099 - binary_accuracy: 1.0000\n", - "Epoch 395/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0098 - binary_accuracy: 1.0000\n", - "Epoch 396/500\n", - "4/4 [==============================] - 0s 742us/step - loss: 0.0097 - binary_accuracy: 1.0000\n", - "Epoch 397/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0097 - binary_accuracy: 1.0000\n", - "Epoch 398/500\n", - "4/4 [==============================] - 0s 842us/step - loss: 0.0096 - binary_accuracy: 1.0000\n", - "Epoch 399/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0095 - binary_accuracy: 1.0000\n", - "Epoch 400/500\n", - "4/4 [==============================] - 0s 816us/step - loss: 0.0095 - binary_accuracy: 1.0000\n", - "Epoch 401/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0094 - binary_accuracy: 1.0000\n", - "Epoch 402/500\n", - "4/4 [==============================] - 0s 775us/step - loss: 0.0093 - binary_accuracy: 1.0000\n", - "Epoch 403/500\n", - "4/4 [==============================] - 0s 958us/step - loss: 0.0093 - binary_accuracy: 1.0000\n", - "Epoch 404/500\n", - "4/4 [==============================] - 0s 851us/step - loss: 0.0092 - binary_accuracy: 1.0000\n", - "Epoch 405/500\n", - "4/4 [==============================] - 0s 844us/step - loss: 0.0091 - binary_accuracy: 1.0000\n", - "Epoch 406/500\n", - "4/4 [==============================] - 0s 987us/step - loss: 0.0091 - binary_accuracy: 1.0000\n", - "Epoch 407/500\n", - "4/4 [==============================] - 0s 786us/step - loss: 0.0090 - 
binary_accuracy: 1.0000\n", - "Epoch 408/500\n", - "4/4 [==============================] - 0s 936us/step - loss: 0.0090 - binary_accuracy: 1.0000\n", - "Epoch 409/500\n", - "4/4 [==============================] - 0s 971us/step - loss: 0.0089 - binary_accuracy: 1.0000\n", - "Epoch 410/500\n", - "4/4 [==============================] - 0s 901us/step - loss: 0.0088 - binary_accuracy: 1.0000\n", - "Epoch 411/500\n", - "4/4 [==============================] - 0s 908us/step - loss: 0.0088 - binary_accuracy: 1.0000\n", - "Epoch 412/500\n", - "4/4 [==============================] - 0s 946us/step - loss: 0.0087 - binary_accuracy: 1.0000\n", - "Epoch 413/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0087 - binary_accuracy: 1.0000\n", - "Epoch 414/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0086 - binary_accuracy: 1.0000\n", - "Epoch 415/500\n", - "4/4 [==============================] - 0s 922us/step - loss: 0.0085 - binary_accuracy: 1.0000\n", - "Epoch 416/500\n", - "4/4 [==============================] - 0s 719us/step - loss: 0.0085 - binary_accuracy: 1.0000\n", - "Epoch 417/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0084 - binary_accuracy: 1.0000\n", - "Epoch 418/500\n", - "4/4 [==============================] - 0s 756us/step - loss: 0.0084 - binary_accuracy: 1.0000\n", - "Epoch 419/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0083 - binary_accuracy: 1.0000\n", - "Epoch 420/500\n", - "4/4 [==============================] - 0s 847us/step - loss: 0.0083 - binary_accuracy: 1.0000\n", - "Epoch 421/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0082 - binary_accuracy: 1.0000\n", - "Epoch 422/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0082 - binary_accuracy: 1.0000\n", - "Epoch 423/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0081 - binary_accuracy: 1.0000\n", - "Epoch 424/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0081 - binary_accuracy: 1.0000\n", - "Epoch 425/500\n", - "4/4 [==============================] - 0s 956us/step - loss: 0.0080 - binary_accuracy: 1.0000\n", - "Epoch 426/500\n", - "4/4 [==============================] - 0s 865us/step - loss: 0.0080 - binary_accuracy: 1.0000\n", - "Epoch 427/500\n", - "4/4 [==============================] - 0s 798us/step - loss: 0.0079 - binary_accuracy: 1.0000\n", - "Epoch 428/500\n", - "4/4 [==============================] - 0s 853us/step - loss: 0.0079 - binary_accuracy: 1.0000\n", - "Epoch 429/500\n", - "4/4 [==============================] - 0s 701us/step - loss: 0.0078 - binary_accuracy: 1.0000\n", - "Epoch 430/500\n", - "4/4 [==============================] - 0s 780us/step - loss: 0.0078 - binary_accuracy: 1.0000\n", - "Epoch 431/500\n", - "4/4 [==============================] - 0s 717us/step - loss: 0.0077 - binary_accuracy: 1.0000\n", - "Epoch 432/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0077 - binary_accuracy: 1.0000\n", - "Epoch 433/500\n", - "4/4 [==============================] - 0s 3ms/step - loss: 0.0076 - binary_accuracy: 1.0000\n", - "Epoch 434/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0076 - binary_accuracy: 1.0000\n", - "Epoch 435/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0075 - binary_accuracy: 1.0000\n", - "Epoch 436/500\n", - "4/4 [==============================] - 0s 721us/step - loss: 0.0075 - binary_accuracy: 1.0000\n", - "Epoch 
437/500\n", - "4/4 [==============================] - 0s 952us/step - loss: 0.0074 - binary_accuracy: 1.0000\n", - "Epoch 438/500\n", - "4/4 [==============================] - 0s 758us/step - loss: 0.0074 - binary_accuracy: 1.0000\n", - "Epoch 439/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0074 - binary_accuracy: 1.0000\n", - "Epoch 440/500\n", - "4/4 [==============================] - 0s 733us/step - loss: 0.0073 - binary_accuracy: 1.0000\n", - "Epoch 441/500\n", - "4/4 [==============================] - 0s 973us/step - loss: 0.0073 - binary_accuracy: 1.0000\n", - "Epoch 442/500\n", - "4/4 [==============================] - 0s 946us/step - loss: 0.0072 - binary_accuracy: 1.0000\n", - "Epoch 443/500\n", - "4/4 [==============================] - 0s 825us/step - loss: 0.0072 - binary_accuracy: 1.0000\n", - "Epoch 444/500\n", - "4/4 [==============================] - 0s 980us/step - loss: 0.0071 - binary_accuracy: 1.0000\n", - "Epoch 445/500\n", - "4/4 [==============================] - 0s 954us/step - loss: 0.0071 - binary_accuracy: 1.0000\n", - "Epoch 446/500\n", - "4/4 [==============================] - 0s 859us/step - loss: 0.0070 - binary_accuracy: 1.0000\n", - "Epoch 447/500\n", - "4/4 [==============================] - 0s 851us/step - loss: 0.0070 - binary_accuracy: 1.0000\n", - "Epoch 448/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0070 - binary_accuracy: 1.0000\n", - "Epoch 449/500\n", - "4/4 [==============================] - 0s 660us/step - loss: 0.0069 - binary_accuracy: 1.0000\n", - "Epoch 450/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0069 - binary_accuracy: 1.0000\n", - "Epoch 451/500\n", - "4/4 [==============================] - 0s 939us/step - loss: 0.0068 - binary_accuracy: 1.0000\n", - "Epoch 452/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0068 - binary_accuracy: 1.0000\n", - "Epoch 453/500\n", - "4/4 [==============================] - 0s 884us/step - loss: 0.0068 - binary_accuracy: 1.0000\n", - "Epoch 454/500\n", - "4/4 [==============================] - 0s 757us/step - loss: 0.0067 - binary_accuracy: 1.0000\n", - "Epoch 455/500\n", - "4/4 [==============================] - 0s 884us/step - loss: 0.0067 - binary_accuracy: 1.0000\n", - "Epoch 456/500\n", - "4/4 [==============================] - 0s 763us/step - loss: 0.0067 - binary_accuracy: 1.0000\n", - "Epoch 457/500\n", - "4/4 [==============================] - 0s 739us/step - loss: 0.0066 - binary_accuracy: 1.0000\n", - "Epoch 458/500\n", - "4/4 [==============================] - 0s 935us/step - loss: 0.0066 - binary_accuracy: 1.0000\n", - "Epoch 459/500\n", - "4/4 [==============================] - 0s 987us/step - loss: 0.0065 - binary_accuracy: 1.0000\n", - "Epoch 460/500\n", - "4/4 [==============================] - 0s 871us/step - loss: 0.0065 - binary_accuracy: 1.0000\n", - "Epoch 461/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0065 - binary_accuracy: 1.0000\n", - "Epoch 462/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0064 - binary_accuracy: 1.0000\n", - "Epoch 463/500\n", - "4/4 [==============================] - 0s 780us/step - loss: 0.0064 - binary_accuracy: 1.0000\n", - "Epoch 464/500\n", - "4/4 [==============================] - 0s 830us/step - loss: 0.0064 - binary_accuracy: 1.0000\n", - "Epoch 465/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0063 - binary_accuracy: 1.0000\n", - "Epoch 466/500\n", - "4/4 
[==============================] - 0s 1ms/step - loss: 0.0063 - binary_accuracy: 1.0000\n", - "Epoch 467/500\n", - "4/4 [==============================] - 0s 781us/step - loss: 0.0063 - binary_accuracy: 1.0000\n", - "Epoch 468/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0062 - binary_accuracy: 1.0000\n", - "Epoch 469/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0062 - binary_accuracy: 1.0000\n", - "Epoch 470/500\n", - "4/4 [==============================] - 0s 887us/step - loss: 0.0061 - binary_accuracy: 1.0000\n", - "Epoch 471/500\n", - "4/4 [==============================] - 0s 897us/step - loss: 0.0061 - binary_accuracy: 1.0000\n", - "Epoch 472/500\n", - "4/4 [==============================] - 0s 763us/step - loss: 0.0061 - binary_accuracy: 1.0000\n", - "Epoch 473/500\n", - "4/4 [==============================] - 0s 832us/step - loss: 0.0060 - binary_accuracy: 1.0000\n", - "Epoch 474/500\n", - "4/4 [==============================] - 0s 912us/step - loss: 0.0060 - binary_accuracy: 1.0000\n", - "Epoch 475/500\n", - "4/4 [==============================] - 0s 776us/step - loss: 0.0060 - binary_accuracy: 1.0000\n", - "Epoch 476/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0059 - binary_accuracy: 1.0000\n", - "Epoch 477/500\n", - "4/4 [==============================] - 0s 811us/step - loss: 0.0059 - binary_accuracy: 1.0000\n", - "Epoch 478/500\n", - "4/4 [==============================] - 0s 787us/step - loss: 0.0059 - binary_accuracy: 1.0000\n", - "Epoch 479/500\n", - "4/4 [==============================] - 0s 933us/step - loss: 0.0058 - binary_accuracy: 1.0000\n", - "Epoch 480/500\n", - "4/4 [==============================] - 0s 895us/step - loss: 0.0058 - binary_accuracy: 1.0000\n", - "Epoch 481/500\n", - "4/4 [==============================] - 0s 884us/step - loss: 0.0058 - binary_accuracy: 1.0000\n", - "Epoch 482/500\n", - "4/4 [==============================] - 0s 802us/step - loss: 0.0058 - binary_accuracy: 1.0000\n", - "Epoch 483/500\n", - "4/4 [==============================] - 0s 772us/step - loss: 0.0057 - binary_accuracy: 1.0000\n", - "Epoch 484/500\n", - "4/4 [==============================] - 0s 814us/step - loss: 0.0057 - binary_accuracy: 1.0000\n", - "Epoch 485/500\n", - "4/4 [==============================] - 0s 785us/step - loss: 0.0057 - binary_accuracy: 1.0000\n", - "Epoch 486/500\n", - "4/4 [==============================] - 0s 815us/step - loss: 0.0056 - binary_accuracy: 1.0000\n", - "Epoch 487/500\n", - "4/4 [==============================] - 0s 2ms/step - loss: 0.0056 - binary_accuracy: 1.0000\n", - "Epoch 488/500\n", - "4/4 [==============================] - 0s 987us/step - loss: 0.0056 - binary_accuracy: 1.0000\n", - "Epoch 489/500\n", - "4/4 [==============================] - 0s 1ms/step - loss: 0.0055 - binary_accuracy: 1.0000\n", - "Epoch 490/500\n", - "4/4 [==============================] - 0s 902us/step - loss: 0.0055 - binary_accuracy: 1.0000\n", - "Epoch 491/500\n", - "4/4 [==============================] - 0s 851us/step - loss: 0.0055 - binary_accuracy: 1.0000\n", - "Epoch 492/500\n", - "4/4 [==============================] - 0s 808us/step - loss: 0.0055 - binary_accuracy: 1.0000\n", - "Epoch 493/500\n", - "4/4 [==============================] - 0s 789us/step - loss: 0.0054 - binary_accuracy: 1.0000\n", - "Epoch 494/500\n", - "4/4 [==============================] - 0s 834us/step - loss: 0.0054 - binary_accuracy: 1.0000\n", - "Epoch 495/500\n", - "4/4 
+            "selection_type='elitist' : 0.8282\n",
+            "selection_type='proportional' : 0.6898000000000001\n"
            ]
-          },
+          }
        ],
        "source": [
         "for selection_type, results in tests.items():\n",
         "    print(f\"{selection_type=} : {sum(results) / test_runs}\")"
        ]
       },
       {
        "attachments": {},
        "cell_type": "markdown",
        "metadata": {},
        "source": [
         "### Observations and final solution\n",
         "\n",
         "In this case, _elitist selection_ (for us usually 80-90% solved correctly across several runs, vs. roughly 70%) appears to work better than _fitness-proportional selection_. With the former, the XOR problem could be solved almost every time; with the latter, unfortunately less often.\n",
         "With an additional bias, the whole thing might be solved more consistently."
        ]
       },
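The observations above compare the two strategies only by their average accuracy (0.83 vs. 0.69). For context, the practical difference lies in how parents are drawn from the scored population. The following is a minimal illustrative sketch, not code from the submitted notebook; the names `population`, `fitnesses` and `num_parents` are assumed:

```python
import numpy as np

rng = np.random.default_rng()

def select_parents(population: list, fitnesses: np.ndarray, num_parents: int,
                   selection_type: str = "elitist") -> list:
    """Illustrative parent selection for an evolutionary algorithm.

    'elitist' keeps the best individuals deterministically;
    'proportional' samples parents with probability proportional to fitness
    (roulette-wheel selection), so weaker individuals can still reproduce.
    """
    fitnesses = np.asarray(fitnesses, dtype=float)
    if selection_type == "elitist":
        best = np.argsort(fitnesses)[::-1][:num_parents]  # indices of the fittest
        return [population[i] for i in best]
    if selection_type == "proportional":
        p = fitnesses / fitnesses.sum()  # assumes non-negative, not all-zero fitness
        chosen = rng.choice(len(population), size=num_parents, replace=True, p=p)
        return [population[i] for i in chosen]
    raise ValueError(f"unknown selection_type: {selection_type}")
```

An elitist step never discards the current best weight set, which matches the higher and more stable accuracy reported above; fitness-proportional selection trades that stability for more exploration, since weaker individuals can still be picked.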
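The remark that an additional bias might make the training more consistent could be tried without touching the class: append a constant-1 column to the inputs so that one extra weight per first-layer neuron acts as a bias. This is a hypothetical sketch; only `NeuralNet`, `relu` and `samples` come from the notebook, everything else is assumed:

```python
import numpy as np

def with_bias(x: np.ndarray) -> np.ndarray:
    """Append a constant 1 to every sample so one extra weight per first-layer neuron acts as a bias."""
    return np.hstack([x, np.ones((x.shape[0], 1))])

# Hypothetical usage: the net is then built for 3 inputs instead of 2.
# x_train, y_train = samples(100)
# nn = NeuralNet(inputs=3, hidden_layers=[(8, relu)])
# nn.train((with_bias(x_train), y_train), batch_size=20, epochs=80)
# accuracy = np.mean(np.around(nn.classify(with_bias(x_train))) == y_train)
```

Note that this only gives the first layer a bias; biases on the hidden and output layers would require changes inside `NeuralNet` itself.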
       {
        "cell_type": "code",
        "execution_count": 30,
        "metadata": {},
        "outputs": [
         {
-         "data": {
-          "text/plain": [
-           ""
-          ]
-         },
-         "execution_count": 94,
-         "metadata": {},
-         "output_type": "execute_result"
+         "name": "stdout",
+         "output_type": "stream",
+         "text": [
+          "TensorFlow: 3.5656441999599338, Backpropagation: 0.09983029961585999, Evolutionary: 4.410684499889612\n"
+         ]
         }
        ],
        "source": [
         "# Aufgabe 3 von Aufgabenblatt 4\n",
         "import tensorflow as tf\n",
         "from tensorflow import keras\n",
+        "import timeit\n",
         "\n",
-        "#input\n",
-        "x = np.array([[0,0], [0,1], [1,0], [1,1]], dtype=np.float32)\n",
-        "#output\n",
-        "y = np.array([[0],[1],[1],[0]], dtype=np.float32)\n",
+        "training_data = samples(100)\n",
+        "test_data = samples(500)\n",
         "\n",
+        "# Tensorflow\n",
         "model = tf.keras.models.Sequential()\n",
         "model.add(tf.keras.Input(shape=(2,)))\n",
-        "model.add(tf.keras.layers.Dense(2, activation=tf.keras.activations.sigmoid, kernel_initializer=tf.initializers.Constant(0.5)))\n",
+        "model.add(tf.keras.layers.Dense(2, activation=tf.keras.activations.relu, kernel_initializer=tf.initializers.Constant(0.5)))\n",
         "model.add(tf.keras.layers.Dense(1, activation=tf.keras.activations.sigmoid))\n",
+        "model.compile(optimizer=tf.keras.optimizers.legacy.Adam(learning_rate=0.05), loss=tf.keras.losses.BinaryCrossentropy(), metrics=['binary_accuracy'])\n",
+        "tf_time = timeit.timeit(lambda: model.fit(training_data[0], training_data[1], batch_size=20, epochs=80, verbose=False), number=5)\n",
         "\n",
-        "model.compile(optimizer=tf.keras.optimizers.legacy.Adam(learning_rate=0.05), loss=tf.keras.losses.MeanSquaredError(), metrics=['binary_accuracy'])\n",
-        "#model.summary()\n",
+        "# Backpropagation\n",
+        "nn = NeuralNet(hidden_layers=[(8, relu)])\n",
+        "bp_time = timeit.timeit(lambda: nn.train(training_data, batch_size=20, epochs=80, verbose=False), number=5)\n",
         "\n",
-        "model.fit(x, y, batch_size=1, epochs=500)\n",
+        "# Evolutionary\n",
+        "ev_time = timeit.timeit(lambda: evolution(selection_type=\"elitist\"), number=5)\n",
         "\n",
-        "# Works 20/20 times (only tested 20 times)\n",
-        "\n"
+        "print(f\"TensorFlow: {tf_time}, Backpropagation: {bp_time}, Evolutionary: {ev_time}\")\n"
        ]
       },
       {
        "attachments": {},
        "cell_type": "markdown",
        "metadata": {},
        "source": [
         "### Speeds\n",
         "\n",
         "In our test, backpropagation runs fastest, then TensorFlow, and the evolutionary algorithm is slowest.\n",
         "To improve the runtimes, one could minimize the number of epochs / generations so that the neural network\n",
         "only just converges. The large differences in speed visible here, however, can hardly be evened out\n",
         "by small adjustments in our case."
        ]
       }
      ],
@@ -1353,7 +505,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.9"
+   "version": "3.10.1"
   },
  "orig_nbformat": 4,
  "vscode": {