import numpy as np

rng = np.random.default_rng()

# Exercise 1 (Aufgabe 1)
#
# Activations are implemented with native NumPy ufuncs instead of
# np.vectorize(lambda ...): np.vectorize is a Python-level loop that NumPy's
# own documentation describes as "provided primarily for convenience, not for
# performance", while the ufunc forms below run in C and give the same values.


def relu(x):
    """Element-wise rectified linear unit: max(0, x)."""
    return np.maximum(x, 0)


def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-x)).

    NOTE(review): for very negative x, np.exp(-x) overflows to inf (with a
    RuntimeWarning) and the result saturates to 0.0 — same behavior as the
    original vectorized lambda.
    """
    return 1.0 / (1.0 + np.exp(-x))


# Training data for XOR: 100 random column vectors in {0, 1}^2 (endpoint=True
# makes the upper bound 1 inclusive) paired with the XOR of their components.
inputs = [rng.integers(0, 1, size=(2, 1), endpoint=True) for _ in range(100)]
outputs = [a[0] ^ b[0] for a, b in inputs]
data = list(zip(inputs, outputs))

# Exercise 3 (Aufgabe 3)


def bcel(y, y_hat):
    """Binary cross-entropy loss for label y and prediction y_hat.

    Assumes 0 < y_hat < 1; log(0) yields -inf (with a warning), exactly as
    the original vectorized lambda did.
    """
    return -(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat))


def bcel_derivation(y, y_hat):
    """Derivative of bcel w.r.t. y_hat, for binary labels y in {0, 1}.

    np.where evaluates both branches before selecting, so at y_hat exactly
    0 or 1 a divide warning may be raised for the discarded branch; the
    selected value matches the original implementation.
    """
    return np.where(y == 0, 1.0 / (1.0 - y_hat), -1.0 / y_hat)


def sigmoid_derivation(x):
    """Derivative of the sigmoid: s(x) * (1 - s(x)).

    Computes the sigmoid once (the original lambda evaluated it twice).
    """
    s = sigmoid(x)
    return s * (1 - s)


def relu_derivation(x):
    """Subgradient of ReLU, using the original's convention relu'(0) = 1."""
    return np.where(np.asarray(x) < 0, 0, 1)
# Exercise 2 (Aufgabe 2)


class NeuralNet:
    """Fully connected feed-forward net for the XOR task.

    Hidden layers use ReLU; the single output neuron uses a sigmoid, so the
    forward pass yields a value in (0, 1). Weights are drawn uniformly from
    [-1, 1) via the module-level `rng`. There are no bias terms — presumably
    intentional for the exercise; TODO confirm against the assignment sheet.
    """

    def __init__(self, inputs: int = 2, hidden_layers: int = 1, hidden_layer_neurons: int = 4):
        # Expected column-vector shape of a single sample.
        self.input_shape = (inputs, 1)

        # Weight matrices for the hidden layers. The first layer maps the raw
        # input (inputs x 1), every later layer maps the previous layer's
        # activations (hidden_layer_neurons x 1).
        self.weights = []
        for i in range(hidden_layers):
            fan_in = inputs if i == 0 else hidden_layer_neurons
            self.weights.append(rng.uniform(low=-1.0, high=1.0, size=(hidden_layer_neurons, fan_in)))

        # Output-layer weights. Bug fix: with hidden_layers=0 the output layer
        # consumes the raw input, so its fan-in must be `inputs`, not
        # `hidden_layer_neurons` (the original always used the latter and
        # crashed on the matmul shape mismatch for hidden_layers=0).
        out_fan_in = hidden_layer_neurons if hidden_layers > 0 else inputs
        self.outweights = rng.uniform(low=-1.0, high=1.0, size=(1, out_fan_in))

    def forward_pass(self, x) -> float:
        """Run one sample through the network and return the scalar output.

        Args:
            x: array-like of shape (inputs, 1).

        Raises:
            ValueError: if `x` does not have the expected shape.
        """
        x = np.array(x)
        if x.shape != self.input_shape:
            raise ValueError(f"Input must be of shape {self.input_shape}.")

        # Hidden layers: affine map followed by ReLU.
        for layer in self.weights:
            x = relu(np.matmul(layer, x))

        # Output layer: sigmoid of a (1, 1) matmul result, unwrapped to float.
        return sigmoid(np.matmul(self.outweights, x))[0][0]

    # Exercise 4 (Aufgabe 4)
    def backward_pass(self, learning_rate: float):
        # TODO: backpropagation not implemented yet (assignment stub).
        ...


nn = NeuralNet(hidden_layers=1)

# Demo: print the (untrained) network output for the first 10 samples.
for input_values, expected in data[:10]:
    actual = nn.forward_pass(input_values)

    print(actual)