diff --git a/Neural graph module/ngm.ipynb b/Neural graph module/ngm.ipynb
index e5d9403df82c271b501aa62a75189c618cccc3ec..448cf36375aeab9161dd776cc696f2c298e3753a 100644
--- a/Neural graph module/ngm.ipynb	
+++ b/Neural graph module/ngm.ipynb	
@@ -31,15 +31,7 @@
       "cell_type": "code",
       "execution_count": null,
       "metadata": {},
-      "outputs": [
-        {
-          "name": "stdout",
-          "output_type": "stream",
-          "text": [
-            "cuda\n"
-          ]
-        }
-      ],
+      "outputs": [],
       "source": [
         "# Use GPU if available\n",
         "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
@@ -49,7 +41,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": 20,
+      "execution_count": null,
       "metadata": {},
       "outputs": [],
       "source": [
@@ -215,19 +207,83 @@
     },
     {
       "cell_type": "code",
-      "execution_count": null,
+      "execution_count": 15,
       "metadata": {},
-      "outputs": [],
+      "outputs": [
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "Beginning making batch\n"
+          ]
+        },
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "100%|██████████| 408/408 [00:00<00:00, 616.32it/s]\n"
+          ]
+        },
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "Finished with batches\n",
+            "Beginning making batch\n"
+          ]
+        },
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "100%|██████████| 408/408 [00:00<00:00, 620.42it/s]"
+          ]
+        },
+        {
+          "name": "stdout",
+          "output_type": "stream",
+          "text": [
+            "Finished with batches\n"
+          ]
+        },
+        {
+          "name": "stderr",
+          "output_type": "stream",
+          "text": [
+            "\n"
+          ]
+        },
+        {
+          "ename": "ValueError",
+          "evalue": "Sum of input lengths does not equal the length of the input dataset!",
+          "output_type": "error",
+          "traceback": [
+            "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+            "\u001b[1;31mValueError\u001b[0m                                Traceback (most recent call last)",
+            "\u001b[1;32mc:\\Users\\Albin\\Documents\\TDDE19\\codebase\\Neural graph module\\ngm.ipynb Cell 7\u001b[0m in \u001b[0;36m<cell line: 14>\u001b[1;34m()\u001b[0m\n\u001b[0;32m      <a href='vscode-notebook-cell:/c%3A/Users/Albin/Documents/TDDE19/codebase/Neural%20graph%20module/ngm.ipynb#X11sZmlsZQ%3D%3D?line=8'>9</a>\u001b[0m inputs, attention_mask, correct_rels \u001b[39m=\u001b[39m make_batch()\n\u001b[0;32m     <a href='vscode-notebook-cell:/c%3A/Users/Albin/Documents/TDDE19/codebase/Neural%20graph%20module/ngm.ipynb#X11sZmlsZQ%3D%3D?line=12'>13</a>\u001b[0m dataset \u001b[39m=\u001b[39m MyDataset(\u001b[39m*\u001b[39mmake_batch(), relations\u001b[39m=\u001b[39mrelations)\n\u001b[1;32m---> <a href='vscode-notebook-cell:/c%3A/Users/Albin/Documents/TDDE19/codebase/Neural%20graph%20module/ngm.ipynb#X11sZmlsZQ%3D%3D?line=13'>14</a>\u001b[0m split_data \u001b[39m=\u001b[39m random_split(dataset, [\u001b[39m0.8\u001b[39;49m, \u001b[39m0.2\u001b[39;49m], generator\u001b[39m=\u001b[39;49mtorch\u001b[39m.\u001b[39;49mGenerator()\u001b[39m.\u001b[39;49mmanual_seed(\u001b[39m42\u001b[39;49m))\n\u001b[0;32m     <a href='vscode-notebook-cell:/c%3A/Users/Albin/Documents/TDDE19/codebase/Neural%20graph%20module/ngm.ipynb#X11sZmlsZQ%3D%3D?line=15'>16</a>\u001b[0m train_dataloader \u001b[39m=\u001b[39m DataLoader(train_set, batch_size\u001b[39m=\u001b[39m\u001b[39m1\u001b[39m, shuffle\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m)\n\u001b[0;32m     <a href='vscode-notebook-cell:/c%3A/Users/Albin/Documents/TDDE19/codebase/Neural%20graph%20module/ngm.ipynb#X11sZmlsZQ%3D%3D?line=17'>18</a>\u001b[0m \u001b[39m#show first entry\u001b[39;00m\n",
+            "File \u001b[1;32mb:\\Programs\\Miniconda\\envs\\tdde19\\lib\\site-packages\\torch\\utils\\data\\dataset.py:311\u001b[0m, in \u001b[0;36mrandom_split\u001b[1;34m(dataset, lengths, generator)\u001b[0m\n\u001b[0;32m    309\u001b[0m \u001b[39m# Cannot verify that dataset is Sized\u001b[39;00m\n\u001b[0;32m    310\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39msum\u001b[39m(lengths) \u001b[39m!=\u001b[39m \u001b[39mlen\u001b[39m(dataset):    \u001b[39m# type: ignore[arg-type]\u001b[39;00m\n\u001b[1;32m--> 311\u001b[0m     \u001b[39mraise\u001b[39;00m \u001b[39mValueError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mSum of input lengths does not equal the length of the input dataset!\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[0;32m    313\u001b[0m indices \u001b[39m=\u001b[39m randperm(\u001b[39msum\u001b[39m(lengths), generator\u001b[39m=\u001b[39mgenerator)\u001b[39m.\u001b[39mtolist()\n\u001b[0;32m    314\u001b[0m \u001b[39mreturn\u001b[39;00m [Subset(dataset, indices[offset \u001b[39m-\u001b[39m length : offset]) \u001b[39mfor\u001b[39;00m offset, length \u001b[39min\u001b[39;00m \u001b[39mzip\u001b[39m(_accumulate(lengths), lengths)]\n",
+            "\u001b[1;31mValueError\u001b[0m: Sum of input lengths does not equal the length of the input dataset!"
+          ]
+        }
+      ],
       "source": [
-        "#Prepare data\n",
+        "from torch.utils.data import random_split\n",
         "\n",
+        "#Prepare data\n",
         "def open_json(file):\n",
         "    with open(file, \"r\") as f:\n",
         "        return json.load(f)\n",
         "\n",
+        "relations = open_json(\"../data/relations-query-qald-9-linked.json\")\n",
+        "inputs, attention_mask, correct_rels = make_batch()\n",
+        "\n",
+        "# relations = open_json(\"../data/relations-lcquad-without-http-train-linked.json\")\n",
+        "# train_set = MyDataset(*make_batch(), relations=relations)\n",
+        "\n",
+        "\n",
+        "dataset = MyDataset(*make_batch(), relations=relations)\n",
+        "train_set, valid_set = random_split(dataset, [int(0.8 * len(dataset)), len(dataset) - int(0.8 * len(dataset))], generator=torch.Generator().manual_seed(42))\n",
         "\n",
-        "relations = open_json(\"../data/relations-lcquad-without-http-train-linked.json\")\n",
-        "train_set = MyDataset(*make_batch(), relations=relations)\n",
         "train_dataloader = DataLoader(train_set, batch_size=1, shuffle=True)\n",
         "\n",
         "#show first entry\n",
@@ -258,8 +314,10 @@
         "epoch = 500\n",
         "batch_size = 64\n",
         "train_dataloader = DataLoader(train_set, batch_size=batch_size, shuffle=True)\n",
+        "valid_dataloader = DataLoader(valid_set, batch_size=batch_size, shuffle=True)\n",
         "for e in range(epoch):\n",
-        "    epoch_loss = 0\n",
+        "    train_loss_epoch = 0\n",
+        "    valid_loss_epoch = 0\n",
         "    for i_batch, sample_batched in enumerate(train_dataloader):\n",
         "        optimizer.zero_grad()\n",
         "        train = sample_batched[0]\n",
@@ -273,9 +331,20 @@
         "        # backward and optimize\n",
         "        loss.backward()\n",
         "        optimizer.step()\n",
-        "        epoch_loss = epoch_loss + loss.item()\n",
+        "        train_loss_epoch = train_loss_epoch + loss.item()\n",
+        "\n",
+        "    for i_batch, sample_batched in enumerate(valid_dataloader):\n",
+        "        valid = sample_batched[0]\n",
+        "        valid_mask = sample_batched[1]\n",
+        "        label_index = sample_batched[2].to(device)\n",
+        "        \n",
+        "        # Forward pass\n",
+        "        output = model(valid, valid_mask)\n",
+        "        loss = criterion(output, label_index)\n",
+        "\n",
+        "        valid_loss_epoch = valid_loss_epoch + loss.item()\n",
         "\n",
-        "    print(e+1, epoch_loss / len(sample_batched))"
+        "    print(e+1, \"Train\", train_loss_epoch/len(train_dataloader), \", Valid \", valid_loss_epoch/len(valid_dataloader))"
       ]
     },
     {
@@ -321,7 +390,7 @@
   ],
   "metadata": {
     "kernelspec": {
-      "display_name": "Python 3.9.11 64-bit",
+      "display_name": "Python 3.10.4 ('tdde19')",
       "language": "python",
       "name": "python3"
     },
@@ -335,12 +404,12 @@
       "name": "python",
       "nbconvert_exporter": "python",
       "pygments_lexer": "ipython3",
-      "version": "3.9.11"
+      "version": "3.10.4"
     },
     "orig_nbformat": 4,
     "vscode": {
       "interpreter": {
-        "hash": "64e7cd3b4b88defe39dd61a4584920400d6beb2615ab2244e340c2e20eecdfe9"
+        "hash": "8e4aa0e1a1e15de86146661edda0b2884b54582522f7ff2b916774ba6b8accb1"
       }
     }
   },