Initial commit
commit 1995df58ce
21 changed files with 6708 additions and 0 deletions
1000 Bel_NN_C.ipynb (Normal file)
File diff suppressed because it is too large.
992 Cro_NN_C.ipynb (Normal file)
@@ -0,0 +1,992 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "25c0d153-288c-4ee8-a968-915f853b8157",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd \n",
|
||||
"from matplotlib import pyplot as plt \n",
|
||||
"\n",
|
||||
"data = pd.read_csv('cro_data_test.csv')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "962cacc2-c818-4c5b-bdab-2ee46c6de511",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = np.array(data)\n",
|
||||
"\n",
|
||||
"m,n = data.shape\n",
|
||||
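    "# use rows 1000 onward for training; transpose so each column is one example\n",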
"data_train = data[1000:m].T\n",
|
||||
"\n",
|
||||
"Y_train = data_train[0].astype(int)\n",
|
||||
"\n",
|
||||
"X_train = data_train[1:n]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "e863fe3b-3ee6-42f3-b716-4fcda6a850af",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def init_params():\n",
|
||||
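    " # random values in [-0.5, 0.5): 1024 inputs (32x32 images), 10 hidden units, 5 output classes\n",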
" W1 = np.random.rand(10,1024) - 0.5\n",
|
||||
" b1 = np.random.rand(10,1) - 0.5\n",
|
||||
" W2 = np.random.rand(5,10) - 0.5\n",
|
||||
" b2 = np.random.rand(5,1) - 0.5\n",
|
||||
" return W1, b1 , W2, b2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "64dd0fba-a49e-4f13-b534-e074350b5f42",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def ReLU(Z):\n",
|
||||
" return np.maximum(Z,0)\n",
|
||||
"def softmax(Z):\n",
|
||||
" A = np.exp(Z) / sum(np.exp(Z))\n",
|
||||
" return A\n",
|
||||
"def forward_prop(W1, b1, W2, b2, X):\n",
|
||||
" Z1 = W1.dot(X) + b1\n",
|
||||
" A1 = ReLU(Z1)\n",
|
||||
" Z2 = W2.dot(A1) + b2\n",
|
||||
" A2 = softmax(Z2)\n",
|
||||
" return Z1, A1, Z2, A2\n",
|
||||
"def ReLU_deriv(Z):\n",
|
||||
" return Z > 0\n",
|
||||
"def one_hot(Y):\n",
|
||||
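    " # one-hot encode the integer labels into a (classes x examples) matrix\n",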
" one_hot_Y = np.zeros((Y.size, Y.max() + 1))\n",
|
||||
" one_hot_Y[np.arange(Y.size), Y] = 1\n",
|
||||
" one_hot_Y = one_hot_Y.T\n",
|
||||
" return one_hot_Y\n",
|
||||
"def backward_prop(Z1, A1, Z2, A2, W1, W2, X, Y):\n",
|
||||
" one_hot_Y = one_hot(Y)\n",
|
||||
" dZ2 = A2 - one_hot_Y\n",
|
||||
" dW2 = 1 / m * dZ2.dot(A1.T)\n",
|
||||
" db2 = 1 / m * np.sum(dZ2)\n",
|
||||
" dZ1 = W2.T.dot(dZ2) * ReLU_deriv(Z1)\n",
|
||||
" dW1 = 1 / m * dZ1.dot(X.T)\n",
|
||||
" db1 = 1 / m * np.sum(dZ1)\n",
|
||||
" return dW1, db1, dW2, db2\n",
|
||||
"def update_params(W1, b1, W2, b2, dW1, db1, dW2, db2, alpha):\n",
|
||||
" W1 = W1 - alpha * dW1\n",
|
||||
" b1 = b1 - alpha * db1 \n",
|
||||
" W2 = W2 - alpha * dW2 \n",
|
||||
" b2 = b2 - alpha * db2 \n",
|
||||
" return W1, b1, W2, b2\n",
|
||||
"def get_predictions(A2):\n",
|
||||
" return np.argmax(A2, 0)\n",
|
||||
"def get_accuracy(predictions, Y):\n",
|
||||
" #print(predictions, Y)\n",
|
||||
" return np.sum(predictions == Y) / Y.size"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "e7ef6234-254e-47f6-ac29-ddd92d363e9e",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"acc_store = [] "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "d24bdd4d-1d57-40b1-a95b-3cc33e02312d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def gradient_descent(X, Y, alpha, iterations):\n",
|
||||
" W1, b1, W2, b2 = init_params()\n",
|
||||
" for i in range(iterations):\n",
|
||||
" Z1, A1, Z2, A2 = forward_prop(W1, b1, W2, b2, X)\n",
|
||||
" dW1, db1, dW2, db2 = backward_prop(Z1, A1, Z2, A2, W1, W2, X, Y)\n",
|
||||
" W1, b1, W2, b2 = update_params(W1, b1, W2, b2, dW1, db1, dW2, db2, alpha)\n",
|
||||
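    " # every 10 iterations, report and store the training accuracy\n",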
" if i % 10 == 0:\n",
|
||||
" print(\"Iteration: \", i)\n",
|
||||
" predictions = get_predictions(A2)\n",
|
||||
" pred = get_accuracy(predictions, Y)\n",
|
||||
" print(pred)\n",
|
||||
" acc_store.append(pred)\n",
|
||||
" return W1, b1, W2, b2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "d266b8d3-8f15-4d89-a896-a728215b048d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Iteration: 0\n",
|
||||
"0.2016308376575241\n",
|
||||
"Iteration: 10\n",
|
||||
"0.3700889547813195\n",
|
||||
"Iteration: 20\n",
|
||||
"0.45978502594514453\n",
|
||||
"Iteration: 30\n",
|
||||
"0.519644180874722\n",
|
||||
"Iteration: 40\n",
|
||||
"0.4792438843587843\n",
|
||||
"Iteration: 50\n",
|
||||
"0.49833209785025945\n",
|
||||
"Iteration: 60\n",
|
||||
"0.544477390659748\n",
|
||||
"Iteration: 70\n",
|
||||
"0.5804299481097109\n",
|
||||
"Iteration: 80\n",
|
||||
"0.6250926612305412\n",
|
||||
"Iteration: 90\n",
|
||||
"0.6653076352853966\n",
|
||||
"Iteration: 100\n",
|
||||
"0.6955151964418087\n",
|
||||
"Iteration: 110\n",
|
||||
"0.7168272794662713\n",
|
||||
"Iteration: 120\n",
|
||||
"0.7294292068198666\n",
|
||||
"Iteration: 130\n",
|
||||
"0.7366567828020756\n",
|
||||
"Iteration: 140\n",
|
||||
"0.7462935507783544\n",
|
||||
"Iteration: 150\n",
|
||||
"0.7544477390659748\n",
|
||||
"Iteration: 160\n",
|
||||
"0.758524833209785\n",
|
||||
"Iteration: 170\n",
|
||||
"0.7626019273535952\n",
|
||||
"Iteration: 180\n",
|
||||
"0.7677909562638991\n",
|
||||
"Iteration: 190\n",
|
||||
"0.7729799851742031\n",
|
||||
"Iteration: 200\n",
|
||||
"0.7755744996293551\n",
|
||||
"Iteration: 210\n",
|
||||
"0.7792809488510007\n",
|
||||
"Iteration: 220\n",
|
||||
"0.7824314306893996\n",
|
||||
"Iteration: 230\n",
|
||||
"0.7857672349888807\n",
|
||||
"Iteration: 240\n",
|
||||
"0.7889177168272795\n",
|
||||
"Iteration: 250\n",
|
||||
"0.7909562638991846\n",
|
||||
"Iteration: 260\n",
|
||||
"0.7935507783543365\n",
|
||||
"Iteration: 270\n",
|
||||
"0.7968865826538176\n",
|
||||
"Iteration: 280\n",
|
||||
"0.7996664195700519\n",
|
||||
"Iteration: 290\n",
|
||||
"0.8011489992587102\n",
|
||||
"Iteration: 300\n",
|
||||
"0.8026315789473685\n",
|
||||
"Iteration: 310\n",
|
||||
"0.8052260934025204\n",
|
||||
"Iteration: 320\n",
|
||||
"0.8081912527798369\n",
|
||||
"Iteration: 330\n",
|
||||
"0.8096738324684952\n",
|
||||
"Iteration: 340\n",
|
||||
"0.8118977020014826\n",
|
||||
"Iteration: 350\n",
|
||||
"0.8139362490733877\n",
|
||||
"Iteration: 360\n",
|
||||
"0.8148628613787992\n",
|
||||
"Iteration: 370\n",
|
||||
"0.8169014084507042\n",
|
||||
"Iteration: 380\n",
|
||||
"0.8181986656782803\n",
|
||||
"Iteration: 390\n",
|
||||
"0.8191252779836916\n",
|
||||
"Iteration: 400\n",
|
||||
"0.8204225352112676\n",
|
||||
"Iteration: 410\n",
|
||||
"0.8215344699777613\n",
|
||||
"Iteration: 420\n",
|
||||
"0.8224610822831727\n",
|
||||
"Iteration: 430\n",
|
||||
"0.825240919199407\n",
|
||||
"Iteration: 440\n",
|
||||
"0.8259822090437361\n",
|
||||
"Iteration: 450\n",
|
||||
"0.8280207561156412\n",
|
||||
"Iteration: 460\n",
|
||||
"0.8293180133432172\n",
|
||||
"Iteration: 470\n",
|
||||
"0.8306152705707932\n",
|
||||
"Iteration: 480\n",
|
||||
"0.8317272053372868\n",
|
||||
"Iteration: 490\n",
|
||||
"0.8313565604151223\n",
|
||||
"Iteration: 500\n",
|
||||
"0.8317272053372868\n",
|
||||
"Iteration: 510\n",
|
||||
"0.8320978502594515\n",
|
||||
"Iteration: 520\n",
|
||||
"0.8335804299481097\n",
|
||||
"Iteration: 530\n",
|
||||
"0.8348776871756857\n",
|
||||
"Iteration: 540\n",
|
||||
"0.8356189770200149\n",
|
||||
"Iteration: 550\n",
|
||||
"0.836360266864344\n",
|
||||
"Iteration: 560\n",
|
||||
"0.8367309117865085\n",
|
||||
"Iteration: 570\n",
|
||||
"0.8372868791697554\n",
|
||||
"Iteration: 580\n",
|
||||
"0.8395107487027428\n",
|
||||
"Iteration: 590\n",
|
||||
"0.8400667160859896\n",
|
||||
"Iteration: 600\n",
|
||||
"0.8404373610081541\n",
|
||||
"Iteration: 610\n",
|
||||
"0.8393254262416605\n",
|
||||
"Iteration: 620\n",
|
||||
"0.8400667160859896\n",
|
||||
"Iteration: 630\n",
|
||||
"0.8408080059303188\n",
|
||||
"Iteration: 640\n",
|
||||
"0.8408080059303188\n",
|
||||
"Iteration: 650\n",
|
||||
"0.8409933283914011\n",
|
||||
"Iteration: 660\n",
|
||||
"0.8421052631578947\n",
|
||||
"Iteration: 670\n",
|
||||
"0.8432171979243884\n",
|
||||
"Iteration: 680\n",
|
||||
"0.8432171979243884\n",
|
||||
"Iteration: 690\n",
|
||||
"0.843587842846553\n",
|
||||
"Iteration: 700\n",
|
||||
"0.8441438102297999\n",
|
||||
"Iteration: 710\n",
|
||||
"0.8445144551519644\n",
|
||||
"Iteration: 720\n",
|
||||
"0.8445144551519644\n",
|
||||
"Iteration: 730\n",
|
||||
"0.8445144551519644\n",
|
||||
"Iteration: 740\n",
|
||||
"0.844885100074129\n",
|
||||
"Iteration: 750\n",
|
||||
"0.844885100074129\n",
|
||||
"Iteration: 760\n",
|
||||
"0.8452557449962935\n",
|
||||
"Iteration: 770\n",
|
||||
"0.8459970348406227\n",
|
||||
"Iteration: 780\n",
|
||||
"0.8465530022238695\n",
|
||||
"Iteration: 790\n",
|
||||
"0.8467383246849518\n",
|
||||
"Iteration: 800\n",
|
||||
"0.8471089696071163\n",
|
||||
"Iteration: 810\n",
|
||||
"0.8476649369903633\n",
|
||||
"Iteration: 820\n",
|
||||
"0.8491475166790216\n",
|
||||
"Iteration: 830\n",
|
||||
"0.8502594514455152\n",
|
||||
"Iteration: 840\n",
|
||||
"0.8502594514455152\n",
|
||||
"Iteration: 850\n",
|
||||
"0.8511860637509266\n",
|
||||
"Iteration: 860\n",
|
||||
"0.8517420311341735\n",
|
||||
"Iteration: 870\n",
|
||||
"0.8517420311341735\n",
|
||||
"Iteration: 880\n",
|
||||
"0.8519273535952557\n",
|
||||
"Iteration: 890\n",
|
||||
"0.8519273535952557\n",
|
||||
"Iteration: 900\n",
|
||||
"0.8532246108228317\n",
|
||||
"Iteration: 910\n",
|
||||
"0.8539659006671608\n",
|
||||
"Iteration: 920\n",
|
||||
"0.85470719051149\n",
|
||||
"Iteration: 930\n",
|
||||
"0.85470719051149\n",
|
||||
"Iteration: 940\n",
|
||||
"0.8548925129725723\n",
|
||||
"Iteration: 950\n",
|
||||
"0.8556338028169014\n",
|
||||
"Iteration: 960\n",
|
||||
"0.8563750926612306\n",
|
||||
"Iteration: 970\n",
|
||||
"0.8565604151223128\n",
|
||||
"Iteration: 980\n",
|
||||
"0.8571163825055597\n",
|
||||
"Iteration: 990\n",
|
||||
"0.8567457375833951\n",
|
||||
"Iteration: 1000\n",
|
||||
"0.8578576723498889\n",
|
||||
"Iteration: 1010\n",
|
||||
"0.8580429948109711\n",
|
||||
"Iteration: 1020\n",
|
||||
"0.8582283172720534\n",
|
||||
"Iteration: 1030\n",
|
||||
"0.8585989621942179\n",
|
||||
"Iteration: 1040\n",
|
||||
"0.8587842846553002\n",
|
||||
"Iteration: 1050\n",
|
||||
"0.8591549295774648\n",
|
||||
"Iteration: 1060\n",
|
||||
"0.8595255744996294\n",
|
||||
"Iteration: 1070\n",
|
||||
"0.8595255744996294\n",
|
||||
"Iteration: 1080\n",
|
||||
"0.8597108969607117\n",
|
||||
"Iteration: 1090\n",
|
||||
"0.860637509266123\n",
|
||||
"Iteration: 1100\n",
|
||||
"0.8626760563380281\n",
|
||||
"Iteration: 1110\n",
|
||||
"0.8628613787991104\n",
|
||||
"Iteration: 1120\n",
|
||||
"0.8630467012601928\n",
|
||||
"Iteration: 1130\n",
|
||||
"0.863232023721275\n",
|
||||
"Iteration: 1140\n",
|
||||
"0.8630467012601928\n",
|
||||
"Iteration: 1150\n",
|
||||
"0.863232023721275\n",
|
||||
"Iteration: 1160\n",
|
||||
"0.8641586360266864\n",
|
||||
"Iteration: 1170\n",
|
||||
"0.8648999258710156\n",
|
||||
"Iteration: 1180\n",
|
||||
"0.8647146034099333\n",
|
||||
"Iteration: 1190\n",
|
||||
"0.8654558932542624\n",
|
||||
"Iteration: 1200\n",
|
||||
"0.8650852483320979\n",
|
||||
"Iteration: 1210\n",
|
||||
"0.8652705707931801\n",
|
||||
"Iteration: 1220\n",
|
||||
"0.8661971830985915\n",
|
||||
"Iteration: 1230\n",
|
||||
"0.8667531504818384\n",
|
||||
"Iteration: 1240\n",
|
||||
"0.8669384729429207\n",
|
||||
"Iteration: 1250\n",
|
||||
"0.8665678280207562\n",
|
||||
"Iteration: 1260\n",
|
||||
"0.8665678280207562\n",
|
||||
"Iteration: 1270\n",
|
||||
"0.8663825055596739\n",
|
||||
"Iteration: 1280\n",
|
||||
"0.865826538176427\n",
|
||||
"Iteration: 1290\n",
|
||||
"0.8665678280207562\n",
|
||||
"Iteration: 1300\n",
|
||||
"0.8682357301704967\n",
|
||||
"Iteration: 1310\n",
|
||||
"0.8700889547813195\n",
|
||||
"Iteration: 1320\n",
|
||||
"0.8721275018532246\n",
|
||||
"Iteration: 1330\n",
|
||||
"0.8723128243143069\n",
|
||||
"Iteration: 1340\n",
|
||||
"0.8723128243143069\n",
|
||||
"Iteration: 1350\n",
|
||||
"0.8702742772424018\n",
|
||||
"Iteration: 1360\n",
|
||||
"0.8699036323202373\n",
|
||||
"Iteration: 1370\n",
|
||||
"0.8680504077094143\n",
|
||||
"Iteration: 1380\n",
|
||||
"0.8680504077094143\n",
|
||||
"Iteration: 1390\n",
|
||||
"0.8691623424759081\n",
|
||||
"Iteration: 1400\n",
|
||||
"0.8713862120088954\n",
|
||||
"Iteration: 1410\n",
|
||||
"0.873054114158636\n",
|
||||
"Iteration: 1420\n",
|
||||
"0.874351371386212\n",
|
||||
"Iteration: 1430\n",
|
||||
"0.8758339510748703\n",
|
||||
"Iteration: 1440\n",
|
||||
"0.8763899184581171\n",
|
||||
"Iteration: 1450\n",
|
||||
"0.8763899184581171\n",
|
||||
"Iteration: 1460\n",
|
||||
"0.8762045959970348\n",
|
||||
"Iteration: 1470\n",
|
||||
"0.8745366938472943\n",
|
||||
"Iteration: 1480\n",
|
||||
"0.8724981467753892\n",
|
||||
"Iteration: 1490\n",
|
||||
"0.8702742772424018\n",
|
||||
"Iteration: 1500\n",
|
||||
"0.8710155670867309\n",
|
||||
"Iteration: 1510\n",
|
||||
"0.873054114158636\n",
|
||||
"Iteration: 1520\n",
|
||||
"0.8736100815418829\n",
|
||||
"Iteration: 1530\n",
|
||||
"0.8739807264640475\n",
|
||||
"Iteration: 1540\n",
|
||||
"0.8747220163083765\n",
|
||||
"Iteration: 1550\n",
|
||||
"0.8750926612305412\n",
|
||||
"Iteration: 1560\n",
|
||||
"0.8752779836916235\n",
|
||||
"Iteration: 1570\n",
|
||||
"0.8752779836916235\n",
|
||||
"Iteration: 1580\n",
|
||||
"0.8750926612305412\n",
|
||||
"Iteration: 1590\n",
|
||||
"0.8750926612305412\n",
|
||||
"Iteration: 1600\n",
|
||||
"0.8760192735359525\n",
|
||||
"Iteration: 1610\n",
|
||||
"0.876945885841364\n",
|
||||
"Iteration: 1620\n",
|
||||
"0.8775018532246108\n",
|
||||
"Iteration: 1630\n",
|
||||
"0.8778724981467754\n",
|
||||
"Iteration: 1640\n",
|
||||
"0.8784284655300222\n",
|
||||
"Iteration: 1650\n",
|
||||
"0.8782431430689399\n",
|
||||
"Iteration: 1660\n",
|
||||
"0.876945885841364\n",
|
||||
"Iteration: 1670\n",
|
||||
"0.8765752409191994\n",
|
||||
"Iteration: 1680\n",
|
||||
"0.8773165307635286\n",
|
||||
"Iteration: 1690\n",
|
||||
"0.8778724981467754\n",
|
||||
"Iteration: 1700\n",
|
||||
"0.8793550778354337\n",
|
||||
"Iteration: 1710\n",
|
||||
"0.8797257227575982\n",
|
||||
"Iteration: 1720\n",
|
||||
"0.8808376575240919\n",
|
||||
"Iteration: 1730\n",
|
||||
"0.8810229799851742\n",
|
||||
"Iteration: 1740\n",
|
||||
"0.8812083024462565\n",
|
||||
"Iteration: 1750\n",
|
||||
"0.8810229799851742\n",
|
||||
"Iteration: 1760\n",
|
||||
"0.8821349147516679\n",
|
||||
"Iteration: 1770\n",
|
||||
"0.8825055596738325\n",
|
||||
"Iteration: 1780\n",
|
||||
"0.8826908821349148\n",
|
||||
"Iteration: 1790\n",
|
||||
"0.882876204595997\n",
|
||||
"Iteration: 1800\n",
|
||||
"0.8830615270570793\n",
|
||||
"Iteration: 1810\n",
|
||||
"0.8832468495181616\n",
|
||||
"Iteration: 1820\n",
|
||||
"0.8834321719792438\n",
|
||||
"Iteration: 1830\n",
|
||||
"0.8825055596738325\n",
|
||||
"Iteration: 1840\n",
|
||||
"0.8821349147516679\n",
|
||||
"Iteration: 1850\n",
|
||||
"0.8817642698295033\n",
|
||||
"Iteration: 1860\n",
|
||||
"0.8826908821349148\n",
|
||||
"Iteration: 1870\n",
|
||||
"0.8843587842846553\n",
|
||||
"Iteration: 1880\n",
|
||||
"0.8851000741289844\n",
|
||||
"Iteration: 1890\n",
|
||||
"0.8852853965900667\n",
|
||||
"Iteration: 1900\n",
|
||||
"0.8856560415122313\n",
|
||||
"Iteration: 1910\n",
|
||||
"0.8856560415122313\n",
|
||||
"Iteration: 1920\n",
|
||||
"0.8858413639733136\n",
|
||||
"Iteration: 1930\n",
|
||||
"0.8865826538176427\n",
|
||||
"Iteration: 1940\n",
|
||||
"0.8871386212008896\n",
|
||||
"Iteration: 1950\n",
|
||||
"0.8869532987398072\n",
|
||||
"Iteration: 1960\n",
|
||||
"0.8873239436619719\n",
|
||||
"Iteration: 1970\n",
|
||||
"0.888065233506301\n",
|
||||
"Iteration: 1980\n",
|
||||
"0.8882505559673832\n",
|
||||
"Iteration: 1990\n",
|
||||
"0.8871386212008896\n",
|
||||
"Iteration: 2000\n",
|
||||
"0.885470719051149\n",
|
||||
"Iteration: 2010\n",
|
||||
"0.8865826538176427\n",
|
||||
"Iteration: 2020\n",
|
||||
"0.8878799110452187\n",
|
||||
"Iteration: 2030\n",
|
||||
"0.8888065233506302\n",
|
||||
"Iteration: 2040\n",
|
||||
"0.8888065233506302\n",
|
||||
"Iteration: 2050\n",
|
||||
"0.889362490733877\n",
|
||||
"Iteration: 2060\n",
|
||||
"0.8891771682727947\n",
|
||||
"Iteration: 2070\n",
|
||||
"0.888065233506301\n",
|
||||
"Iteration: 2080\n",
|
||||
"0.8886212008895478\n",
|
||||
"Iteration: 2090\n",
|
||||
"0.8891771682727947\n",
|
||||
"Iteration: 2100\n",
|
||||
"0.8904744255003706\n",
|
||||
"Iteration: 2110\n",
|
||||
"0.8910303928836175\n",
|
||||
"Iteration: 2120\n",
|
||||
"0.8908450704225352\n",
|
||||
"Iteration: 2130\n",
|
||||
"0.8910303928836175\n",
|
||||
"Iteration: 2140\n",
|
||||
"0.8914010378057821\n",
|
||||
"Iteration: 2150\n",
|
||||
"0.8917716827279466\n",
|
||||
"Iteration: 2160\n",
|
||||
"0.8915863602668643\n",
|
||||
"Iteration: 2170\n",
|
||||
"0.890659747961453\n",
|
||||
"Iteration: 2180\n",
|
||||
"0.8908450704225352\n",
|
||||
"Iteration: 2190\n",
|
||||
"0.8914010378057821\n",
|
||||
"Iteration: 2200\n",
|
||||
"0.8919570051890289\n",
|
||||
"Iteration: 2210\n",
|
||||
"0.8921423276501111\n",
|
||||
"Iteration: 2220\n",
|
||||
"0.8926982950333581\n",
|
||||
"Iteration: 2230\n",
|
||||
"0.8932542624166049\n",
|
||||
"Iteration: 2240\n",
|
||||
"0.8932542624166049\n",
|
||||
"Iteration: 2250\n",
|
||||
"0.8938102297998517\n",
|
||||
"Iteration: 2260\n",
|
||||
"0.8939955522609341\n",
|
||||
"Iteration: 2270\n",
|
||||
"0.8938102297998517\n",
|
||||
"Iteration: 2280\n",
|
||||
"0.8926982950333581\n",
|
||||
"Iteration: 2290\n",
|
||||
"0.8928836174944403\n",
|
||||
"Iteration: 2300\n",
|
||||
"0.8934395848776872\n",
|
||||
"Iteration: 2310\n",
|
||||
"0.8943661971830986\n",
|
||||
"Iteration: 2320\n",
|
||||
"0.8956634544106745\n",
|
||||
"Iteration: 2330\n",
|
||||
"0.89529280948851\n",
|
||||
"Iteration: 2340\n",
|
||||
"0.8954781319495922\n",
|
||||
"Iteration: 2350\n",
|
||||
"0.8954781319495922\n",
|
||||
"Iteration: 2360\n",
|
||||
"0.8960340993328392\n",
|
||||
"Iteration: 2370\n",
|
||||
"0.8964047442550037\n",
|
||||
"Iteration: 2380\n",
|
||||
"0.8964047442550037\n",
|
||||
"Iteration: 2390\n",
|
||||
"0.8964047442550037\n",
|
||||
"Iteration: 2400\n",
|
||||
"0.8964047442550037\n",
|
||||
"Iteration: 2410\n",
|
||||
"0.896590066716086\n",
|
||||
"Iteration: 2420\n",
|
||||
"0.896590066716086\n",
|
||||
"Iteration: 2430\n",
|
||||
"0.8969607116382505\n",
|
||||
"Iteration: 2440\n",
|
||||
"0.8969607116382505\n",
|
||||
"Iteration: 2450\n",
|
||||
"0.8971460340993328\n",
|
||||
"Iteration: 2460\n",
|
||||
"0.8973313565604151\n",
|
||||
"Iteration: 2470\n",
|
||||
"0.8977020014825797\n",
|
||||
"Iteration: 2480\n",
|
||||
"0.8980726464047443\n",
|
||||
"Iteration: 2490\n",
|
||||
"0.8982579688658265\n",
|
||||
"Iteration: 2500\n",
|
||||
"0.8986286137879911\n",
|
||||
"Iteration: 2510\n",
|
||||
"0.899184581171238\n",
|
||||
"Iteration: 2520\n",
|
||||
"0.899184581171238\n",
|
||||
"Iteration: 2530\n",
|
||||
"0.8989992587101556\n",
|
||||
"Iteration: 2540\n",
|
||||
"0.8989992587101556\n",
|
||||
"Iteration: 2550\n",
|
||||
"0.899184581171238\n",
|
||||
"Iteration: 2560\n",
|
||||
"0.8995552260934025\n",
|
||||
"Iteration: 2570\n",
|
||||
"0.8997405485544848\n",
|
||||
"Iteration: 2580\n",
|
||||
"0.8997405485544848\n",
|
||||
"Iteration: 2590\n",
|
||||
"0.9004818383988139\n",
|
||||
"Iteration: 2600\n",
|
||||
"0.9006671608598962\n",
|
||||
"Iteration: 2610\n",
|
||||
"0.9010378057820608\n",
|
||||
"Iteration: 2620\n",
|
||||
"0.9010378057820608\n",
|
||||
"Iteration: 2630\n",
|
||||
"0.9008524833209784\n",
|
||||
"Iteration: 2640\n",
|
||||
"0.9004818383988139\n",
|
||||
"Iteration: 2650\n",
|
||||
"0.9008524833209784\n",
|
||||
"Iteration: 2660\n",
|
||||
"0.9012231282431431\n",
|
||||
"Iteration: 2670\n",
|
||||
"0.9017790956263899\n",
|
||||
"Iteration: 2680\n",
|
||||
"0.9023350630096367\n",
|
||||
"Iteration: 2690\n",
|
||||
"0.902520385470719\n",
|
||||
"Iteration: 2700\n",
|
||||
"0.9027057079318014\n",
|
||||
"Iteration: 2710\n",
|
||||
"0.9028910303928837\n",
|
||||
"Iteration: 2720\n",
|
||||
"0.9030763528539659\n",
|
||||
"Iteration: 2730\n",
|
||||
"0.9030763528539659\n",
|
||||
"Iteration: 2740\n",
|
||||
"0.9030763528539659\n",
|
||||
"Iteration: 2750\n",
|
||||
"0.9027057079318014\n",
|
||||
"Iteration: 2760\n",
|
||||
"0.9015937731653076\n",
|
||||
"Iteration: 2770\n",
|
||||
"0.9015937731653076\n",
|
||||
"Iteration: 2780\n",
|
||||
"0.9014084507042254\n",
|
||||
"Iteration: 2790\n",
|
||||
"0.9015937731653076\n",
|
||||
"Iteration: 2800\n",
|
||||
"0.9030763528539659\n",
|
||||
"Iteration: 2810\n",
|
||||
"0.903817642698295\n",
|
||||
"Iteration: 2820\n",
|
||||
"0.9047442550037065\n",
|
||||
"Iteration: 2830\n",
|
||||
"0.9056708673091178\n",
|
||||
"Iteration: 2840\n",
|
||||
"0.906412157153447\n",
|
||||
"Iteration: 2850\n",
|
||||
"0.906412157153447\n",
|
||||
"Iteration: 2860\n",
|
||||
"0.9047442550037065\n",
|
||||
"Iteration: 2870\n",
|
||||
"0.9040029651593773\n",
|
||||
"Iteration: 2880\n",
|
||||
"0.9030763528539659\n",
|
||||
"Iteration: 2890\n",
|
||||
"0.903817642698295\n",
|
||||
"Iteration: 2900\n",
|
||||
"0.9049295774647887\n",
|
||||
"Iteration: 2910\n",
|
||||
"0.9047442550037065\n",
|
||||
"Iteration: 2920\n",
|
||||
"0.9053002223869533\n",
|
||||
"Iteration: 2930\n",
|
||||
"0.9058561897702001\n",
|
||||
"Iteration: 2940\n",
|
||||
"0.9065974796145293\n",
|
||||
"Iteration: 2950\n",
|
||||
"0.9073387694588584\n",
|
||||
"Iteration: 2960\n",
|
||||
"0.9086360266864344\n",
|
||||
"Iteration: 2970\n",
|
||||
"0.9091919940696812\n",
|
||||
"Iteration: 2980\n",
|
||||
"0.9091919940696812\n",
|
||||
"Iteration: 2990\n",
|
||||
"0.9091919940696812\n",
|
||||
"Iteration: 3000\n",
|
||||
"0.9095626389918459\n",
|
||||
"Iteration: 3010\n",
|
||||
"0.9073387694588584\n",
|
||||
"Iteration: 3020\n",
|
||||
"0.9027057079318014\n",
|
||||
"Iteration: 3030\n",
|
||||
"0.9023350630096367\n",
|
||||
"Iteration: 3040\n",
|
||||
"0.9049295774647887\n",
|
||||
"Iteration: 3050\n",
|
||||
"0.9062268346923648\n",
|
||||
"Iteration: 3060\n",
|
||||
"0.9080800593031876\n",
|
||||
"Iteration: 3070\n",
|
||||
"0.9080800593031876\n",
|
||||
"Iteration: 3080\n",
|
||||
"0.9082653817642699\n",
|
||||
"Iteration: 3090\n",
|
||||
"0.9082653817642699\n",
|
||||
"Iteration: 3100\n",
|
||||
"0.9088213491475167\n",
|
||||
"Iteration: 3110\n",
|
||||
"0.9097479614529281\n",
|
||||
"Iteration: 3120\n",
|
||||
"0.911045218680504\n",
|
||||
"Iteration: 3130\n",
|
||||
"0.9119718309859155\n",
|
||||
"Iteration: 3140\n",
|
||||
"0.91234247590808\n",
|
||||
"Iteration: 3150\n",
|
||||
"0.9125277983691623\n",
|
||||
"Iteration: 3160\n",
|
||||
"0.9117865085248332\n",
|
||||
"Iteration: 3170\n",
|
||||
"0.9086360266864344\n",
|
||||
"Iteration: 3180\n",
|
||||
"0.905114899925871\n",
|
||||
"Iteration: 3190\n",
|
||||
"0.9032616753150482\n",
|
||||
"Iteration: 3200\n",
|
||||
"0.9054855448480356\n",
|
||||
"Iteration: 3210\n",
|
||||
"0.9075240919199407\n",
|
||||
"Iteration: 3220\n",
|
||||
"0.9106745737583395\n",
|
||||
"Iteration: 3230\n",
|
||||
"0.9121571534469978\n",
|
||||
"Iteration: 3240\n",
|
||||
"0.9134544106745738\n",
|
||||
"Iteration: 3250\n",
|
||||
"0.9141957005189029\n",
|
||||
"Iteration: 3260\n",
|
||||
"0.9140103780578206\n",
|
||||
"Iteration: 3270\n",
|
||||
"0.9141957005189029\n",
|
||||
"Iteration: 3280\n",
|
||||
"0.9132690882134915\n",
|
||||
"Iteration: 3290\n",
|
||||
"0.9093773165307635\n",
|
||||
"Iteration: 3300\n",
|
||||
"0.9071534469977761\n",
|
||||
"Iteration: 3310\n",
|
||||
"0.9090066716085989\n",
|
||||
"Iteration: 3320\n",
|
||||
"0.9106745737583395\n",
|
||||
"Iteration: 3330\n",
|
||||
"0.911045218680504\n",
|
||||
"Iteration: 3340\n",
|
||||
"0.9127131208302446\n",
|
||||
"Iteration: 3350\n",
|
||||
"0.9141957005189029\n",
|
||||
"Iteration: 3360\n",
|
||||
"0.9145663454410674\n",
|
||||
"Iteration: 3370\n",
|
||||
"0.9147516679021498\n",
|
||||
"Iteration: 3380\n",
|
||||
"0.9149369903632321\n",
|
||||
"Iteration: 3390\n",
|
||||
"0.913639733135656\n",
|
||||
"Iteration: 3400\n",
|
||||
"0.9117865085248332\n",
|
||||
"Iteration: 3410\n",
|
||||
"0.9106745737583395\n",
|
||||
"Iteration: 3420\n",
|
||||
"0.9106745737583395\n",
|
||||
"Iteration: 3430\n",
|
||||
"0.911601186063751\n",
|
||||
"Iteration: 3440\n",
|
||||
"0.9141957005189029\n",
|
||||
"Iteration: 3450\n",
|
||||
"0.9153076352853966\n",
|
||||
"Iteration: 3460\n",
|
||||
"0.9169755374351372\n",
|
||||
"Iteration: 3470\n",
|
||||
"0.9171608598962194\n",
|
||||
"Iteration: 3480\n",
|
||||
"0.9173461823573017\n",
|
||||
"Iteration: 3490\n",
|
||||
"0.9153076352853966\n",
|
||||
"Iteration: 3500\n",
|
||||
"0.9132690882134915\n",
|
||||
"Iteration: 3510\n",
|
||||
"0.9119718309859155\n",
|
||||
"Iteration: 3520\n",
|
||||
"0.9121571534469978\n",
|
||||
"Iteration: 3530\n",
|
||||
"0.9140103780578206\n",
|
||||
"Iteration: 3540\n",
|
||||
"0.9149369903632321\n",
|
||||
"Iteration: 3550\n",
|
||||
"0.9167902149740549\n",
|
||||
"Iteration: 3560\n",
|
||||
"0.917531504818384\n",
|
||||
"Iteration: 3570\n",
|
||||
"0.9191994069681245\n",
|
||||
"Iteration: 3580\n",
|
||||
"0.9195700518902891\n",
|
||||
"Iteration: 3590\n",
|
||||
"0.9195700518902891\n",
|
||||
"Iteration: 3600\n",
|
||||
"0.9191994069681245\n",
|
||||
"Iteration: 3610\n",
|
||||
"0.9186434395848777\n",
|
||||
"Iteration: 3620\n",
|
||||
"0.9164195700518903\n",
|
||||
"Iteration: 3630\n",
|
||||
"0.911045218680504\n",
|
||||
"Iteration: 3640\n",
|
||||
"0.9106745737583395\n",
|
||||
"Iteration: 3650\n",
|
||||
"0.9128984432913269\n",
|
||||
"Iteration: 3660\n",
|
||||
"0.9160489251297257\n",
|
||||
"Iteration: 3670\n",
|
||||
"0.9184581171237954\n",
|
||||
"Iteration: 3680\n",
|
||||
"0.9190140845070423\n",
|
||||
"Iteration: 3690\n",
|
||||
"0.9204966641957005\n",
|
||||
"Iteration: 3700\n",
|
||||
"0.9206819866567828\n",
|
||||
"Iteration: 3710\n",
|
||||
"0.9203113417346183\n",
|
||||
"Iteration: 3720\n",
|
||||
"0.9203113417346183\n",
|
||||
"Iteration: 3730\n",
|
||||
"0.9197553743513713\n",
|
||||
"Iteration: 3740\n",
|
||||
"0.9195700518902891\n",
|
||||
"Iteration: 3750\n",
|
||||
"0.9156782802075611\n",
|
||||
"Iteration: 3760\n",
|
||||
"0.91234247590808\n",
|
||||
"Iteration: 3770\n",
|
||||
"0.9127131208302446\n",
|
||||
"Iteration: 3780\n",
|
||||
"0.9158636026686434\n",
|
||||
"Iteration: 3790\n",
|
||||
"0.9179021497405485\n",
|
||||
"Iteration: 3800\n",
|
||||
"0.9190140845070423\n",
|
||||
"Iteration: 3810\n",
|
||||
"0.920126019273536\n",
|
||||
"Iteration: 3820\n",
|
||||
"0.9206819866567828\n",
|
||||
"Iteration: 3830\n",
|
||||
"0.9214232765011119\n",
|
||||
"Iteration: 3840\n",
|
||||
"0.9212379540400296\n",
|
||||
"Iteration: 3850\n",
|
||||
"0.9212379540400296\n",
|
||||
"Iteration: 3860\n",
|
||||
"0.9210526315789473\n",
|
||||
"Iteration: 3870\n",
|
||||
"0.9193847294292068\n",
|
||||
"Iteration: 3880\n",
|
||||
"0.9169755374351372\n",
|
||||
"Iteration: 3890\n",
|
||||
"0.9154929577464789\n",
|
||||
"Iteration: 3900\n",
|
||||
"0.9164195700518903\n",
|
||||
"Iteration: 3910\n",
|
||||
"0.9180874722016308\n",
|
||||
"Iteration: 3920\n",
|
||||
"0.920126019273536\n",
|
||||
"Iteration: 3930\n",
|
||||
"0.9219792438843588\n",
|
||||
"Iteration: 3940\n",
|
||||
"0.9225352112676056\n",
|
||||
"Iteration: 3950\n",
|
||||
"0.9225352112676056\n",
|
||||
"Iteration: 3960\n",
|
||||
"0.9223498888065234\n",
|
||||
"Iteration: 3970\n",
|
||||
"0.9208673091178651\n",
|
||||
"Iteration: 3980\n",
|
||||
"0.9195700518902891\n",
|
||||
"Iteration: 3990\n",
|
||||
"0.9186434395848777\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
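    "# train for 4000 iterations at learning rate 0.10, then save the accuracy trace and the weights\n",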
"W1, b1, W2, b2 = gradient_descent(X_train, Y_train, 0.10, 4000)\n",
|
||||
"df = pd.DataFrame(acc_store)\n",
|
||||
"df.to_csv('cr_acc.csv', index=False)\n",
|
||||
"np.savez(\"cr_weights\", W1, b1, W2, b2)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "11203f4e-4adf-4a47-a6e2-a8847f27f0cc",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.17"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
1098 German_NN_C.ipynb (Normal file)
File diff suppressed because it is too large.
152 NoNeed/Bel_Data_Loader.ipynb (Normal file)
@@ -0,0 +1,152 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "e2a5d1d7-6bb3-4e24-9067-880296de1fc9",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"import imageio\n",
|
||||
"from skimage import color, transform\n",
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "b37f1351-0a00-4a4b-9067-ea55a662bc80",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main_folder = 'data/Bel_Training_Set/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "76f41177-fd53-4bf6-9e75-ba1a98c414ff",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"subfolders = [f for f in os.listdir(main_folder) if os.path.isdir(os.path.join(main_folder, f))]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "706d2a6d-8147-42a1-ba19-3cc7108fcfea",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"image_data = []\n",
|
||||
"label_data = []"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "86841170-b9bc-46cf-b482-f2d653060bc0",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docnames = [\"Pixel \" + str(i) for i in range(1024)]\n",
|
||||
"docnames.insert(0, 'Label')\n",
|
||||
"df1 = pd.DataFrame(columns = docnames) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "d9e5d953-1652-47d3-a832-d71d87c2b7ee",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
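    "# write one flattened image and its label into row z of DataFrame y\n",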
"def add_to_dataset(x,y,z,l):\n",
|
||||
" y.at[z,'Label'] = l\n",
|
||||
" for i in range(0,1024):\n",
|
||||
" y.at[z,docnames[i+1]] = x[i]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "82fe02ed-8471-493f-aded-58c54edb7ef6",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"i = 0\n",
|
||||
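    "# walk each class subfolder; the folder name is the integer class label\n",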
"for subfolder in subfolders:\n",
|
||||
" subfolder_path = os.path.join(main_folder, subfolder)\n",
|
||||
" for filename in os.listdir(subfolder_path):\n",
|
||||
" \n",
|
||||
" file_path = os.path.join(subfolder_path, filename)\n",
|
||||
" if filename.lower().endswith('.ppm'):\n",
|
||||
" img_array = imageio.v2.imread(file_path)\n",
|
||||
" resized_img_array = transform.resize(img_array, (32, 32))\n",
|
||||
" gray_img_array = color.rgb2gray(resized_img_array)\n",
|
||||
" flattened_img_array = gray_img_array.flatten()\n",
|
||||
" add_to_dataset(flattened_img_array,df1,i,int(subfolder))\n",
|
||||
" i = i + 1\n",
|
||||
" #print(\"Image From\", int(subfolder), \"Image Name\", filename)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "7bdcd7d7-56f3-4b9f-924f-dd811dddf605",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df1.to_csv('bel_data_test.csv', index = False) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "12d9c974-85ed-4d10-af2e-0984a367d4be",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.16"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
152 NoNeed/Cro_Data_Loader.ipynb (Normal file)
@@ -0,0 +1,152 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "0a9a579c-df1c-4742-a0ed-e3db27bdc3e4",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd \n",
|
||||
"import os\n",
|
||||
"import imageio\n",
|
||||
"from skimage import color, transform\n",
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "99e5b7bf-4438-477c-9880-008fb864ae56",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"image_folder = 'data/Cro_Training_Set/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "da337206-82a6-416c-b5db-04466695a7b4",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
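    "# map the class letter taken from the filename (A-E) to an integer label 0-4\n",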
"def convert_letter(x):\n",
|
||||
" if x == 'A':\n",
|
||||
" y = 0\n",
|
||||
" if x == 'B':\n",
|
||||
" y = 1\n",
|
||||
" if x == 'C':\n",
|
||||
" y = 2\n",
|
||||
" if x == 'D':\n",
|
||||
" y = 3\n",
|
||||
" if x == 'E':\n",
|
||||
" y = 4\n",
|
||||
" return y\n",
|
||||
"def add_to_dataset(x,y,z,l):\n",
|
||||
" y.at[z,'Label'] = l\n",
|
||||
" for i in range(0,1024):\n",
|
||||
" y.at[z,docnames[i+1]] = x[i]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "0d28e8d0-9386-4ccd-a7bf-f19e9d59d6f6",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docnames = [\"Pixel \" + str(i) for i in range(1024)]\n",
|
||||
"docnames.insert(0, 'Label')\n",
|
||||
"df1 = pd.DataFrame(columns = docnames) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "ce03500d-7d48-493f-a00c-94c1d79bf02d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"i = 0\n",
|
||||
"for filename in os.listdir(image_folder):\n",
|
||||
" file_path = os.path.join(image_folder, filename)\n",
|
||||
"\n",
|
||||
" # Check if the file is a PNG file\n",
|
||||
" if filename.lower().endswith('.bmp'):\n",
|
||||
" # Extract the single letter from the filename (adjust the index accordingly)\n",
|
||||
" single_letter = filename[0]\n",
|
||||
"\n",
|
||||
" # Read the image and convert it to a NumPy array\n",
|
||||
" img_array = imageio.v2.imread(file_path)\n",
|
||||
"\n",
|
||||
" # Resize the image to 32x32\n",
|
||||
" resized_img_array = transform.resize(img_array, (32, 32))\n",
|
||||
"\n",
|
||||
" # Convert the RGB image to grayscale\n",
|
||||
" gray_img_array = color.rgb2gray(resized_img_array)\n",
|
||||
"\n",
|
||||
" # Flatten the image to 1024\n",
|
||||
" flattened_img_array = gray_img_array.flatten()\n",
|
||||
" \n",
|
||||
" label = convert_letter(single_letter)\n",
|
||||
" add_to_dataset(flattened_img_array,df1,i,label)\n",
|
||||
" i = i + 1\n",
|
||||
"\n",
|
||||
" # Append the processed image data to the list\n",
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "02056706-60c3-4390-990a-9f84bb56c049",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df1.to_csv('cro_data_test.csv', index = False) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "66da76d9-6286-4e1e-b91e-007f43642c14",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.17"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
382 NoNeed/Germna_Data_Loader.ipynb (Normal file)
@@ -0,0 +1,382 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "e9cfe5db-43cb-4298-9388-d869d7314ea2",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np \n",
|
||||
"import pandas as pd "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "9a476b58-bb18-4499-96cd-4bf38ca7566f",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
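    "# average over the colour channels of a (C, H, W) image tensor, then flatten to 1024 values\n",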
"def img_flat(x):\n",
|
||||
" gray_img = np.mean(x, axis=0)\n",
|
||||
" flat_img = gray_img.flatten()\n",
|
||||
" return flat_img\n",
|
||||
"def add_to_dataset(x,y,z,l):\n",
|
||||
" y.at[z,'Label'] = l\n",
|
||||
" for i in range(0,1024):\n",
|
||||
" y.at[z,docnames[i+1]] = x[i]\n",
|
||||
" #print(z , \"Completed\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "6130f3c5-97bd-4be8-8751-9ffbae99436b",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"docnames = [\"Pixel \" + str(i) for i in range(1024)]\n",
|
||||
"docnames.insert(0, 'Label')\n",
|
||||
"df1 = pd.DataFrame(columns = docnames) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "0d080bf5-067b-47a5-99dc-b22f145115b6",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Downloading https://sid.erda.dk/public/archives/daaeac0d7ce1152aea9b61d9f1e19370/GTSRB_Final_Test_Images.zip to data/gtsrb/GTSRB_Final_Test_Images.zip\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"100%|██████████| 88978620/88978620 [00:10<00:00, 8777572.15it/s] \n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Extracting data/gtsrb/GTSRB_Final_Test_Images.zip to data/gtsrb\n",
|
||||
"Downloading https://sid.erda.dk/public/archives/daaeac0d7ce1152aea9b61d9f1e19370/GTSRB_Final_Test_GT.zip to data/gtsrb/GTSRB_Final_Test_GT.zip\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"100%|██████████| 99620/99620 [00:00<00:00, 289763.24it/s]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Extracting data/gtsrb/GTSRB_Final_Test_GT.zip to data/gtsrb\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"from torchvision.datasets import GTSRB\n",
|
||||
"from torchvision import transforms\n",
|
||||
"\n",
|
||||
"# Define a transform to convert the data to a NumPy array\n",
|
||||
"transform = transforms.Compose([\n",
|
||||
" transforms.Resize((32, 32)), \n",
|
||||
" transforms.ToTensor(),\n",
|
||||
"])\n",
|
||||
"\n",
|
||||
"# Download the dataset\n",
|
||||
"dataset = GTSRB(root='./data', split=\"test\", transform=transform, download=True)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Iterate through the dataset and apply transformations\n",
|
||||
"for i in range(len(dataset)):\n",
|
||||
" image, label = dataset[i]\n",
|
||||
" label = int(label)\n",
|
||||
" # Convert the PyTorch tensor to a NumPy array\n",
|
||||
" image_np = np.array(image)\n",
|
||||
" temp_img = img_flat(image_np)\n",
|
||||
" add_to_dataset(temp_img,df1,i,label)\n",
|
||||
" #data['label'].append(label)\n",
|
||||
" \n",
|
||||
" \n",
|
||||
"# Convert the data to a DataFrame\n",
|
||||
"#df = pd.DataFrame(data)\n",
|
||||
"\n",
|
||||
"# Save the DataFrame to a CSV file\n",
|
||||
"#df.to_csv('gtsrb_data.csv', index=False)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "1b64da5c-1326-4258-8066-6ab5debfec9d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/html": [
|
||||
"<div>\n",
|
||||
"<style scoped>\n",
|
||||
" .dataframe tbody tr th:only-of-type {\n",
|
||||
" vertical-align: middle;\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" .dataframe tbody tr th {\n",
|
||||
" vertical-align: top;\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" .dataframe thead th {\n",
|
||||
" text-align: right;\n",
|
||||
" }\n",
|
||||
"</style>\n",
|
||||
"<table border=\"1\" class=\"dataframe\">\n",
|
||||
" <thead>\n",
|
||||
" <tr style=\"text-align: right;\">\n",
|
||||
" <th></th>\n",
|
||||
" <th>Label</th>\n",
|
||||
" <th>Pixel 0</th>\n",
|
||||
" <th>Pixel 1</th>\n",
|
||||
" <th>Pixel 2</th>\n",
|
||||
" <th>Pixel 3</th>\n",
|
||||
" <th>Pixel 4</th>\n",
|
||||
" <th>Pixel 5</th>\n",
|
||||
" <th>Pixel 6</th>\n",
|
||||
" <th>Pixel 7</th>\n",
|
||||
" <th>Pixel 8</th>\n",
|
||||
" <th>...</th>\n",
|
||||
" <th>Pixel 1014</th>\n",
|
||||
" <th>Pixel 1015</th>\n",
|
||||
" <th>Pixel 1016</th>\n",
|
||||
" <th>Pixel 1017</th>\n",
|
||||
" <th>Pixel 1018</th>\n",
|
||||
" <th>Pixel 1019</th>\n",
|
||||
" <th>Pixel 1020</th>\n",
|
||||
" <th>Pixel 1021</th>\n",
|
||||
" <th>Pixel 1022</th>\n",
|
||||
" <th>Pixel 1023</th>\n",
|
||||
" </tr>\n",
|
||||
" </thead>\n",
|
||||
" <tbody>\n",
|
||||
" <tr>\n",
|
||||
" <th>0</th>\n",
|
||||
" <td>16</td>\n",
|
||||
" <td>0.563399</td>\n",
|
||||
" <td>0.556863</td>\n",
|
||||
" <td>0.559477</td>\n",
|
||||
" <td>0.560784</td>\n",
|
||||
" <td>0.555556</td>\n",
|
||||
" <td>0.550327</td>\n",
|
||||
" <td>0.54902</td>\n",
|
||||
" <td>0.546405</td>\n",
|
||||
" <td>0.537255</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>0.551634</td>\n",
|
||||
" <td>0.54902</td>\n",
|
||||
" <td>0.545098</td>\n",
|
||||
" <td>0.550327</td>\n",
|
||||
" <td>0.554248</td>\n",
|
||||
" <td>0.54902</td>\n",
|
||||
" <td>0.539869</td>\n",
|
||||
" <td>0.547712</td>\n",
|
||||
" <td>0.551634</td>\n",
|
||||
" <td>0.554248</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>1</th>\n",
|
||||
" <td>1</td>\n",
|
||||
" <td>0.256209</td>\n",
|
||||
" <td>0.303268</td>\n",
|
||||
" <td>0.311111</td>\n",
|
||||
" <td>0.329412</td>\n",
|
||||
" <td>0.294118</td>\n",
|
||||
" <td>0.304575</td>\n",
|
||||
" <td>0.308497</td>\n",
|
||||
" <td>0.222222</td>\n",
|
||||
" <td>0.160784</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>0.865359</td>\n",
|
||||
" <td>0.810458</td>\n",
|
||||
" <td>0.524183</td>\n",
|
||||
" <td>0.265359</td>\n",
|
||||
" <td>0.201307</td>\n",
|
||||
" <td>0.213072</td>\n",
|
||||
" <td>0.228758</td>\n",
|
||||
" <td>0.240523</td>\n",
|
||||
" <td>0.27451</td>\n",
|
||||
" <td>0.281046</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>2</th>\n",
|
||||
" <td>38</td>\n",
|
||||
" <td>0.171242</td>\n",
|
||||
" <td>0.166013</td>\n",
|
||||
" <td>0.164706</td>\n",
|
||||
" <td>0.166013</td>\n",
|
||||
" <td>0.164706</td>\n",
|
||||
" <td>0.15817</td>\n",
|
||||
" <td>0.162092</td>\n",
|
||||
" <td>0.163399</td>\n",
|
||||
" <td>0.160784</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>0.150327</td>\n",
|
||||
" <td>0.115033</td>\n",
|
||||
" <td>0.135948</td>\n",
|
||||
" <td>0.118954</td>\n",
|
||||
" <td>0.115033</td>\n",
|
||||
" <td>0.134641</td>\n",
|
||||
" <td>0.142484</td>\n",
|
||||
" <td>0.155556</td>\n",
|
||||
" <td>0.169935</td>\n",
|
||||
" <td>0.179085</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>3</th>\n",
|
||||
" <td>33</td>\n",
|
||||
" <td>0.449673</td>\n",
|
||||
" <td>0.329412</td>\n",
|
||||
" <td>0.247059</td>\n",
|
||||
" <td>0.266667</td>\n",
|
||||
" <td>0.383007</td>\n",
|
||||
" <td>0.532026</td>\n",
|
||||
" <td>0.64183</td>\n",
|
||||
" <td>0.661438</td>\n",
|
||||
" <td>0.718954</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>0.477124</td>\n",
|
||||
" <td>0.562092</td>\n",
|
||||
" <td>0.654902</td>\n",
|
||||
" <td>0.776471</td>\n",
|
||||
" <td>0.738562</td>\n",
|
||||
" <td>0.696732</td>\n",
|
||||
" <td>0.756863</td>\n",
|
||||
" <td>0.877124</td>\n",
|
||||
" <td>0.946405</td>\n",
|
||||
" <td>0.882353</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>4</th>\n",
|
||||
" <td>11</td>\n",
|
||||
" <td>0.132026</td>\n",
|
||||
" <td>0.145098</td>\n",
|
||||
" <td>0.15817</td>\n",
|
||||
" <td>0.155556</td>\n",
|
||||
" <td>0.150327</td>\n",
|
||||
" <td>0.145098</td>\n",
|
||||
" <td>0.15817</td>\n",
|
||||
" <td>0.184314</td>\n",
|
||||
" <td>0.203922</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>0.147712</td>\n",
|
||||
" <td>0.141176</td>\n",
|
||||
" <td>0.138562</td>\n",
|
||||
" <td>0.145098</td>\n",
|
||||
" <td>0.151634</td>\n",
|
||||
" <td>0.156863</td>\n",
|
||||
" <td>0.155556</td>\n",
|
||||
" <td>0.162092</td>\n",
|
||||
" <td>0.171242</td>\n",
|
||||
" <td>0.177778</td>\n",
|
||||
" </tr>\n",
|
||||
" </tbody>\n",
|
||||
"</table>\n",
|
||||
"<p>5 rows × 1025 columns</p>\n",
|
||||
"</div>"
|
||||
],
|
||||
"text/plain": [
|
||||
" Label Pixel 0 Pixel 1 Pixel 2 Pixel 3 Pixel 4 Pixel 5 Pixel 6 \\\n",
|
||||
"0 16 0.563399 0.556863 0.559477 0.560784 0.555556 0.550327 0.54902 \n",
|
||||
"1 1 0.256209 0.303268 0.311111 0.329412 0.294118 0.304575 0.308497 \n",
|
||||
"2 38 0.171242 0.166013 0.164706 0.166013 0.164706 0.15817 0.162092 \n",
|
||||
"3 33 0.449673 0.329412 0.247059 0.266667 0.383007 0.532026 0.64183 \n",
|
||||
"4 11 0.132026 0.145098 0.15817 0.155556 0.150327 0.145098 0.15817 \n",
|
||||
"\n",
|
||||
" Pixel 7 Pixel 8 ... Pixel 1014 Pixel 1015 Pixel 1016 Pixel 1017 \\\n",
|
||||
"0 0.546405 0.537255 ... 0.551634 0.54902 0.545098 0.550327 \n",
|
||||
"1 0.222222 0.160784 ... 0.865359 0.810458 0.524183 0.265359 \n",
|
||||
"2 0.163399 0.160784 ... 0.150327 0.115033 0.135948 0.118954 \n",
|
||||
"3 0.661438 0.718954 ... 0.477124 0.562092 0.654902 0.776471 \n",
|
||||
"4 0.184314 0.203922 ... 0.147712 0.141176 0.138562 0.145098 \n",
|
||||
"\n",
|
||||
" Pixel 1018 Pixel 1019 Pixel 1020 Pixel 1021 Pixel 1022 Pixel 1023 \n",
|
||||
"0 0.554248 0.54902 0.539869 0.547712 0.551634 0.554248 \n",
|
||||
"1 0.201307 0.213072 0.228758 0.240523 0.27451 0.281046 \n",
|
||||
"2 0.115033 0.134641 0.142484 0.155556 0.169935 0.179085 \n",
|
||||
"3 0.738562 0.696732 0.756863 0.877124 0.946405 0.882353 \n",
|
||||
"4 0.151634 0.156863 0.155556 0.162092 0.171242 0.177778 \n",
|
||||
"\n",
|
||||
"[5 rows x 1025 columns]"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"df1.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "deaf22c0-5aae-45e4-a4db-196fbcc001a1",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df1.to_csv('gtsrb_data_test.csv', index = False) "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "bbc65ef5-4313-42ed-8690-557ebca488b8",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.17"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
185 bel_semantics.ipynb (Normal file)
@@ -0,0 +1,185 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import net.modules\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"from net.transcoder import Transcoder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"filepath = 'data/bel_data_test.csv'\n",
|
||||
"train_loader, test_loader, input_size = load_and_prepare_data(file_path=filepath)\n",
|
||||
"\n",
|
||||
"print(\"X_train shape:\", input_size.shape)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# input_size = X_train.shape[0]\n",
|
||||
"# hidden_size = 128\n",
|
||||
"# output_size = 61\n",
|
||||
"\n",
|
||||
"architecture = [input_size, [128], 61]\n",
|
||||
"activations = ['leaky_relu','softmax']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Initialize transcoder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# bl_transcoder = Transcoder(input_size, hidden_size, output_size, 'leaky_relu', 'softmax')\n",
|
||||
"bl_transcoder = Transcoder(architecture, hidden_activation='relu', output_activation='softmax')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Train Encoders and save weights\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# # Train the encoder if need\n",
|
||||
"\n",
|
||||
"bl_transcoder.train_model(train_loader, test_loader, learning_rate=0.001, epochs=1000)\n",
|
||||
"# bl_transcoder.train_with_validation(X_train, Y_train, alpha=0.1, iterations=1000)\n",
|
||||
"bl_transcoder.save_results('bt_1h128n')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Load weights"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"bl_transcoder.load_weights('weights/bt_1h128n_leaky_relu_weights.pth')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Analysis"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Plot learning curves\n",
|
||||
"bl_transcoder.plot_learning_curves()\n",
|
||||
"\n",
|
||||
"# Visualize encoded space\n",
|
||||
"bl_transcoder.plot_encoded_space(X_test, Y_test)\n",
|
||||
"\n",
|
||||
"print(X_test.shape)\n",
|
||||
"print(X_train.shape)\n",
|
||||
"# Check reconstructions\n",
|
||||
"bl_transcoder.plot_reconstructions(X_test)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Transcode images"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"num_images = 2\n",
|
||||
"indices = np.random.choice(X_test.shape[1], num_images, replace=False)\n",
|
||||
"\n",
|
||||
"for idx in indices:\n",
|
||||
" original_image = X_test[:, idx]\n",
|
||||
" \n",
|
||||
" # Encode the image\n",
|
||||
" encoded = bl_transcoder.encode_image(original_image.reshape(-1, 1))\n",
|
||||
" \n",
|
||||
" # Decode the image\n",
|
||||
" decoded = bl_transcoder.decode_image(encoded)\n",
|
||||
"\n",
|
||||
" # Visualize original, encoded, and decoded images\n",
|
||||
" visualize_transcoding(original_image, encoded, decoded, idx)\n",
|
||||
"\n",
|
||||
" print(f\"Image {idx}:\")\n",
|
||||
" print(\"Original shape:\", original_image.shape)\n",
|
||||
" print(\"Encoded shape:\", encoded.shape)\n",
|
||||
" print(\"Decoded shape:\", decoded.shape)\n",
|
||||
" print(\"Encoded vector:\", encoded.flatten()) # Print flattened encoded vector\n",
|
||||
" print(\"\\n\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "semantics",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.1"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
203 enc_dec.ipynb (Normal file)
@@ -0,0 +1,203 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"\n",
|
||||
"from net.modules import *\n",
|
||||
"from net.decoder import *\n",
|
||||
"from net.encoder import *"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Set up parameters (modify these as needed)\n",
|
||||
"file_path = \"data/bel_data_test.csv\" # Replace with your actual file path\n",
|
||||
"encoder_type = \"pca\" # Choose \"regular\" or \"pca\"\n",
|
||||
"load_weights = False # Set to True if you want to load pre-trained weights\n",
|
||||
"weight_file = \"weights/bel_weights.npz\" # Only used if load_weights is True\n",
|
||||
"dataset=\"bel\"\n",
|
||||
"\n",
|
||||
"# Load and prepare the data\n",
|
||||
"X_train, Y_train, X_test, Y_test = load_and_prepare_data(file_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Set hyperparameters\n",
|
||||
"input_size = X_train.shape[0]\n",
|
||||
"hidden_size = 128\n",
|
||||
"output_size = 61 # Number of classes\n",
|
||||
"pca_components = 50\n",
|
||||
"\n",
|
||||
"alpha = 0.01\n",
|
||||
"iterations = 2000\n",
|
||||
"num_trials = 5"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Choose encoder type\n",
|
||||
"if encoder_type == \"regular\":\n",
|
||||
" EncoderClass = Encoder\n",
|
||||
" encoder_name = \"Regular Encoder\"\n",
|
||||
"elif encoder_type == \"pca\":\n",
|
||||
" EncoderClass = PCAEncoder\n",
|
||||
" encoder_name = \"PCAEncoder\"\n",
|
||||
"else:\n",
|
||||
" raise ValueError(\"Invalid encoder type selected\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Training PCAEncoder\n",
|
||||
"Trial 1/5\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"ename": "ValueError",
|
||||
"evalue": "X has 50 features, but StandardScaler is expecting 1024 features as input.",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||||
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
|
||||
"Cell \u001b[0;32mIn[5], line 9\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTraining \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mencoder_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m----> 9\u001b[0m best_weights, best_accuracy, all_accuracies \u001b[38;5;241m=\u001b[39m \u001b[43mbest_params\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 10\u001b[0m \u001b[43m \u001b[49m\u001b[43mEncoderClass\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mX_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY_train\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mhidden_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43moutput_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 11\u001b[0m \u001b[43m \u001b[49m\u001b[43malpha\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43miterations\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_trials\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpca_components\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpca_components\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mEncoderClass\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mPCAEncoder\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\n\u001b[1;32m 12\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mBest accuracy achieved with \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mencoder_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mbest_accuracy\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 15\u001b[0m \u001b[38;5;66;03m# Create encoder with best weights\u001b[39;00m\n",
|
||||
"File \u001b[0;32m~/Projects/School/Sem_Imp/net/modules.py:15\u001b[0m, in \u001b[0;36mbest_params\u001b[0;34m(EncoderClass, X, Y, input_size, hidden_size, output_size, alpha, iterations, num_trials, **kwargs)\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTrial \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mtrial\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m/\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mnum_trials\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 14\u001b[0m encoder \u001b[38;5;241m=\u001b[39m EncoderClass(input_size, hidden_size, output_size, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m---> 15\u001b[0m accuracies \u001b[38;5;241m=\u001b[39m \u001b[43mencoder\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43miterations\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43malpha\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 16\u001b[0m final_accuracy \u001b[38;5;241m=\u001b[39m accuracies[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\n\u001b[1;32m 17\u001b[0m all_accuracies\u001b[38;5;241m.\u001b[39mappend(accuracies)\n",
|
||||
"File \u001b[0;32m~/Projects/School/Sem_Imp/net/encoder.py:112\u001b[0m, in \u001b[0;36mPCAEncoder.train\u001b[0;34m(self, X, Y, iterations, alpha)\u001b[0m\n\u001b[1;32m 110\u001b[0m X_scaled \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mscaler\u001b[38;5;241m.\u001b[39mtransform(X\u001b[38;5;241m.\u001b[39mT)\u001b[38;5;241m.\u001b[39mT\n\u001b[1;32m 111\u001b[0m X_pca \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpca\u001b[38;5;241m.\u001b[39mtransform(X_scaled\u001b[38;5;241m.\u001b[39mT)\u001b[38;5;241m.\u001b[39mT\n\u001b[0;32m--> 112\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX_pca\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43miterations\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43malpha\u001b[49m\u001b[43m)\u001b[49m\n",
|
||||
"File \u001b[0;32m~/Projects/School/Sem_Imp/net/encoder.py:64\u001b[0m, in \u001b[0;36mEncoder.train\u001b[0;34m(self, X, Y, iterations, alpha)\u001b[0m\n\u001b[1;32m 61\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mupdate_params(dW1, db1, dW2, db2, alpha)\n\u001b[1;32m 63\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m%\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[0;32m---> 64\u001b[0m accuracy \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_accuracy\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 65\u001b[0m accuracies\u001b[38;5;241m.\u001b[39mappend(accuracy)\n\u001b[1;32m 66\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mIteration \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mi\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m, Accuracy: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00maccuracy\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n",
|
||||
"File \u001b[0;32m~/Projects/School/Sem_Imp/net/encoder.py:115\u001b[0m, in \u001b[0;36mPCAEncoder.get_accuracy\u001b[0;34m(self, X, Y)\u001b[0m\n\u001b[1;32m 114\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_accuracy\u001b[39m(\u001b[38;5;28mself\u001b[39m, X, Y):\n\u001b[0;32m--> 115\u001b[0m X_scaled \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscaler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mT\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mT\n\u001b[1;32m 116\u001b[0m X_pca \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpca\u001b[38;5;241m.\u001b[39mtransform(X_scaled\u001b[38;5;241m.\u001b[39mT)\u001b[38;5;241m.\u001b[39mT\n\u001b[1;32m 117\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28msuper\u001b[39m()\u001b[38;5;241m.\u001b[39mget_accuracy(X_pca, Y)\n",
|
||||
"File \u001b[0;32m~/.pyenv/versions/3.11.6/envs/tf/lib/python3.11/site-packages/sklearn/utils/_set_output.py:313\u001b[0m, in \u001b[0;36m_wrap_method_output.<locals>.wrapped\u001b[0;34m(self, X, *args, **kwargs)\u001b[0m\n\u001b[1;32m 311\u001b[0m \u001b[38;5;129m@wraps\u001b[39m(f)\n\u001b[1;32m 312\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapped\u001b[39m(\u001b[38;5;28mself\u001b[39m, X, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[0;32m--> 313\u001b[0m data_to_wrap \u001b[38;5;241m=\u001b[39m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 314\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data_to_wrap, \u001b[38;5;28mtuple\u001b[39m):\n\u001b[1;32m 315\u001b[0m \u001b[38;5;66;03m# only wrap the first output for cross decomposition\u001b[39;00m\n\u001b[1;32m 316\u001b[0m return_tuple \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 317\u001b[0m _wrap_data_with_container(method, data_to_wrap[\u001b[38;5;241m0\u001b[39m], X, \u001b[38;5;28mself\u001b[39m),\n\u001b[1;32m 318\u001b[0m \u001b[38;5;241m*\u001b[39mdata_to_wrap[\u001b[38;5;241m1\u001b[39m:],\n\u001b[1;32m 319\u001b[0m )\n",
|
||||
"File \u001b[0;32m~/.pyenv/versions/3.11.6/envs/tf/lib/python3.11/site-packages/sklearn/preprocessing/_data.py:1045\u001b[0m, in \u001b[0;36mStandardScaler.transform\u001b[0;34m(self, X, copy)\u001b[0m\n\u001b[1;32m 1042\u001b[0m check_is_fitted(\u001b[38;5;28mself\u001b[39m)\n\u001b[1;32m 1044\u001b[0m copy \u001b[38;5;241m=\u001b[39m copy \u001b[38;5;28;01mif\u001b[39;00m copy \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcopy\n\u001b[0;32m-> 1045\u001b[0m X \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_validate_data\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1046\u001b[0m \u001b[43m \u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1047\u001b[0m \u001b[43m \u001b[49m\u001b[43mreset\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1048\u001b[0m \u001b[43m \u001b[49m\u001b[43maccept_sparse\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcsr\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1049\u001b[0m \u001b[43m \u001b[49m\u001b[43mcopy\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcopy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1050\u001b[0m \u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mFLOAT_DTYPES\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1051\u001b[0m \u001b[43m \u001b[49m\u001b[43mforce_writeable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1052\u001b[0m \u001b[43m \u001b[49m\u001b[43mforce_all_finite\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mallow-nan\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1053\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1055\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m sparse\u001b[38;5;241m.\u001b[39missparse(X):\n\u001b[1;32m 1056\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mwith_mean:\n",
|
||||
"File \u001b[0;32m~/.pyenv/versions/3.11.6/envs/tf/lib/python3.11/site-packages/sklearn/base.py:654\u001b[0m, in \u001b[0;36mBaseEstimator._validate_data\u001b[0;34m(self, X, y, reset, validate_separately, cast_to_ndarray, **check_params)\u001b[0m\n\u001b[1;32m 651\u001b[0m out \u001b[38;5;241m=\u001b[39m X, y\n\u001b[1;32m 653\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m no_val_X \u001b[38;5;129;01mand\u001b[39;00m check_params\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mensure_2d\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mTrue\u001b[39;00m):\n\u001b[0;32m--> 654\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_check_n_features\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mreset\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreset\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 656\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out\n",
|
||||
"File \u001b[0;32m~/.pyenv/versions/3.11.6/envs/tf/lib/python3.11/site-packages/sklearn/base.py:443\u001b[0m, in \u001b[0;36mBaseEstimator._check_n_features\u001b[0;34m(self, X, reset)\u001b[0m\n\u001b[1;32m 440\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m\n\u001b[1;32m 442\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m n_features \u001b[38;5;241m!=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mn_features_in_:\n\u001b[0;32m--> 443\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 444\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mX has \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mn_features\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m features, but \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 445\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mis expecting \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mn_features_in_\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m features as input.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 446\u001b[0m )\n",
|
||||
"\u001b[0;31mValueError\u001b[0m: X has 50 features, but StandardScaler is expecting 1024 features as input."
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Load pre-trained weights or train a new model\n",
|
||||
"if load_weights:\n",
|
||||
" encoder = EncoderClass(input_size, hidden_size, output_size, pca_components=pca_components)\n",
|
||||
" encoder.load_weights(weight_file)\n",
|
||||
" print(f\"Weights loaded for {encoder_name}\")\n",
|
||||
"else:\n",
|
||||
" print(f\"Training {encoder_name}\")\n",
|
||||
" best_weights, best_accuracy, all_accuracies = best_params(\n",
|
||||
" EncoderClass, X_train, Y_train, input_size, hidden_size, output_size, \n",
|
||||
" alpha, iterations, num_trials, pca_components=pca_components\n",
|
||||
" )\n",
|
||||
" print(f\"Best accuracy achieved with {encoder_name}: {best_accuracy}\")\n",
|
||||
"\n",
|
||||
" # Create encoder with best weights\n",
|
||||
" encoder = EncoderClass(input_size, hidden_size, output_size, pca_components=pca_components)\n",
|
||||
" encoder.W1, encoder.b1, encoder.W2, encoder.b2 = best_weights\n",
|
||||
"\n",
|
||||
" # Save the best weights\n",
|
||||
" weight_file = f\"{dataset}_{encoder_name.lower().replace(' ', '_')}_weights.npz\"\n",
|
||||
" encoder.save_weights(weight_file)\n",
|
||||
" print(f\"Best weights saved to {weight_file}\")\n"
|
||||
]
|
||||
},
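The ValueError above comes from a double transform: PCAEncoder.train() standardizes and PCA-reduces X before handing it to Encoder.train(), and Encoder.train() then calls self.get_accuracy() on that already-reduced data, where PCAEncoder.get_accuracy() applies the fitted StandardScaler (which expects the 1024 raw features) a second time. A minimal sketch of one possible fix, assuming only the class layout implied by the traceback (net/encoder.py itself is not reproduced in this diff), is to transform only when the input still has the raw dimensionality:

from net.encoder import Encoder, PCAEncoder        # classes implied by the traceback

class PCAEncoderPatched(PCAEncoder):                # hypothetical subclass, for illustration only
    def get_accuracy(self, X, Y):
        # Transform only raw-pixel input; data coming from train() is already in PCA space.
        if X.shape[0] == self.scaler.n_features_in_:
            X = self.pca.transform(self.scaler.transform(X.T)).T
        return Encoder.get_accuracy(self, X, Y)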
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Initialize the appropriate Decoder\n",
|
||||
"decoder = PCADecoder(encoder) if EncoderClass == PCAEncoder else Decoder(encoder)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Test the encoding-decoding process\n",
|
||||
"num_test_images = 5\n",
|
||||
"for i in range(num_test_images):\n",
|
||||
" original_image = X_test[:, i].reshape(input_size, 1)\n",
|
||||
" encoded = encoder.encode(original_image)\n",
|
||||
" reconstructed = decoder.decode(encoded)\n",
|
||||
"\n",
|
||||
" # Visualize the results\n",
|
||||
" img_dim = int(np.sqrt(input_size))\n",
|
||||
" plt.figure(figsize=(15, 5))\n",
|
||||
" \n",
|
||||
" plt.subplot(1, 3, 1)\n",
|
||||
" plt.imshow(original_image.reshape(img_dim, img_dim), cmap='gray')\n",
|
||||
" plt.title(f\"Original Image {i+1}\")\n",
|
||||
" \n",
|
||||
" plt.subplot(1, 3, 2)\n",
|
||||
" plt.imshow(encoded.reshape(output_size, 1), cmap='viridis', aspect='auto')\n",
|
||||
" plt.title(f\"Encoded Image {i+1}\")\n",
|
||||
" \n",
|
||||
" plt.subplot(1, 3, 3)\n",
|
||||
" plt.imshow(reconstructed.reshape(img_dim, img_dim), cmap='gray')\n",
|
||||
" plt.title(f\"Reconstructed Image {i+1}\")\n",
|
||||
" \n",
|
||||
" plt.tight_layout()\n",
|
||||
" plt.show()\n",
|
||||
"\n",
|
||||
" # Calculate and print the mean squared error\n",
|
||||
" mse = np.mean((original_image - reconstructed) ** 2)\n",
|
||||
" print(f\"Mean Squared Error for Image {i+1}: {mse}\")\n",
|
||||
"\n",
|
||||
"print(\"Encoding-decoding process completed.\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "tf",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
423
encoder.ipynb
Normal file
423
encoder.ipynb
Normal file
|
|
@ -0,0 +1,423 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "407c9473",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"id": "14cccaae-d3b6-4ae5-a28a-5fed4b998783",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def init_params():\n",
|
||||
" W1 = np.random.rand(10,1024) - 0.5\n",
|
||||
" b1 = np.random.rand(10,1) - 0.5\n",
|
||||
" W2 = np.random.rand(61,10) - 0.5\n",
|
||||
" b2 = np.random.rand(61,1) - 0.5\n",
|
||||
" return W1, b1 , W2, b2\n",
|
||||
"def ReLU(Z):\n",
|
||||
" return np.maximum(Z,0)\n",
|
||||
"def softmax(Z):\n",
|
||||
" A = np.exp(Z) / sum(np.exp(Z))\n",
|
||||
" return A\n",
|
||||
"def forward_prop(W1, b1, W2, b2, X):\n",
|
||||
" Z1 = W1.dot(X) + b1\n",
|
||||
" A1 = ReLU(Z1)\n",
|
||||
" Z2 = W2.dot(A1) + b2\n",
|
||||
" A2 = softmax(Z2)\n",
|
||||
" return Z1, A1, Z2, A2\n",
|
||||
"def ReLU_deriv(Z):\n",
|
||||
" return Z > 0\n",
|
||||
"def one_hot(Y):\n",
|
||||
" one_hot_Y = np.zeros((Y.size, Y.max() + 1))\n",
|
||||
" one_hot_Y[np.arange(Y.size), Y] = 1\n",
|
||||
" one_hot_Y = one_hot_Y.T\n",
|
||||
" return one_hot_Y\n",
|
||||
"def backward_prop(Z1, A1, Z2, A2, W1, W2, X, Y):\n",
|
||||
" one_hot_Y = one_hot(Y)\n",
|
||||
" dZ2 = A2 - one_hot_Y\n",
|
||||
" dW2 = 1 / m * dZ2.dot(A1.T)\n",
|
||||
" db2 = 1 / m * np.sum(dZ2)\n",
|
||||
" dZ1 = W2.T.dot(dZ2) * ReLU_deriv(Z1)\n",
|
||||
" dW1 = 1 / m * dZ1.dot(X.T)\n",
|
||||
" db1 = 1 / m * np.sum(dZ1)\n",
|
||||
" return dW1, db1, dW2, db2\n",
|
||||
"def update_params(W1, b1, W2, b2, dW1, db1, dW2, db2, alpha):\n",
|
||||
" W1 = W1 - alpha * dW1\n",
|
||||
" b1 = b1 - alpha * db1 \n",
|
||||
" W2 = W2 - alpha * dW2 \n",
|
||||
" b2 = b2 - alpha * db2 \n",
|
||||
" return W1, b1, W2, b2\n",
|
||||
"def get_predictions(A2):\n",
|
||||
" return np.argmax(A2, 0)\n",
|
||||
"def get_accuracy(predictions, Y):\n",
|
||||
" #print(predictions, Y)\n",
|
||||
" return np.sum(predictions == Y) / Y.size"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"id": "ec251927-46fc-413d-abb8-34fafb5a429d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"#import data_ready as dr\n",
|
||||
"import os\n",
|
||||
"import struct\n",
|
||||
"import numpy as np\n",
|
||||
"from matplotlib import pyplot as plt \n",
|
||||
"\n",
|
||||
"'''\n",
|
||||
"npz = np.load(\"weights.npz\")\n",
|
||||
"W1 = np.array(npz['arr_0'])\n",
|
||||
"b1 = np.array(npz['arr_1'])\n",
|
||||
"W2 = np.array(npz['arr_2'])\n",
|
||||
"b2 = np.array(npz['arr_3'])\n",
|
||||
"'''\n",
|
||||
"def encode_image(X,W1,b1,W2,b2):\n",
|
||||
" current_image = X\n",
|
||||
" _, _, _, A2 = forward_prop(W1,b1,W2,b2,current_image)\n",
|
||||
" return A2\n",
|
||||
" #print(A2)\n",
|
||||
" #np.save('pred', A2)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "ff4758bc-7e5d-47ba-a3d4-aa4afde6165f",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Load in The Weights"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"id": "d323d295-5f29-4233-975c-1d5eab88a830",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"bt_npz = np.load(\"bt_weights.npz\")\n",
|
||||
"cr_npz = np.load(\"cr_weights.npz\")\n",
|
||||
"gt_npz = np.load(\"gt_weights.npz\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"id": "62160a9e-f166-47d1-88af-cc767385d09f",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"W1_bt = np.array(bt_npz['arr_0'])\n",
|
||||
"b1_bt = np.array(bt_npz['arr_1'])\n",
|
||||
"W2_bt = np.array(bt_npz['arr_2'])\n",
|
||||
"b2_bt = np.array(bt_npz['arr_3'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"id": "daa2d8e0-8f76-436d-9e3d-dfb711808e43",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"W1_cr = np.array(cr_npz['arr_0'])\n",
|
||||
"b1_cr = np.array(cr_npz['arr_1'])\n",
|
||||
"W2_cr = np.array(cr_npz['arr_2'])\n",
|
||||
"b2_cr = np.array(cr_npz['arr_3'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "17e3cd24-21b1-440c-8e38-f91597368771",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"W1_gt = np.array(gt_npz['arr_0'])\n",
|
||||
"b1_gt = np.array(gt_npz['arr_1'])\n",
|
||||
"W2_gt = np.array(gt_npz['arr_2'])\n",
|
||||
"b2_gt = np.array(gt_npz['arr_3'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "4ae55872-8e28-419d-85af-1ef185a254ce",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Load in the Dataset"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "0f230135-264d-4b89-b0a6-cab229ed0047",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"datac = pd.read_csv('cro_data_test.csv')\n",
|
||||
"datac = np.array(datac)\n",
|
||||
"\n",
|
||||
"m,n = datac.shape\n",
|
||||
"data_trainc = datac[1000:m].T\n",
|
||||
"\n",
|
||||
"Y_trainc = data_trainc[0].astype(int)\n",
|
||||
"X_trainc = data_trainc[1:n]\n",
|
||||
"\n",
|
||||
"current_image_c = X_trainc[:,1,None]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "1f2a757a-4c29-4271-a895-03415765b105",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"datag = pd.read_csv('gtsrb_data_test.csv')\n",
|
||||
"datag = np.array(datag)\n",
|
||||
"\n",
|
||||
"m,n = datag.shape\n",
|
||||
"data_traing = datag[1000:m].T\n",
|
||||
"\n",
|
||||
"Y_traing = data_traing[0].astype(int)\n",
|
||||
"X_traing = data_traing[1:n]\n",
|
||||
"\n",
|
||||
"current_image_g = X_traing[:,1,None]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"id": "7026b28c-ef29-4706-9e9b-ed2417ab06eb",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"datab = pd.read_csv('bel_data_test.csv')\n",
|
||||
"datab = np.array(datab)\n",
|
||||
"\n",
|
||||
"m,n = datab.shape\n",
|
||||
"data_trainb = datab[1000:m].T\n",
|
||||
"\n",
|
||||
"Y_trainb = data_trainb[0].astype(int)\n",
|
||||
"X_trainb = data_trainb[1:n]\n",
|
||||
"\n",
|
||||
"current_image_b = X_trainc[:,1,None]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "c0552a80-ab8f-4ac0-ba4b-bc3ab26a1944",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Encoding 1 Image"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"id": "3daa565f-64f0-4294-9c36-bb87d1904ad0",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c1 = encode_image(current_image_c,W1_cr,b1_cr,W2_cr,b2_cr)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"id": "cd9d9223-d626-43e9-86d7-ef9bd24e0bc7",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"b1 = encode_image(current_image_b,W1_bt,b1_bt,W2_bt,b2_bt)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"id": "22443477-852f-4978-ae65-6883ed9d1e6b",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"g1 = encode_image(current_image_g,W1_gt,b1_gt,W2_gt,b2_gt)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"id": "b33a151f-d6b1-4386-8fa8-7e88da8951c8",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np.save('data/Single_Encoding/pred_c', c1)\n",
|
||||
"np.save('data/Single_Encoding/pred_b', b1)\n",
|
||||
"np.save('data/Single_Encoding/pred_g', g1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "a0df95db-6f10-4f6e-a043-0a82b6e3763b",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Encoding 900 Images"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 44,
|
||||
"id": "002bd5b5-aead-413b-8051-64326ebef595",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,900):\n",
|
||||
" current_image_g = X_traing[:,x,None]\n",
|
||||
" g1 = encode_image(current_image_g,W1_gt,b1_gt,W2_gt,b2_gt)\n",
|
||||
" np.save('data/9_G/pred_g' + str(x), g1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 45,
|
||||
"id": "6513e7f4-ef60-4d96-8576-540d4ef91c7b",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,900):\n",
|
||||
" current_image_b = X_trainb[:,x,None]\n",
|
||||
" b1 = encode_image(current_image_b,W1_bt,b1_bt,W2_bt,b2_bt)\n",
|
||||
" np.save('data/9_B/pred_b' + str(x), b1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 46,
|
||||
"id": "fc9144a0-b415-463d-bcc7-5ebaed0467ce",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,900):\n",
|
||||
" current_image_c = X_trainc[:,x,None]\n",
|
||||
" c1 = encode_image(current_image_c,W1_cr,b1_cr,W2_cr,b2_cr)\n",
|
||||
" np.save('data/9_C/pred_c' + str(x), c1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "4fbd5222-e2b9-4c1d-82a0-6294136a2a71",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Encoding 1800 Images"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 47,
|
||||
"id": "00eba2e4-b5a1-4905-a20f-42d71eebaeff",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,1800):\n",
|
||||
" current_image_g = X_traing[:,x,None]\n",
|
||||
" g1 = encode_image(current_image_g,W1_gt,b1_gt,W2_gt,b2_gt)\n",
|
||||
" np.save('data/18_G/pred_g' + str(x), g1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 48,
|
||||
"id": "ee1bd756-09c1-401f-8301-95357ae52446",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,1800):\n",
|
||||
" current_image_b = X_trainb[:,x,None]\n",
|
||||
" b1 = encode_image(current_image_b,W1_bt,b1_bt,W2_bt,b2_bt)\n",
|
||||
" np.save('data/18_B/pred_b' + str(x), b1)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 49,
|
||||
"id": "65dfac08-626e-47ac-9d35-15b794998f8d",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for x in range(0,1800):\n",
|
||||
" current_image_c = X_trainc[:,x,None]\n",
|
||||
" c1 = encode_image(current_image_c,W1_cr,b1_cr,W2_cr,b2_cr)\n",
|
||||
" np.save('data/18_C/pred_c' + str(x), c1)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.7.16"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
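Each loop above serializes one encoded image per .npy file under data/9_*/ and data/18_*/. A small illustrative sketch (not part of the commit) of reading them back into a single matrix for downstream use; the filename pattern matches the np.save calls above, and n_classes is whatever output size the corresponding network was trained with:

import numpy as np

n_images = 1800
encodings_g = np.hstack([np.load(f'data/18_G/pred_g{x}.npy') for x in range(n_images)])
print(encodings_g.shape)   # (n_classes, 1800): one softmax column per encoded image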
826
eval.ipynb
Normal file
826
eval.ipynb
Normal file
File diff suppressed because one or more lines are too long
59
net/activation.py
Normal file
59
net/activation.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
import numpy as np
|
||||
|
||||
class Activations:
|
||||
@staticmethod
|
||||
def LeakyReLU(x, alpha=0.01):
|
||||
return np.where(x > 0, x, alpha * x)
|
||||
|
||||
@staticmethod
|
||||
def LeakyReLU_deriv(x, alpha=0.01):
|
||||
return np.where(x > 0, 1, alpha)
|
||||
|
||||
@staticmethod
|
||||
def InverseLeakyReLU(x, alpha=0.01):
|
||||
return np.where(x > 0, x, x / alpha)
|
||||
|
||||
@staticmethod
|
||||
def ReLU(x):
|
||||
return np.maximum(0, x)
|
||||
|
||||
@staticmethod
|
||||
def ReLU_deriv(x):
|
||||
return np.where(x > 0, 1, 0)
|
||||
|
||||
@staticmethod
|
||||
def InverseReLU(x):
|
||||
return np.maximum(0, x) # Note: This is lossy for negative values
|
||||
|
||||
@staticmethod
|
||||
def Sigmoid(x):
|
||||
return 1 / (1 + np.exp(-x))
|
||||
|
||||
@staticmethod
|
||||
def Sigmoid_deriv(x):
|
||||
s = Activations.Sigmoid(x)
|
||||
return s * (1 - s)
|
||||
|
||||
@staticmethod
|
||||
def InverseSigmoid(x):
|
||||
return np.log(x / (1 - x))
|
||||
|
||||
@staticmethod
|
||||
def Softmax(x):
|
||||
exp_x = np.exp(x - np.max(x, axis=0, keepdims=True))
|
||||
return exp_x / np.sum(exp_x, axis=0, keepdims=True)
|
||||
|
||||
@staticmethod
|
||||
def InverseSoftmax(x):
|
||||
return np.log(x) - np.max(np.log(x))
|
||||
|
||||
@classmethod
|
||||
def get_function_name(cls, func):
|
||||
return func.__name__
|
||||
|
||||
@classmethod
|
||||
def get_all_activation_names(cls):
|
||||
return [name for name, func in cls.__dict__.items()
|
||||
if callable(func) and not name.startswith("__") and
|
||||
not name.endswith("_deriv") and not name.startswith("Inverse") and
|
||||
not name in ['get_function_name', 'get_all_activation_names']]
|
||||
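A short usage sketch, not part of the commit, of how the Inverse* functions pair with their forward counterparts during decoding: LeakyReLU and Sigmoid invert exactly (Sigmoid numerically, for finite inputs), while plain ReLU and Softmax are lossy, as the comments above note.

import numpy as np
from net.activation import Activations as af

x = np.linspace(-3.0, 3.0, 7)
assert np.allclose(af.InverseLeakyReLU(af.LeakyReLU(x)), x)   # exact round trip
assert np.allclose(af.InverseSigmoid(af.Sigmoid(x)), x)       # logit undoes sigmoid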
65
net/loss.py
Normal file
65
net/loss.py
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
import numpy as np
|
||||
|
||||
class Loss:
|
||||
@staticmethod
|
||||
def mean_squared_error(Y, A):
|
||||
""" Mean Squared Error (MSE) """
|
||||
return np.mean((Y - A) ** 2)
|
||||
|
||||
@staticmethod
|
||||
def mean_absolute_error(Y, A):
|
||||
""" Mean Absolute Error (MAE) """
|
||||
return np.mean(np.abs(Y - A))
|
||||
|
||||
@staticmethod
|
||||
def huber_loss(Y, A, delta=1.0):
|
||||
""" Huber Loss """
|
||||
error = Y - A
|
||||
is_small_error = np.abs(error) <= delta
|
||||
squared_loss = 0.5 * error ** 2
|
||||
linear_loss = delta * (np.abs(error) - 0.5 * delta)
|
||||
return np.where(is_small_error, squared_loss, linear_loss).mean()
|
||||
|
||||
@staticmethod
|
||||
def binary_cross_entropy_loss(Y, A):
|
||||
""" Binary Cross-Entropy Loss """
|
||||
m = Y.shape[1]
|
||||
return -np.sum(Y * np.log(A + 1e-8) + (1 - Y) * np.log(1 - A + 1e-8)) / m
|
||||
|
||||
@staticmethod
|
||||
def categorical_cross_entropy_loss(Y, A):
|
||||
""" Categorical Cross-Entropy Loss (for softmax) """
|
||||
m = Y.shape[1]
|
||||
return -np.sum(Y * np.log(A + 1e-8)) / m
|
||||
|
||||
@staticmethod
|
||||
def hinge_loss(Y, A):
|
||||
""" Hinge Loss (used in SVM) """
|
||||
return np.mean(np.maximum(0, 1 - Y * A))
|
||||
|
||||
@staticmethod
|
||||
def kl_divergence(P, Q):
|
||||
""" Kullback-Leibler Divergence """
|
||||
return np.sum(P * np.log(P / (Q + 1e-8)))
|
||||
|
||||
@staticmethod
|
||||
def poisson_loss(Y, A):
|
||||
""" Poisson Loss """
|
||||
return np.mean(A - Y * np.log(A + 1e-8))
|
||||
|
||||
@staticmethod
|
||||
def cosine_proximity_loss(Y, A):
|
||||
""" Cosine Proximity Loss """
|
||||
dot_product = np.sum(Y * A, axis=0)
|
||||
norms = np.linalg.norm(Y, axis=0) * np.linalg.norm(A, axis=0)
|
||||
return -np.mean(dot_product / (norms + 1e-8))
|
||||
|
||||
@classmethod
|
||||
def get_function_name(cls, func):
|
||||
return func.__name__
|
||||
|
||||
@classmethod
|
||||
def get_all_loss_names(cls):
|
||||
return [name for name, func in cls.__dict__.items()
|
||||
if callable(func) and not name.startswith("__") and
|
||||
not name in ['get_function_name', 'get_all_loss_names']]
|
||||
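A quick sanity check, not part of the commit: the cross-entropy losses normalize by Y.shape[1], so targets and activations are both laid out as (n_classes, n_samples), the same column-per-sample convention the forward pass uses.

import numpy as np
from net.loss import Loss

Y = np.array([[1.0, 0.0], [0.0, 1.0], [0.0, 0.0]])   # one-hot targets: 3 classes, 2 samples
A = np.array([[0.7, 0.2], [0.2, 0.7], [0.1, 0.1]])   # softmax-style outputs
print(Loss.categorical_cross_entropy_loss(Y, A))      # ~0.357
print(Loss.mean_squared_error(Y, A))                  # ~0.047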
169
net/mlp.py
Normal file
169
net/mlp.py
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
from sklearn.model_selection import train_test_split
|
||||
|
||||
from net.activation import Activations as af
|
||||
from net.optimizer import Optimizers as opt
|
||||
from net.loss import Loss
|
||||
|
||||
class MLP:
|
||||
def __init__(self, architecture, activations, optimizer, loss_function):
|
||||
self.architecture = architecture
|
||||
self.activations = activations
|
||||
self.optimizer = self.select_optimizer(optimizer)
|
||||
self.loss_function = getattr(Loss, loss_function)
|
||||
|
||||
self.params = self.init_params()
|
||||
self.activation_funcs = self.select_activations()
|
||||
|
||||
self.acc_store = []
|
||||
self.loss_store = []
|
||||
self.test_results = []
|
||||
|
||||
def init_params(self):
|
||||
params = {}
|
||||
for i in range(1, len(self.architecture)):
|
||||
params[f'W{i}'] = np.random.randn(self.architecture[i], self.architecture[i-1]) * 0.01
|
||||
params[f'b{i}'] = np.zeros((self.architecture[i], 1))
|
||||
return params
|
||||
|
||||
def select_activations(self):
|
||||
activation_funcs = []
|
||||
for activation in self.activations:
|
||||
activation_funcs.append(getattr(af, activation))
|
||||
return activation_funcs
|
||||
|
||||
def select_optimizer(self, optimizer_name):
|
||||
return getattr(opt, optimizer_name)
|
||||
|
||||
def forward_prop(self, X):
|
||||
A = X
|
||||
caches = []
|
||||
for i in range(1, len(self.architecture)):
|
||||
W = self.params[f'W{i}']
|
||||
b = self.params[f'b{i}']
|
||||
Z = np.dot(W, A) + b
|
||||
caches.append((A, W, b, Z))  # cache the layer input before overwriting A, so backward_prop's A_prev is the previous activation
A = self.activation_funcs[i-1](Z)
|
||||
return A, caches
|
||||
|
||||
def backward_prop(self, AL, Y, caches):
|
||||
grads = {}
|
||||
L = len(caches)
|
||||
|
||||
# Ensure Y is a 2D array
|
||||
Y = Y.reshape(-1, 1) if Y.ndim == 1 else Y
|
||||
m = Y.shape[1]
|
||||
|
||||
Y = self.one_hot(Y)
|
||||
|
||||
dAL = AL - Y
|
||||
current_cache = caches[L-1]
|
||||
grads[f"dA{L}"], grads[f"dW{L}"], grads[f"db{L}"] = self.linear_activation_backward(
|
||||
dAL, current_cache, self.activation_funcs[L-1].__name__)
|
||||
|
||||
for l in reversed(range(L-1)):
|
||||
current_cache = caches[l]
|
||||
dA_prev_temp, dW_temp, db_temp = self.linear_activation_backward(
|
||||
grads[f"dA{l+2}"], current_cache, self.activation_funcs[l].__name__)
|
||||
grads[f"dA{l+1}"] = dA_prev_temp
|
||||
grads[f"dW{l+1}"] = dW_temp
|
||||
grads[f"db{l+1}"] = db_temp
|
||||
|
||||
return grads
|
||||
|
||||
def one_hot(self, Y):
|
||||
num_classes = self.architecture[-1]
|
||||
if Y.ndim == 1:
|
||||
return np.eye(num_classes)[Y].T  # (num_classes, n_samples), matching the layer activations
|
||||
else:
|
||||
return np.eye(num_classes)[Y.reshape(-1)].T
|
||||
|
||||
def linear_activation_backward(self, dA, cache, activation):
|
||||
A_prev, W, b, Z = cache
|
||||
m = A_prev.shape[1]
|
||||
|
||||
if activation == "Softmax":
|
||||
dZ = dA
|
||||
elif activation == "ReLU":
|
||||
dZ = dA * af.ReLU_deriv(Z)
|
||||
else:
|
||||
raise ValueError(f"Backward propagation not implemented for {activation}")
|
||||
|
||||
dW = 1 / m * np.dot(dZ, A_prev.T)
|
||||
db = 1 / m * np.sum(dZ, axis=1, keepdims=True)
|
||||
dA_prev = np.dot(W.T, dZ)
|
||||
|
||||
return dA_prev, dW, db
|
||||
|
||||
def get_predictions(self, A):
|
||||
return np.argmax(A, axis=0)
|
||||
|
||||
def get_accuracy(self, predictions, Y):
|
||||
return np.mean(predictions == Y)
|
||||
|
||||
def train(self, X, Y, alpha, iterations, validation_split=0.2):
|
||||
X_train, X_val, Y_train, Y_val = train_test_split(X.T, Y, test_size=validation_split, shuffle=True, random_state=42)
|
||||
X_train, X_val = X_train.T, X_val.T
|
||||
|
||||
# Ensure Y_train and Y_val are 1D arrays
|
||||
Y_train = Y_train.ravel()
|
||||
Y_val = Y_val.ravel()
|
||||
|
||||
for i in range(iterations):
|
||||
AL, caches = self.forward_prop(X_train)
|
||||
grads = self.backward_prop(AL, Y_train, caches)
|
||||
|
||||
self.params = self.optimizer(self.params, grads, alpha)
|
||||
|
||||
if i % 10 == 0:
|
||||
train_preds = self.get_predictions(AL)
|
||||
train_acc = self.get_accuracy(train_preds, Y_train)
|
||||
train_loss = self.loss_function(self.one_hot(Y_train), AL)
|
||||
|
||||
AL_val, _ = self.forward_prop(X_val)
|
||||
val_preds = self.get_predictions(AL_val)
|
||||
val_acc = self.get_accuracy(val_preds, Y_val)
|
||||
val_loss = self.loss_function(self.one_hot(Y_val), AL_val)
|
||||
|
||||
print(f"Iteration {i}")
|
||||
print(f"Training Accuracy: {train_acc:.4f}, Validation Accuracy: {val_acc:.4f}")
|
||||
print(f"Training Loss: {train_loss:.4f}, Validation Loss: {val_loss:.4f}")
|
||||
print("-------------------------------------------------------")
|
||||
|
||||
self.acc_store.append((train_acc, val_acc))
|
||||
self.loss_store.append((train_loss, val_loss))
|
||||
|
||||
return self.params
|
||||
|
||||
def test(self, X_test, Y_test):
|
||||
AL, _ = self.forward_prop(X_test)
|
||||
predictions = self.get_predictions(AL)
|
||||
test_accuracy = self.get_accuracy(predictions, Y_test)
|
||||
test_loss = self.loss_function(self.one_hot(Y_test), AL)
|
||||
|
||||
self.test_results.append((test_accuracy, test_loss))
|
||||
|
||||
print(f"Test Accuracy: {test_accuracy:.4f}")
|
||||
print(f"Test Loss: {test_loss:.4f}")
|
||||
|
||||
def save_model(self, dataset):
|
||||
weights_file = f"weights/{dataset}_{self.activation_funcs[0].__name__}_weights.npz"
|
||||
results_file = f"results/{dataset}_{self.activation_funcs[0].__name__}_results.csv"
|
||||
|
||||
np.savez(weights_file, **self.params)
|
||||
|
||||
train_df = pd.DataFrame(self.acc_store, columns=["training_accuracy", "validation_accuracy"])
|
||||
loss_df = pd.DataFrame(self.loss_store, columns=["training_loss", "validation_loss"])
|
||||
test_df = pd.DataFrame(self.test_results, columns=['test_accuracy', 'test_loss'])
|
||||
|
||||
combined_df = pd.concat([train_df, loss_df, test_df], axis=1)
|
||||
combined_df.to_csv(results_file, index=False)
|
||||
|
||||
print(f"Weights saved to {weights_file}")
|
||||
print(f"Results saved to {results_file}")
|
||||
|
||||
def load_weights(self, file_name):
|
||||
data = np.load(file_name)
|
||||
self.params = {key: data[key] for key in data.files}
|
||||
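A minimal end-to-end sketch, not part of the commit, wiring MLP together with load_data and the string-based selection of activation, optimizer and loss shown above; the CSV path and hyperparameters are placeholders.

from net.mlp import MLP
from net.modules import load_data

X_train, Y_train, X_test, Y_test = load_data("data/bel_data_test.csv")  # path is an assumption

model = MLP(architecture=[X_train.shape[0], 128, 61],   # input -> hidden -> class scores
            activations=["ReLU", "Softmax"],
            optimizer="gradient_descent",
            loss_function="categorical_cross_entropy_loss")
model.train(X_train, Y_train, alpha=0.01, iterations=500)
model.test(X_test, Y_test)
model.save_model("bel")   # writes into weights/ and results/, as referenced elsewhere in the repo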
77
net/modules.py
Normal file
77
net/modules.py
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import numpy as np
|
||||
import pandas as pd
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
def load_data(file_path):
|
||||
data = pd.read_csv(file_path)
|
||||
data = np.array(data)
|
||||
m, n = data.shape
|
||||
|
||||
data_train = data[1000:m].T
|
||||
Y_train = data_train[0].astype(int)
|
||||
X_train = data_train[1:n]
|
||||
|
||||
data_test = data[0:1000].T
|
||||
Y_test = data_test[0].astype(int)
|
||||
X_test = data_test[1:n]
|
||||
|
||||
return X_train, Y_train, X_test, Y_test
|
||||
|
||||
def plot_accuracy(acc_store, save_path=None):
|
||||
"""
|
||||
Plot training and validation accuracy over iterations.
|
||||
|
||||
Parameters:
|
||||
acc_store (list of tuples): Each tuple contains (training_accuracy, validation_accuracy).
|
||||
save_path (str, optional): If provided, saves the plot to the specified path.
|
||||
"""
|
||||
# Unzip the accuracy data
|
||||
training_accuracy, validation_accuracy = zip(*acc_store)
|
||||
|
||||
# Plot
|
||||
plt.figure(figsize=(10, 6))
|
||||
plt.plot(training_accuracy, label='Training Accuracy')
|
||||
plt.plot(validation_accuracy, label='Validation Accuracy')
|
||||
plt.title('Training and Validation Accuracy Over Iterations')
|
||||
plt.xlabel('Iterations (in steps of 10)')
|
||||
plt.ylabel('Accuracy')
|
||||
plt.legend()
|
||||
plt.grid(True)
|
||||
|
||||
# Save the plot if a path is provided
|
||||
if save_path:
|
||||
plt.savefig(save_path)
|
||||
print(f"Accuracy plot saved to {save_path}")
|
||||
|
||||
# Show the plot
|
||||
plt.show()
|
||||
|
||||
|
||||
def plot_loss(loss_store, save_path=None):
|
||||
"""
|
||||
Plot training and validation loss over iterations.
|
||||
|
||||
Parameters:
|
||||
loss_store (list of tuples): Each tuple contains (training_loss, validation_loss).
|
||||
save_path (str, optional): If provided, saves the plot to the specified path.
|
||||
"""
|
||||
# Unzip the loss data
|
||||
training_loss, validation_loss = zip(*loss_store)
|
||||
|
||||
# Plot
|
||||
plt.figure(figsize=(10, 6))
|
||||
plt.plot(training_loss, label='Training Loss')
|
||||
plt.plot(validation_loss, label='Validation Loss')
|
||||
plt.title('Training and Validation Loss Over Iterations')
|
||||
plt.xlabel('Iterations (in steps of 10)')
|
||||
plt.ylabel('Loss')
|
||||
plt.legend()
|
||||
plt.grid(True)
|
||||
|
||||
# Save the plot if a path is provided
|
||||
if save_path:
|
||||
plt.savefig(save_path)
|
||||
print(f"Loss plot saved to {save_path}")
|
||||
|
||||
# Show the plot
|
||||
plt.show()
|
||||
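plot_accuracy and plot_loss consume the same list of (train, validation) tuples that MLP.train() appends to acc_store and loss_store every 10 iterations; a tiny sketch with made-up numbers:

from net.modules import plot_accuracy, plot_loss

acc_store  = [(0.10, 0.09), (0.35, 0.31), (0.62, 0.55)]   # fake (train, val) accuracy pairs
loss_store = [(2.90, 2.95), (1.40, 1.55), (0.80, 0.95)]   # fake (train, val) loss pairs
plot_accuracy(acc_store, save_path="results/demo_accuracy.png")
plot_loss(loss_store)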
14
net/optimizer.py
Normal file
14
net/optimizer.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
class Optimizers:
|
||||
@staticmethod
|
||||
def gradient_descent(params, grads, alpha):
|
||||
"""
|
||||
Performs gradient descent optimization for a multi-layer network.
|
||||
|
||||
:param params: Dictionary containing the network parameters (W1, b1, W2, b2, etc.)
|
||||
:param grads: Dictionary containing the gradients (dW1, db1, dW2, db2, etc.)
|
||||
:param alpha: Learning rate
|
||||
:return: Updated parameters dictionary
|
||||
"""
|
||||
for key in params:
|
||||
params[key] -= alpha * grads['d' + key]
|
||||
return params
|
||||
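A small sketch of the naming convention the docstring describes: every 'W1', 'b1', ... entry in params must have a matching 'dW1', 'db1', ... entry in grads.

import numpy as np
from net.optimizer import Optimizers as opt

params = {"W1": np.ones((2, 3)), "b1": np.zeros((2, 1))}
grads  = {"dW1": np.full((2, 3), 0.5), "db1": np.full((2, 1), 0.5)}
params = opt.gradient_descent(params, grads, alpha=0.1)
print(params["W1"][0, 0])   # 1.0 - 0.1 * 0.5 = 0.95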
109
net/transcoder.py
Normal file
109
net/transcoder.py
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
import numpy as np
|
||||
|
||||
from sklearn.model_selection import train_test_split
|
||||
|
||||
from net.mlp import MLP
|
||||
from net.modules import calculate_loss, calculate_accuracy, plot_learning_curves, plot_encoded_space, plot_reconstructions
|
||||
|
||||
class Transcoder(MLP):
|
||||
def __init__(self, input_size, hidden_size, output_size, hidden_activation='leaky_relu', output_activation='softmax', alpha=0.01):
|
||||
super().__init__(input_size, hidden_size, output_size, hidden_activation, output_activation, alpha)
|
||||
self.train_losses = []
|
||||
self.val_losses = []
|
||||
self.train_accuracies = []
|
||||
self.val_accuracies = []
|
||||
self.image_shape = self.determine_image_shape(input_size)
|
||||
|
||||
@staticmethod
|
||||
def determine_image_shape(input_size):
|
||||
sqrt = int(np.sqrt(input_size))
|
||||
if sqrt ** 2 == input_size:
|
||||
return (sqrt, sqrt)
|
||||
else:
|
||||
return (input_size, 1) # Default to column vector if not square
|
||||
|
||||
def encode_image(self, X):
|
||||
_, _, _, A2 = self.forward_prop(X)
|
||||
# print(f"Debug - Encoded image shape: {A2.shape}") #Debugging
|
||||
return A2
|
||||
|
||||
def decode_image(self, A2):
|
||||
# Start decoding from the encoded representation (A2)
|
||||
# print(f"Debug - A2 image shape: {A2.shape}") #Debugging
|
||||
|
||||
# Step 1: Reverse the output_activation function to get Z2
|
||||
Z2 = self.inverse_output_activation(A2)
|
||||
# print(f"Debug - Z2 image shape: {Z2.shape}") #Debugging
|
||||
|
||||
# Step 2: Reverse the second linear transformation to get A1
|
||||
A1 = np.linalg.pinv(self.W2).dot(Z2 - self.b2)
|
||||
# print(f"Debug - A1 image shape: {A1.shape}") #Debugging
|
||||
|
||||
# Step 3: Reverse the hidden_activation function to get Z1
|
||||
Z1 = self.inverse_hidden_activation(A1, self.alpha)
|
||||
# print(f"Debug - Z1 image shape: {Z1.shape}") #Debugging
|
||||
|
||||
# Step 4: Reverse the first linear transformation to get X (flattened 1D array)
|
||||
X_flat = np.linalg.pinv(self.W1).dot(Z1 - self.b1)
|
||||
# print(f"Debug - X_Flat image shape: {X_flat.shape}") #Debugging
|
||||
|
||||
# Step 5: If X_flat has shape (1024, n_samples), reshape it for each sample
|
||||
if X_flat.ndim > 1:
|
||||
X_flat = X_flat[:, 0] # Extract the first sample or reshape for batch processing
|
||||
|
||||
# Reshape to original image dimensions (32x32)
|
||||
X_image = X_flat.reshape(self.image_shape)
|
||||
|
||||
return X_image
|
||||
|
||||
def transcode(self, X):
|
||||
print(f"Debug - Input X shape: {X.shape}")
|
||||
encoded = self.encode_image(X)
|
||||
decoded = self.decode_image(encoded)
|
||||
return encoded, decoded
|
||||
|
||||
def train_with_validation(self, X, Y, alpha, iterations, val_split=0.2):
|
||||
# Ensure X is of shape (n_features, n_samples)
|
||||
if X.shape[0] != self.input_size:
|
||||
X = X.T
|
||||
|
||||
# Ensure Y is a 1D array
|
||||
if Y.ndim > 1:
|
||||
Y = Y.ravel()
|
||||
|
||||
X_train, X_val, Y_train, Y_val = train_test_split(X.T, Y, test_size=val_split, random_state=42)
|
||||
X_train, X_val = X_train.T, X_val.T # Transpose back to (n_features, n_samples)
|
||||
|
||||
for i in range(iterations):
|
||||
# Train step
|
||||
Z1, A1, Z2, A2 = self.forward_prop(X_train)
|
||||
dW1, db1, dW2, db2 = self.backward_prop(Z1, A1, Z2, A2, X_train, Y_train)
|
||||
self.update_params(dW1, db1, dW2, db2, alpha)
|
||||
|
||||
# Calculate and store losses and accuracies
|
||||
train_loss = calculate_loss(self, X_train, Y_train)
|
||||
val_loss = calculate_loss(self, X_val, Y_val)
|
||||
train_accuracy = calculate_accuracy(self, X_train, Y_train)
|
||||
val_accuracy = calculate_accuracy(self, X_val, Y_val)
|
||||
|
||||
self.train_losses.append(train_loss)
|
||||
self.val_losses.append(val_loss)
|
||||
self.train_accuracies.append(train_accuracy)
|
||||
self.val_accuracies.append(val_accuracy)
|
||||
|
||||
if i % 100 == 0:
|
||||
print(f"Iteration {i}: Train Loss = {train_loss:.4f}, Val Loss = {val_loss:.4f}, "
|
||||
f"Train Accuracy = {train_accuracy:.4f}, Val Accuracy = {val_accuracy:.4f}")
|
||||
|
||||
def plot_learning_curves(self):
|
||||
plot_learning_curves(self.train_losses, self.val_losses, self.train_accuracies, self.val_accuracies)
|
||||
|
||||
def plot_encoded_space(self, X, Y):
|
||||
if X.shape[0] != self.input_size:
|
||||
X = X.T
|
||||
plot_encoded_space(self, X, Y)
|
||||
|
||||
def plot_reconstructions(self, X, num_images=5):
|
||||
if X.shape[0] != self.input_size:
|
||||
X = X.T
|
||||
plot_reconstructions(self, X, num_images)
|
||||
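Transcoder.decode_image inverts each layer with np.linalg.pinv plus the matching Inverse* activation. A self-contained sketch of that idea on a single layer (random weights, not the trained model) shows why the round trip is lossy when 1024 inputs are squeezed through 10 hidden units:

import numpy as np
from net.activation import Activations as af

rng = np.random.default_rng(0)
W1 = rng.standard_normal((10, 1024)) * 0.01
b1 = np.zeros((10, 1))
x = rng.random((1024, 1))                  # a stand-in flattened 32x32 image

a1 = af.LeakyReLU(W1 @ x + b1)             # encode through the first layer
z1 = af.InverseLeakyReLU(a1)               # decode: undo the activation...
x_rec = np.linalg.pinv(W1) @ (z1 - b1)     # ...then least-squares-invert the linear map
print(np.mean((x - x_rec) ** 2))           # nonzero: 10 dimensions cannot hold 1024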
401
results/bel_all_accuracies.csv
Normal file
401
results/bel_all_accuracies.csv
Normal file
|
|
@ -0,0 +1,401 @@
|
|||
Trial_1,Trial_2,Trial_3,Trial_4,Trial_5
|
||||
0.023496503496503496,0.04055944055944056,0.016783216783216783,0.006993006993006993,0.04027972027972028
|
||||
0.011748251748251748,0.05258741258741259,0.0,0.013146853146853148,0.008111888111888113
|
||||
0.011748251748251748,0.05146853146853147,0.0,0.013146853146853148,0.008391608391608392
|
||||
0.011748251748251748,0.09958041958041958,0.0,0.013146853146853148,0.00951048951048951
|
||||
0.011748251748251748,0.11048951048951049,0.0,0.022377622377622378,0.00951048951048951
|
||||
0.011748251748251748,0.1076923076923077,0.0,0.05678321678321678,0.00951048951048951
|
||||
0.011748251748251748,0.1048951048951049,0.0,0.07160839160839161,0.00951048951048951
|
||||
0.011748251748251748,0.12195804195804195,0.0,0.08027972027972027,0.009790209790209791
|
||||
0.011748251748251748,0.16307692307692306,0.0,0.086993006993007,0.009790209790209791
|
||||
0.011748251748251748,0.16867132867132867,0.0,0.09118881118881118,0.009790209790209791
|
||||
0.011748251748251748,0.17174825174825176,0.0,0.09314685314685314,0.009790209790209791
|
||||
0.011748251748251748,0.1753846153846154,0.0,0.0937062937062937,0.01006993006993007
|
||||
0.011748251748251748,0.17622377622377622,0.0,0.09538461538461539,0.011188811188811189
|
||||
0.011748251748251748,0.17986013986013985,0.0,0.09594405594405594,0.011748251748251748
|
||||
0.011748251748251748,0.18097902097902097,0.0,0.09622377622377623,0.07608391608391608
|
||||
0.011748251748251748,0.1834965034965035,0.0,0.09706293706293706,0.08587412587412588
|
||||
0.011748251748251748,0.18573426573426574,0.013146853146853148,0.09706293706293706,0.09062937062937063
|
||||
0.011748251748251748,0.1874125874125874,0.02825174825174825,0.09734265734265735,0.09146853146853147
|
||||
0.011748251748251748,0.1888111888111888,0.03776223776223776,0.09734265734265735,0.09174825174825176
|
||||
0.011748251748251748,0.18993006993006992,0.044475524475524476,0.09762237762237762,0.09258741258741258
|
||||
0.011748251748251748,0.19132867132867132,0.05146853146853147,0.0979020979020979,0.09230769230769231
|
||||
0.011748251748251748,0.19244755244755246,0.0606993006993007,0.09706293706293706,0.09118881118881118
|
||||
0.011748251748251748,0.19384615384615383,0.06097902097902098,0.09818181818181818,0.09062937062937063
|
||||
0.011748251748251748,0.19692307692307692,0.06293706293706294,0.0979020979020979,0.0897902097902098
|
||||
0.011748251748251748,0.2013986013986014,0.06321678321678321,0.09846153846153846,0.08923076923076922
|
||||
0.011748251748251748,0.21202797202797202,0.06293706293706294,0.09762237762237762,0.08811188811188811
|
||||
0.011748251748251748,0.22097902097902097,0.06321678321678321,0.09734265734265735,0.08811188811188811
|
||||
0.011748251748251748,0.22545454545454546,0.0634965034965035,0.09874125874125875,0.07244755244755245
|
||||
0.011748251748251748,0.23272727272727273,0.06769230769230769,0.09874125874125875,0.07916083916083916
|
||||
0.011748251748251748,0.2427972027972028,0.06965034965034965,0.09762237762237762,0.08951048951048951
|
||||
0.011748251748251748,0.2632167832167832,0.07104895104895105,0.09874125874125875,0.0979020979020979
|
||||
0.011748251748251748,0.2716083916083916,0.0718881118881119,0.10797202797202797,0.10377622377622378
|
||||
0.011748251748251748,0.2816783216783217,0.07272727272727272,0.11412587412587413,0.10573426573426574
|
||||
0.011748251748251748,0.2900699300699301,0.08195804195804196,0.11692307692307692,0.1076923076923077
|
||||
0.011748251748251748,0.29762237762237764,0.11356643356643356,0.11832167832167832,0.10741258741258741
|
||||
0.011748251748251748,0.3074125874125874,0.126993006993007,0.12055944055944055,0.10797202797202797
|
||||
0.011748251748251748,0.31300699300699303,0.10601398601398601,0.1227972027972028,0.10937062937062937
|
||||
0.011748251748251748,0.3186013986013986,0.10545454545454545,0.13286713286713286,0.11020979020979021
|
||||
0.011748251748251748,0.3241958041958042,0.10545454545454545,0.14909090909090908,0.11216783216783217
|
||||
0.011748251748251748,0.332027972027972,0.10545454545454545,0.16195804195804195,0.11272727272727273
|
||||
0.011748251748251748,0.3412587412587413,0.10573426573426574,0.13258741258741258,0.11384615384615385
|
||||
0.011748251748251748,0.3474125874125874,0.10573426573426574,0.1362237762237762,0.11496503496503496
|
||||
0.011748251748251748,0.34265734265734266,0.10573426573426574,0.14853146853146854,0.11608391608391608
|
||||
0.011748251748251748,0.34881118881118883,0.10573426573426574,0.1516083916083916,0.11804195804195804
|
||||
0.011748251748251748,0.35160839160839163,0.10573426573426574,0.1586013986013986,0.11860139860139861
|
||||
0.011748251748251748,0.35748251748251747,0.10573426573426574,0.16223776223776223,0.11636363636363636
|
||||
0.011748251748251748,0.3627972027972028,0.10573426573426574,0.17006993006993007,0.11524475524475525
|
||||
0.011748251748251748,0.37006993006993005,0.10573426573426574,0.17846153846153845,0.11720279720279721
|
||||
0.011748251748251748,0.37566433566433566,0.10573426573426574,0.18237762237762237,0.12111888111888112
|
||||
0.011748251748251748,0.3829370629370629,0.10573426573426574,0.18993006993006992,0.12307692307692308
|
||||
0.011748251748251748,0.3893706293706294,0.10545454545454545,0.2067132867132867,0.12503496503496503
|
||||
0.011748251748251748,0.39636363636363636,0.10545454545454545,0.22097902097902097,0.1258741258741259
|
||||
0.011748251748251748,0.4027972027972028,0.10545454545454545,0.23244755244755244,0.12783216783216783
|
||||
0.011748251748251748,0.40755244755244757,0.10545454545454545,0.2111888111888112,0.12867132867132866
|
||||
0.011748251748251748,0.413986013986014,0.10545454545454545,0.2455944055944056,0.13006993006993006
|
||||
0.011748251748251748,0.4206993006993007,0.10545454545454545,0.2746853146853147,0.13118881118881118
|
||||
0.011748251748251748,0.42713286713286713,0.10545454545454545,0.28223776223776226,0.13258741258741258
|
||||
0.011748251748251748,0.4318881118881119,0.10545454545454545,0.2903496503496503,0.13454545454545455
|
||||
0.011748251748251748,0.43608391608391606,0.10517482517482518,0.29426573426573427,0.13426573426573427
|
||||
0.011748251748251748,0.44111888111888115,0.10517482517482518,0.3048951048951049,0.13678321678321678
|
||||
0.011748251748251748,0.4464335664335664,0.10517482517482518,0.30993006993006994,0.139020979020979
|
||||
0.011748251748251748,0.4509090909090909,0.10517482517482518,0.31580419580419583,0.13986013986013987
|
||||
0.011748251748251748,0.4584615384615385,0.10517482517482518,0.32223776223776224,0.1423776223776224
|
||||
0.011748251748251748,0.4668531468531468,0.10517482517482518,0.3239160839160839,0.14405594405594405
|
||||
0.011748251748251748,0.4735664335664336,0.10517482517482518,0.32895104895104893,0.14153846153846153
|
||||
0.011748251748251748,0.4791608391608392,0.10517482517482518,0.3264335664335664,0.14153846153846153
|
||||
0.011748251748251748,0.48363636363636364,0.10517482517482518,0.33174825174825173,0.1437762237762238
|
||||
0.011748251748251748,0.48923076923076925,0.10517482517482518,0.33986013986013985,0.15020979020979022
|
||||
0.011748251748251748,0.49202797202797205,0.10517482517482518,0.3474125874125874,0.15216783216783217
|
||||
0.011748251748251748,0.4976223776223776,0.10517482517482518,0.3507692307692308,0.15636363636363637
|
||||
0.011748251748251748,0.4995804195804196,0.1048951048951049,0.3563636363636364,0.16
|
||||
0.011748251748251748,0.5015384615384615,0.1048951048951049,0.3597202797202797,0.1655944055944056
|
||||
0.011748251748251748,0.504055944055944,0.1048951048951049,0.36475524475524473,0.16783216783216784
|
||||
0.011748251748251748,0.5062937062937063,0.1048951048951049,0.3686713286713287,0.1709090909090909
|
||||
0.011748251748251748,0.5068531468531469,0.1048951048951049,0.36895104895104897,0.1723076923076923
|
||||
0.011748251748251748,0.509090909090909,0.1048951048951049,0.3706293706293706,0.1737062937062937
|
||||
0.011748251748251748,0.5116083916083916,0.1048951048951049,0.36363636363636365,0.1751048951048951
|
||||
0.011748251748251748,0.513006993006993,0.1048951048951049,0.34937062937062935,0.17734265734265733
|
||||
0.011748251748251748,0.5158041958041958,0.1048951048951049,0.3563636363636364,0.17846153846153845
|
||||
0.011748251748251748,0.5183216783216783,0.1048951048951049,0.36475524475524473,0.18013986013986014
|
||||
0.011748251748251748,0.5208391608391608,0.1048951048951049,0.3734265734265734,0.18125874125874125
|
||||
0.011748251748251748,0.5227972027972028,0.1048951048951049,0.3837762237762238,0.18461538461538463
|
||||
0.011748251748251748,0.5258741258741259,0.1048951048951049,0.3890909090909091,0.18489510489510488
|
||||
0.011748251748251748,0.5272727272727272,0.1048951048951049,0.3918881118881119,0.18685314685314686
|
||||
0.011748251748251748,0.5306293706293707,0.1048951048951049,0.39384615384615385,0.18853146853146854
|
||||
0.011748251748251748,0.5300699300699301,0.1048951048951049,0.39692307692307693,0.18965034965034966
|
||||
0.011748251748251748,0.5300699300699301,0.1048951048951049,0.4008391608391608,0.19076923076923077
|
||||
0.011748251748251748,0.5325874125874126,0.1048951048951049,0.4036363636363636,0.19244755244755246
|
||||
0.011748251748251748,0.5345454545454545,0.1048951048951049,0.4039160839160839,0.19384615384615383
|
||||
0.011748251748251748,0.5362237762237763,0.1048951048951049,0.4067132867132867,0.19524475524475524
|
||||
0.011748251748251748,0.5370629370629371,0.1048951048951049,0.40783216783216786,0.19636363636363635
|
||||
0.011748251748251748,0.5384615384615384,0.1048951048951049,0.4083916083916084,0.19776223776223775
|
||||
0.011748251748251748,0.5406993006993007,0.1048951048951049,0.4100699300699301,0.20307692307692307
|
||||
0.011748251748251748,0.5437762237762238,0.1048951048951049,0.4137062937062937,0.2081118881118881
|
||||
0.011748251748251748,0.5462937062937063,0.1048951048951049,0.4179020979020979,0.2123076923076923
|
||||
0.011748251748251748,0.5471328671328671,0.1048951048951049,0.41986013986013987,0.21566433566433565
|
||||
0.011748251748251748,0.5499300699300699,0.1048951048951049,0.42097902097902096,0.22013986013986014
|
||||
0.011748251748251748,0.5521678321678322,0.1048951048951049,0.4246153846153846,0.22293706293706295
|
||||
0.011748251748251748,0.5518881118881119,0.1048951048951049,0.42573426573426576,0.22685314685314686
|
||||
0.011748251748251748,0.5516083916083916,0.1048951048951049,0.4276923076923077,0.22965034965034964
|
||||
0.011748251748251748,0.5521678321678322,0.1048951048951049,0.42965034965034965,0.22993006993006992
|
||||
0.011748251748251748,0.5532867132867133,0.1048951048951049,0.4307692307692308,0.2332867132867133
|
||||
0.011748251748251748,0.5566433566433566,0.1048951048951049,0.43272727272727274,0.23496503496503496
|
||||
0.011748251748251748,0.5586013986013986,0.1048951048951049,0.4341258741258741,0.24
|
||||
0.011748251748251748,0.5608391608391609,0.1048951048951049,0.4338461538461538,0.24447552447552448
|
||||
0.011748251748251748,0.5613986013986014,0.10461538461538461,0.43524475524475525,0.2483916083916084
|
||||
0.011748251748251748,0.5605594405594405,0.10433566433566434,0.43636363636363634,0.25594405594405595
|
||||
0.011748251748251748,0.5633566433566434,0.10461538461538461,0.43860139860139863,0.2755244755244755
|
||||
0.011748251748251748,0.566993006993007,0.10461538461538461,0.4397202797202797,0.29174825174825175
|
||||
0.011748251748251748,0.5683916083916084,0.10461538461538461,0.44083916083916086,0.3102097902097902
|
||||
0.011748251748251748,0.5695104895104895,0.10461538461538461,0.44167832167832166,0.31524475524475526
|
||||
0.011748251748251748,0.5697902097902098,0.10461538461538461,0.4413986013986014,0.3169230769230769
|
||||
0.011748251748251748,0.5700699300699301,0.10461538461538461,0.44167832167832166,0.30993006993006994
|
||||
0.011748251748251748,0.5723076923076923,0.10461538461538461,0.44363636363636366,0.31272727272727274
|
||||
0.011748251748251748,0.5728671328671329,0.10433566433566434,0.44475524475524475,0.3177622377622378
|
||||
0.011748251748251748,0.5737062937062937,0.10461538461538461,0.4458741258741259,0.3211188811188811
|
||||
0.011748251748251748,0.575944055944056,0.10517482517482518,0.446993006993007,0.32447552447552447
|
||||
0.011748251748251748,0.5767832167832168,0.10517482517482518,0.44755244755244755,0.32783216783216784
|
||||
0.011748251748251748,0.5776223776223777,0.10517482517482518,0.4481118881118881,0.3295104895104895
|
||||
0.011748251748251748,0.5784615384615385,0.10517482517482518,0.4497902097902098,0.33734265734265734
|
||||
0.011748251748251748,0.5801398601398602,0.10517482517482518,0.45062937062937064,0.3406993006993007
|
||||
0.011748251748251748,0.5829370629370629,0.10517482517482518,0.4511888111888112,0.3448951048951049
|
||||
0.011748251748251748,0.5848951048951049,0.10517482517482518,0.45342657342657344,0.35104895104895106
|
||||
0.011748251748251748,0.5846153846153846,0.10517482517482518,0.45370629370629373,0.35524475524475524
|
||||
0.011748251748251748,0.5857342657342657,0.1048951048951049,0.45398601398601396,0.3630769230769231
|
||||
0.011748251748251748,0.5874125874125874,0.1048951048951049,0.4548251748251748,0.3711888111888112
|
||||
0.011748251748251748,0.5882517482517482,0.1048951048951049,0.45622377622377625,0.3801398601398601
|
||||
0.011748251748251748,0.5890909090909091,0.1048951048951049,0.45678321678321676,0.38825174825174824
|
||||
0.011748251748251748,0.5907692307692308,0.1048951048951049,0.4606993006993007,0.39636363636363636
|
||||
0.011748251748251748,0.5885314685314685,0.1048951048951049,0.46237762237762237,0.4067132867132867
|
||||
0.011748251748251748,0.5916083916083916,0.1048951048951049,0.4643356643356643,0.41706293706293707
|
||||
0.011748251748251748,0.5944055944055944,0.1048951048951049,0.466013986013986,0.4254545454545455
|
||||
0.011748251748251748,0.5960839160839161,0.1048951048951049,0.4690909090909091,0.43244755244755245
|
||||
0.011748251748251748,0.5977622377622378,0.1048951048951049,0.4702097902097902,0.43636363636363634
|
||||
0.011748251748251748,0.5986013986013986,0.1048951048951049,0.4707692307692308,0.4372027972027972
|
||||
0.011748251748251748,0.6002797202797203,0.1048951048951049,0.47216783216783215,0.44195804195804195
|
||||
0.011748251748251748,0.6008391608391609,0.1048951048951049,0.4727272727272727,0.4483916083916084
|
||||
0.011748251748251748,0.6033566433566434,0.1048951048951049,0.47384615384615386,0.45454545454545453
|
||||
0.011748251748251748,0.605034965034965,0.1048951048951049,0.47440559440559443,0.46265734265734265
|
||||
0.011748251748251748,0.6047552447552448,0.1048951048951049,0.47496503496503495,0.4690909090909091
|
||||
0.011748251748251748,0.6055944055944056,0.1048951048951049,0.47664335664335666,0.4735664335664336
|
||||
0.011748251748251748,0.6067132867132867,0.1048951048951049,0.47664335664335666,0.4788811188811189
|
||||
0.011748251748251748,0.6075524475524475,0.1048951048951049,0.4772027972027972,0.48335664335664336
|
||||
0.011748251748251748,0.6092307692307692,0.1048951048951049,0.47748251748251747,0.4855944055944056
|
||||
0.011748251748251748,0.6117482517482518,0.1048951048951049,0.47804195804195804,0.48895104895104896
|
||||
0.011748251748251748,0.6145454545454545,0.10461538461538461,0.4783216783216783,0.49202797202797205
|
||||
0.011748251748251748,0.6139860139860139,0.10433566433566434,0.4786013986013986,0.499020979020979
|
||||
0.011748251748251748,0.6179020979020979,0.10433566433566434,0.47944055944055947,0.5026573426573426
|
||||
0.011748251748251748,0.612027972027972,0.10433566433566434,0.47944055944055947,0.5068531468531469
|
||||
0.011748251748251748,0.6123076923076923,0.10433566433566434,0.48083916083916084,0.5124475524475525
|
||||
0.011748251748251748,0.6184615384615385,0.10433566433566434,0.48055944055944055,0.5152447552447552
|
||||
0.011748251748251748,0.617062937062937,0.10433566433566434,0.4811188811188811,0.5191608391608392
|
||||
0.011748251748251748,0.6064335664335664,0.10461538461538461,0.481958041958042,0.521958041958042
|
||||
0.011748251748251748,0.5938461538461538,0.10461538461538461,0.4816783216783217,0.5255944055944056
|
||||
0.011748251748251748,0.6061538461538462,0.10461538461538461,0.4813986013986014,0.5278321678321678
|
||||
0.011748251748251748,0.6156643356643356,0.10461538461538461,0.4822377622377622,0.5309090909090909
|
||||
0.011748251748251748,0.6265734265734266,0.10461538461538461,0.4822377622377622,0.533986013986014
|
||||
0.011748251748251748,0.6316083916083917,0.10461538461538461,0.481958041958042,0.533986013986014
|
||||
0.011748251748251748,0.6346853146853146,0.10461538461538461,0.4827972027972028,0.5362237762237763
|
||||
0.011748251748251748,0.6411188811188812,0.10461538461538461,0.48363636363636364,0.539020979020979
|
||||
0.011748251748251748,0.6433566433566433,0.10461538461538461,0.48335664335664336,0.5401398601398602
|
||||
0.011748251748251748,0.6458741258741258,0.10433566433566434,0.4841958041958042,0.5434965034965035
|
||||
0.011748251748251748,0.6500699300699301,0.10433566433566434,0.48475524475524473,0.5434965034965035
|
||||
0.011748251748251748,0.6545454545454545,0.10433566433566434,0.485034965034965,0.5457342657342658
|
||||
0.011748251748251748,0.6587412587412588,0.10461538461538461,0.48643356643356644,0.5468531468531469
|
||||
0.011748251748251748,0.6629370629370629,0.10461538461538461,0.48391608391608393,0.5485314685314685
|
||||
0.011748251748251748,0.6668531468531469,0.10461538461538461,0.48363636363636364,0.5513286713286714
|
||||
0.011748251748251748,0.6682517482517483,0.10461538461538461,0.48307692307692307,0.553006993006993
|
||||
0.011748251748251748,0.6690909090909091,0.10461538461538461,0.4822377622377622,0.5538461538461539
|
||||
0.011748251748251748,0.6721678321678322,0.10461538461538461,0.48475524475524473,0.554965034965035
|
||||
0.011748251748251748,0.6763636363636364,0.10461538461538461,0.4853146853146853,0.5552447552447553
|
||||
0.011748251748251748,0.6786013986013986,0.10461538461538461,0.4844755244755245,0.556923076923077
|
||||
0.011748251748251748,0.68,0.10461538461538461,0.48475524475524473,0.5586013986013986
|
||||
0.011748251748251748,0.68,0.10461538461538461,0.4855944055944056,0.5605594405594405
|
||||
0.011748251748251748,0.6777622377622378,0.10461538461538461,0.4855944055944056,0.561958041958042
|
||||
0.011748251748251748,0.6732867132867133,0.1048951048951049,0.4855944055944056,0.5627972027972028
|
||||
0.011748251748251748,0.6772027972027972,0.1048951048951049,0.4853146853146853,0.561958041958042
|
||||
0.011748251748251748,0.6844755244755245,0.10545454545454545,0.48615384615384616,0.561958041958042
|
||||
0.011748251748251748,0.6906293706293706,0.10545454545454545,0.4878321678321678,0.5622377622377622
|
||||
0.011748251748251748,0.6979020979020979,0.10573426573426574,0.48895104895104896,0.563076923076923
|
||||
0.011748251748251748,0.6998601398601398,0.1062937062937063,0.4883916083916084,0.563076923076923
|
||||
0.011748251748251748,0.6970629370629371,0.10881118881118881,0.48979020979020976,0.5641958041958042
|
||||
0.011748251748251748,0.6942657342657342,0.1158041958041958,0.4906293706293706,0.5653146853146853
|
||||
0.011748251748251748,0.7012587412587412,0.13202797202797203,0.48951048951048953,0.5672727272727273
|
||||
0.011748251748251748,0.707972027972028,0.1462937062937063,0.48755244755244753,0.5681118881118881
|
||||
0.011748251748251748,0.7099300699300699,0.1616783216783217,0.4886713286713287,0.5675524475524476
|
||||
0.011748251748251748,0.7093706293706293,0.17426573426573427,0.48923076923076925,0.5692307692307692
|
||||
0.011748251748251748,0.706013986013986,0.18573426573426574,0.49006993006993005,0.5697902097902098
|
||||
0.011748251748251748,0.6987412587412587,0.19552447552447552,0.48979020979020976,0.5695104895104895
|
||||
0.011748251748251748,0.6850349650349651,0.20391608391608393,0.48979020979020976,0.5692307692307692
|
||||
0.011748251748251748,0.6937062937062937,0.2137062937062937,0.49034965034965033,0.5700699300699301
|
||||
0.011748251748251748,0.7074125874125874,0.22405594405594406,0.4909090909090909,0.5714685314685315
|
||||
0.011748251748251748,0.7222377622377623,0.2511888111888112,0.49258741258741257,0.5728671328671329
|
||||
0.011748251748251748,0.7230769230769231,0.27412587412587414,0.4937062937062937,0.5720279720279721
|
||||
0.011748251748251748,0.7241958041958042,0.2844755244755245,0.4934265734265734,0.5723076923076923
|
||||
0.011748251748251748,0.7205594405594405,0.2853146853146853,0.49230769230769234,0.5723076923076923
|
||||
0.011748251748251748,0.7141258741258741,0.2931468531468531,0.4914685314685315,0.5731468531468531
|
||||
0.011748251748251748,0.7166433566433567,0.299020979020979,0.49202797202797205,0.5737062937062937
|
||||
0.011748251748251748,0.7225174825174825,0.30293706293706296,0.49202797202797205,0.5773426573426573
|
||||
0.011748251748251748,0.7236363636363636,0.30405594405594405,0.49286713286713285,0.5787412587412587
|
||||
0.011748251748251748,0.7272727272727273,0.30713286713286714,0.49286713286713285,0.5784615384615385
|
||||
0.011748251748251748,0.7278321678321679,0.30937062937062937,0.49286713286713285,0.5767832167832168
|
||||
0.011748251748251748,0.7272727272727273,0.30965034965034965,0.49314685314685314,0.5765034965034965
|
||||
0.011748251748251748,0.7253146853146853,0.3118881118881119,0.49258741258741257,0.5781818181818181
|
||||
0.011748251748251748,0.7211188811188811,0.3141258741258741,0.493986013986014,0.5781818181818181
|
||||
0.011748251748251748,0.7225174825174825,0.3186013986013986,0.493986013986014,0.579020979020979
|
||||
0.011748251748251748,0.7197202797202797,0.32083916083916086,0.49482517482517485,0.5832167832167832
|
||||
0.011748251748251748,0.7325874125874126,0.3227972027972028,0.49538461538461537,0.5837762237762237
|
||||
0.011748251748251748,0.7398601398601399,0.3328671328671329,0.49566433566433565,0.5846153846153846
|
||||
0.011748251748251748,0.7412587412587412,0.33678321678321677,0.4962237762237762,0.5857342657342657
|
||||
0.011748251748251748,0.7426573426573426,0.3406993006993007,0.49734265734265737,0.5868531468531468
|
||||
0.011748251748251748,0.7415384615384616,0.3437762237762238,0.49734265734265737,0.5882517482517482
|
||||
0.011748251748251748,0.7381818181818182,0.3448951048951049,0.4976223776223776,0.5899300699300699
|
||||
0.011748251748251748,0.7373426573426574,0.3476923076923077,0.4993006993006993,0.5902097902097903
|
||||
0.011748251748251748,0.7390209790209791,0.3504895104895105,0.5004195804195805,0.5834965034965035
|
||||
0.011748251748251748,0.7401398601398601,0.35244755244755244,0.5006993006993007,0.5731468531468531
|
||||
0.011748251748251748,0.7393006993006993,0.3560839160839161,0.5015384615384615,0.5678321678321678
|
||||
0.011748251748251748,0.7379020979020979,0.3572027972027972,0.5026573426573426,0.5767832167832168
|
||||
0.011748251748251748,0.7379020979020979,0.35804195804195804,0.5032167832167832,0.5823776223776224
|
||||
0.011748251748251748,0.735944055944056,0.3602797202797203,0.5037762237762238,0.5823776223776224
|
||||
0.011748251748251748,0.7398601398601399,0.3625174825174825,0.5034965034965035,0.5804195804195804
|
||||
0.011748251748251748,0.7432167832167832,0.36335664335664336,0.5043356643356643,0.5876923076923077
|
||||
0.011748251748251748,0.7468531468531469,0.36475524475524473,0.5043356643356643,0.5916083916083916
|
||||
0.011748251748251748,0.7518881118881119,0.36671328671328673,0.5051748251748251,0.5946853146853147
|
||||
0.011748251748251748,0.7518881118881119,0.3711888111888112,0.5057342657342657,0.5927272727272728
|
||||
0.011748251748251748,0.7474125874125874,0.37538461538461537,0.5068531468531469,0.5902097902097903
|
||||
0.011748251748251748,0.7468531468531469,0.3795804195804196,0.5071328671328671,0.5843356643356643
|
||||
0.011748251748251748,0.7490909090909091,0.3829370629370629,0.5082517482517482,0.5770629370629371
|
||||
0.011748251748251748,0.7516083916083917,0.3862937062937063,0.5096503496503496,0.5801398601398602
|
||||
0.011748251748251748,0.7532867132867133,0.3890909090909091,0.5116083916083916,0.5882517482517482
|
||||
0.011748251748251748,0.7544055944055944,0.39132867132867133,0.5121678321678321,0.5921678321678322
|
||||
0.012307692307692308,0.754965034965035,0.39356643356643356,0.5127272727272727,0.5893706293706293
|
||||
0.012307692307692308,0.7558041958041958,0.3974825174825175,0.5107692307692308,0.5815384615384616
|
||||
0.012587412587412588,0.7569230769230769,0.4011188811188811,0.5118881118881119,0.5868531468531468
|
||||
0.014545454545454545,0.7555244755244755,0.40251748251748254,0.5138461538461538,0.5977622377622378
|
||||
0.016223776223776225,0.7535664335664336,0.40335664335664334,0.5135664335664336,0.5988811188811188
|
||||
0.017622377622377623,0.7552447552447552,0.40615384615384614,0.5121678321678321,0.6025174825174825
|
||||
0.018741258741258742,0.7457342657342657,0.40755244755244757,0.5116083916083916,0.6008391608391609
|
||||
0.02097902097902098,0.746013986013986,0.40895104895104895,0.5135664335664336,0.594965034965035
|
||||
0.022377622377622378,0.7572027972027972,0.41174825174825175,0.5135664335664336,0.591048951048951
|
||||
0.024055944055944058,0.7616783216783217,0.4125874125874126,0.5132867132867133,0.5882517482517482
|
||||
0.025454545454545455,0.7597202797202797,0.4151048951048951,0.5144055944055944,0.5848951048951049
|
||||
0.026853146853146853,0.7594405594405594,0.41762237762237764,0.5152447552447552,0.5935664335664336
|
||||
0.027692307692307693,0.7622377622377622,0.42013986013986016,0.514965034965035,0.5969230769230769
|
||||
0.0344055944055944,0.765034965034965,0.42153846153846153,0.5160839160839161,0.5991608391608392
|
||||
0.10293706293706294,0.7658741258741258,0.4254545454545455,0.5158041958041958,0.5944055944055944
|
||||
0.10685314685314685,0.7653146853146853,0.42685314685314685,0.5163636363636364,0.5932867132867133
|
||||
0.10797202797202797,0.7658741258741258,0.4288111888111888,0.5180419580419581,0.6016783216783217
|
||||
0.10965034965034966,0.7675524475524476,0.4302097902097902,0.5194405594405594,0.6036363636363636
|
||||
0.11048951048951049,0.7661538461538462,0.431048951048951,0.5197202797202797,0.6092307692307692
|
||||
0.11020979020979021,0.7672727272727272,0.4341258741258741,0.5188811188811189,0.6125874125874126
|
||||
0.11020979020979021,0.7597202797202797,0.43636363636363634,0.5191608391608392,0.6075524475524475
|
||||
0.10853146853146853,0.7457342657342657,0.4372027972027972,0.5194405594405594,0.6011188811188811
|
||||
0.10713286713286713,0.7560839160839161,0.4372027972027972,0.5194405594405594,0.5986013986013986
|
||||
0.10685314685314685,0.7655944055944056,0.4397202797202797,0.52,0.591048951048951
|
||||
0.10825174825174826,0.7653146853146853,0.4425174825174825,0.5205594405594406,0.5846153846153846
|
||||
0.11104895104895104,0.7667132867132868,0.44447552447552446,0.5208391608391608,0.5963636363636363
|
||||
0.11188811188811189,0.7675524475524476,0.4453146853146853,0.5222377622377622,0.6058741258741259
|
||||
0.11132867132867133,0.7664335664335664,0.4467132867132867,0.5222377622377622,0.6072727272727273
|
||||
0.11104895104895104,0.7655944055944056,0.446993006993007,0.5222377622377622,0.6044755244755244
|
||||
0.11188811188811189,0.7647552447552447,0.44783216783216784,0.5233566433566433,0.5974825174825175
|
||||
0.11104895104895104,0.7683916083916084,0.4486713286713287,0.5227972027972028,0.5952447552447553
|
||||
0.11076923076923077,0.7689510489510489,0.4495104895104895,0.5230769230769231,0.6019580419580419
|
||||
0.11020979020979021,0.772027972027972,0.45174825174825173,0.5236363636363637,0.6134265734265735
|
||||
0.11020979020979021,0.7762237762237763,0.45650349650349653,0.5227972027972028,0.617062937062937
|
||||
0.10993006993006993,0.7703496503496503,0.46237762237762237,0.5230769230769231,0.6195804195804195
|
||||
0.10937062937062937,0.7544055944055944,0.466013986013986,0.5253146853146853,0.6179020979020979
|
||||
0.10853146853146853,0.766993006993007,0.4707692307692308,0.5267132867132868,0.6179020979020979
|
||||
0.10853146853146853,0.7809790209790209,0.47160839160839163,0.5264335664335664,0.6106293706293706
|
||||
0.1076923076923077,0.7820979020979021,0.47160839160839163,0.5230769230769231,0.6
|
||||
0.1076923076923077,0.7820979020979021,0.473006993006993,0.5247552447552447,0.5927272727272728
|
||||
0.10741258741258741,0.7832167832167832,0.4735664335664336,0.5297902097902097,0.5882517482517482
|
||||
0.10657342657342657,0.7826573426573427,0.4763636363636364,0.5289510489510489,0.6030769230769231
|
||||
0.10657342657342657,0.7823776223776224,0.47664335664335666,0.5278321678321678,0.6176223776223776
|
||||
0.10657342657342657,0.784055944055944,0.4797202797202797,0.5295104895104895,0.622937062937063
|
||||
0.1062937062937063,0.7851748251748252,0.481958041958042,0.5306293706293707,0.6246153846153846
|
||||
0.1062937062937063,0.7854545454545454,0.4881118881118881,0.5309090909090909,0.627972027972028
|
||||
0.10601398601398601,0.786013986013986,0.4934265734265734,0.5311888111888112,0.6346853146853146
|
||||
0.10601398601398601,0.7876923076923077,0.49874125874125874,0.5309090909090909,0.6394405594405594
|
||||
0.1062937062937063,0.7846153846153846,0.5012587412587413,0.5311888111888112,0.6383216783216783
|
||||
0.1062937062937063,0.7804195804195804,0.5054545454545455,0.532027972027972,0.6425174825174825
|
||||
0.1062937062937063,0.7384615384615385,0.5082517482517482,0.5323076923076923,0.646993006993007
|
||||
0.1062937062937063,0.7843356643356644,0.5116083916083916,0.5323076923076923,0.6489510489510489
|
||||
0.10601398601398601,0.7932867132867133,0.5141258741258741,0.5325874125874126,0.6495104895104895
|
||||
0.10601398601398601,0.7941258741258741,0.5158041958041958,0.5325874125874126,0.6467132867132868
|
||||
0.10573426573426574,0.7921678321678322,0.5174825174825175,0.532027972027972,0.6481118881118881
|
||||
0.10573426573426574,0.7938461538461539,0.5197202797202797,0.5323076923076923,0.6472727272727272
|
||||
0.10573426573426574,0.7944055944055944,0.5216783216783217,0.532027972027972,0.6478321678321678
|
||||
0.10573426573426574,0.7963636363636364,0.5227972027972028,0.5334265734265734,0.6500699300699301
|
||||
0.10573426573426574,0.7966433566433566,0.525034965034965,0.5328671328671328,0.6517482517482518
|
||||
0.10573426573426574,0.7972027972027972,0.5261538461538462,0.5323076923076923,0.6464335664335664
|
||||
0.10573426573426574,0.798041958041958,0.5306293706293707,0.5325874125874126,0.6408391608391608
|
||||
0.10573426573426574,0.8,0.5325874125874126,0.5334265734265734,0.6436363636363637
|
||||
0.10573426573426574,0.8013986013986014,0.5362237762237763,0.5314685314685315,0.6467132867132868
|
||||
0.10573426573426574,0.8008391608391608,0.5418181818181819,0.5311888111888112,0.6497902097902097
|
||||
0.10545454545454545,0.801958041958042,0.5437762237762238,0.5323076923076923,0.6517482517482518
|
||||
0.10545454545454545,0.8036363636363636,0.5471328671328671,0.533986013986014,0.6523076923076923
|
||||
0.10517482517482518,0.8002797202797203,0.5499300699300699,0.5345454545454545,0.6534265734265734
|
||||
0.1048951048951049,0.7278321678321679,0.551048951048951,0.5323076923076923,0.6551048951048951
|
||||
0.1048951048951049,0.7946853146853147,0.554965034965035,0.5328671328671328,0.6565034965034965
|
||||
0.1048951048951049,0.8064335664335665,0.5574825174825175,0.5345454545454545,0.6579020979020979
|
||||
0.1048951048951049,0.806993006993007,0.56,0.5353846153846153,0.6584615384615384
|
||||
0.1048951048951049,0.808951048951049,0.5641958041958042,0.5351048951048951,0.6562237762237763
|
||||
0.1048951048951049,0.8097902097902098,0.5664335664335665,0.5345454545454545,0.6542657342657343
|
||||
0.10461538461538461,0.8097902097902098,0.5697902097902098,0.5348251748251748,0.6570629370629371
|
||||
0.1048951048951049,0.8120279720279721,0.5787412587412587,0.5356643356643357,0.6559440559440559
|
||||
0.10517482517482518,0.8125874125874126,0.5868531468531468,0.5345454545454545,0.6551048951048951
|
||||
0.10517482517482518,0.8117482517482517,0.5885314685314685,0.5351048951048951,0.6551048951048951
|
||||
0.10517482517482518,0.8081118881118882,0.5893706293706293,0.5356643356643357,0.6570629370629371
|
||||
0.10517482517482518,0.796923076923077,0.5899300699300699,0.5356643356643357,0.6598601398601398
|
||||
0.10573426573426574,0.7882517482517483,0.5893706293706293,0.5365034965034965,0.6629370629370629
|
||||
0.10573426573426574,0.8044755244755245,0.5888111888111888,0.5365034965034965,0.6573426573426573
|
||||
0.10573426573426574,0.8125874125874126,0.5882517482517482,0.5365034965034965,0.6517482517482518
|
||||
0.10573426573426574,0.8137062937062937,0.5888111888111888,0.5365034965034965,0.6483916083916084
|
||||
0.10573426573426574,0.8125874125874126,0.5896503496503497,0.5365034965034965,0.6534265734265734
|
||||
0.10601398601398601,0.8125874125874126,0.5907692307692308,0.5367832167832168,0.6579020979020979
|
||||
0.10601398601398601,0.8131468531468532,0.5904895104895105,0.5365034965034965,0.6595804195804196
|
||||
0.10601398601398601,0.8142657342657342,0.5893706293706293,0.5370629370629371,0.6601398601398601
|
||||
0.1062937062937063,0.8156643356643357,0.587972027972028,0.5373426573426573,0.6626573426573427
|
||||
0.10601398601398601,0.8176223776223777,0.5874125874125874,0.5370629370629371,0.6618181818181819
|
||||
0.10601398601398601,0.8179020979020979,0.5832167832167832,0.5373426573426573,0.6618181818181819
|
||||
0.10601398601398601,0.8184615384615385,0.5815384615384616,0.5373426573426573,0.6620979020979021
|
||||
0.10601398601398601,0.8187412587412587,0.580979020979021,0.5376223776223776,0.6601398601398601
|
||||
0.10601398601398601,0.819020979020979,0.5812587412587412,0.5384615384615384,0.6629370629370629
|
||||
0.10601398601398601,0.8195804195804196,0.5781818181818181,0.5384615384615384,0.6662937062937063
|
||||
0.1062937062937063,0.8198601398601398,0.580979020979021,0.5387412587412588,0.6668531468531469
|
||||
0.1062937062937063,0.820979020979021,0.586013986013986,0.5398601398601398,0.6685314685314685
|
||||
0.10657342657342657,0.8212587412587412,0.5938461538461538,0.5401398601398602,0.6657342657342658
|
||||
0.10657342657342657,0.8223776223776224,0.5974825174825175,0.5401398601398602,0.6665734265734266
|
||||
0.10685314685314685,0.8229370629370629,0.6011188811188811,0.5406993006993007,0.6702097902097902
|
||||
0.10685314685314685,0.8234965034965035,0.6055944055944056,0.5395804195804196,0.6735664335664335
|
||||
0.10685314685314685,0.8232167832167833,0.6095104895104895,0.5398601398601398,0.6721678321678322
|
||||
0.10685314685314685,0.8246153846153846,0.6128671328671329,0.5404195804195804,0.6704895104895104
|
||||
0.10741258741258741,0.826013986013986,0.6156643356643356,0.540979020979021,0.6612587412587413
|
||||
0.1076923076923077,0.8276923076923077,0.6176223776223776,0.5415384615384615,0.6593006993006993
|
||||
0.10797202797202797,0.8237762237762237,0.619020979020979,0.5418181818181819,0.6665734265734266
|
||||
0.10797202797202797,0.7577622377622377,0.619020979020979,0.540979020979021,0.6696503496503496
|
||||
0.10825174825174826,0.8215384615384616,0.6237762237762238,0.540979020979021,0.6704895104895104
|
||||
0.10825174825174826,0.826013986013986,0.6293706293706294,0.5418181818181819,0.6713286713286714
|
||||
0.10853146853146853,0.8276923076923077,0.633006993006993,0.5432167832167832,0.6724475524475525
|
||||
0.10853146853146853,0.8276923076923077,0.6318881118881119,0.5437762237762238,0.6738461538461539
|
||||
0.10909090909090909,0.8282517482517483,0.6310489510489511,0.5420979020979021,0.6738461538461539
|
||||
0.10909090909090909,0.8288111888111888,0.6372027972027972,0.5429370629370629,0.673006993006993
|
||||
0.10909090909090909,0.8304895104895105,0.6397202797202797,0.5448951048951048,0.6732867132867133
|
||||
0.10965034965034966,0.8307692307692308,0.6461538461538462,0.5451748251748252,0.673006993006993
|
||||
0.10993006993006993,0.8316083916083916,0.6495104895104895,0.5434965034965035,0.6755244755244755
|
||||
0.11020979020979021,0.8327272727272728,0.6537062937062937,0.5443356643356644,0.6766433566433566
|
||||
0.11020979020979021,0.8335664335664336,0.6562237762237763,0.546013986013986,0.6791608391608391
|
||||
0.11048951048951049,0.8335664335664336,0.6598601398601398,0.5465734265734266,0.6822377622377622
|
||||
0.11048951048951049,0.8338461538461538,0.6637762237762238,0.5454545454545454,0.6802797202797203
|
||||
0.11132867132867133,0.8344055944055944,0.6662937062937063,0.5465734265734266,0.6811188811188811
|
||||
0.1116083916083916,0.8341258741258741,0.6690909090909091,0.5474125874125874,0.6808391608391609
|
||||
0.11188811188811189,0.8352447552447553,0.6685314685314685,0.5474125874125874,0.6822377622377622
|
||||
0.11216783216783217,0.8346853146853147,0.6696503496503496,0.5468531468531469,0.6836363636363636
|
||||
0.11216783216783217,0.8268531468531468,0.6724475524475525,0.5476923076923077,0.6841958041958042
|
||||
0.11216783216783217,0.7773426573426574,0.6786013986013986,0.5479720279720279,0.6844755244755245
|
||||
0.11216783216783217,0.8318881118881118,0.6811188811188811,0.5502097902097902,0.6853146853146853
|
||||
0.11244755244755245,0.8386013986013986,0.6864335664335665,0.5485314685314685,0.6813986013986014
|
||||
0.11244755244755245,0.8388811188811188,0.6878321678321678,0.5507692307692308,0.6794405594405595
|
||||
0.11244755244755245,0.8369230769230769,0.6895104895104895,0.5521678321678322,0.678041958041958
|
||||
0.11244755244755245,0.8369230769230769,0.6923076923076923,0.5527272727272727,0.6735664335664335
|
||||
0.11244755244755245,0.8383216783216784,0.693986013986014,0.5476923076923077,0.6716083916083916
|
||||
0.11244755244755245,0.8386013986013986,0.6956643356643356,0.5518881118881119,0.6755244755244755
|
||||
0.11244755244755245,0.8391608391608392,0.6970629370629371,0.5541258741258741,0.6777622377622378
|
||||
0.11244755244755245,0.8394405594405594,0.6979020979020979,0.554965034965035,0.6788811188811189
|
||||
0.11244755244755245,0.84,0.7001398601398602,0.5524475524475524,0.6797202797202797
|
||||
0.11244755244755245,0.8366433566433567,0.7046153846153846,0.5541258741258741,0.6788811188811189
|
||||
0.11244755244755245,0.8346853146853147,0.7062937062937062,0.5602797202797203,0.673006993006993
|
||||
0.11216783216783217,0.8296503496503497,0.707972027972028,0.5622377622377622,0.6559440559440559
|
||||
0.11216783216783217,0.8302097902097902,0.7099300699300699,0.5555244755244755,0.6408391608391608
|
||||
0.11216783216783217,0.8332867132867133,0.711048951048951,0.5588811188811189,0.6383216783216783
|
||||
0.11216783216783217,0.8366433566433567,0.713006993006993,0.5686713286713286,0.6467132867132868
|
||||
0.11216783216783217,0.8377622377622378,0.7138461538461538,0.5675524475524476,0.6556643356643357
|
||||
0.11216783216783217,0.8402797202797203,0.7144055944055944,0.5586013986013986,0.6573426573426573
|
||||
0.11216783216783217,0.8433566433566434,0.7155244755244755,0.5622377622377622,0.6604195804195804
|
||||
0.11216783216783217,0.8427972027972028,0.7163636363636363,0.5672727272727273,0.6744055944055944
|
||||
0.11216783216783217,0.84,0.7177622377622378,0.5678321678321678,0.6811188811188811
|
||||
0.11216783216783217,0.8386013986013986,0.7194405594405594,0.5622377622377622,0.6836363636363636
|
||||
0.11244755244755245,0.8313286713286713,0.7194405594405594,0.5672727272727273,0.6841958041958042
|
||||
0.11244755244755245,0.831048951048951,0.7197202797202797,0.5725874125874126,0.6861538461538461
|
||||
0.11272727272727273,0.8388811188811188,0.7216783216783217,0.5770629370629371,0.6855944055944055
|
||||
0.11272727272727273,0.8433566433566434,0.7216783216783217,0.580979020979021,0.6864335664335665
|
||||
0.11272727272727273,0.8441958041958042,0.7208391608391609,0.5946853146853147,0.6906293706293706
|
||||
0.11272727272727273,0.8427972027972028,0.7222377622377623,0.5946853146853147,0.6925874125874126
|
||||
0.113006993006993,0.8422377622377623,0.7233566433566434,0.5941258741258741,0.6962237762237762
|
||||
0.113006993006993,0.8427972027972028,0.7239160839160839,0.5969230769230769,0.6909090909090909
|
||||
0.113006993006993,0.8427972027972028,0.7253146853146853,0.6008391608391609,0.6794405594405595
|
||||
0.113006993006993,0.8461538461538461,0.7272727272727273,0.6022377622377623,0.6794405594405595
|
||||
0.113006993006993,0.8464335664335665,0.7286713286713287,0.6033566433566434,0.6758041958041958
|
||||
0.113006993006993,0.8458741258741259,0.7281118881118881,0.605034965034965,0.6783216783216783
|
||||
0.113006993006993,0.8453146853146853,0.7303496503496504,0.6041958041958042,0.6858741258741259
|
||||
0.113006993006993,0.8394405594405594,0.73006993006993,0.6055944055944056,0.6878321678321678
|
||||
0.113006993006993,0.8402797202797203,0.730909090909091,0.6072727272727273,0.6892307692307692
|
||||
0.113006993006993,0.8430769230769231,0.732027972027972,0.61006993006993,0.6895104895104895
|
||||
0.113006993006993,0.8472727272727273,0.7323076923076923,0.61006993006993,0.6892307692307692
|
||||
0.113006993006993,0.8478321678321679,0.7334265734265735,0.612027972027972,0.6895104895104895
|
||||
0.113006993006993,0.848951048951049,0.733986013986014,0.6139860139860139,0.6861538461538461
|
||||
0.113006993006993,0.8495104895104895,0.7351048951048951,0.6137062937062937,0.6816783216783217
|
||||
0.113006993006993,0.8486713286713287,0.735944055944056,0.6139860139860139,0.6827972027972028
|
||||
0.113006993006993,0.8497902097902098,0.7362237762237762,0.6162237762237762,0.6836363636363636
|
||||
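The file added below, results/bt_1h128n_ReLU_results.csv, logs training_accuracy, validation_accuracy, training_loss and validation_loss over the course of a run (each row appears to record one training epoch), with test_accuracy and test_loss filled in only on the rows where the test set was evaluated. A minimal sketch for loading and plotting these curves, assuming pandas and matplotlib are available and the script is run from the repository root; the snippet itself is not part of the commit:

import pandas as pd
from matplotlib import pyplot as plt

# Load the logged metrics (the filename suggests a 1-hidden-layer, 128-neuron ReLU run).
results = pd.read_csv('results/bt_1h128n_ReLU_results.csv')

# Plot training vs. validation accuracy per row; the test columns are mostly
# empty (read as NaN by pandas) and are simply not used here.
plt.plot(results['training_accuracy'], label='training accuracy')
plt.plot(results['validation_accuracy'], label='validation accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()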
401
results/bt_1h128n_ReLU_results.csv
Normal file
@ -0,0 +1,401 @@
training_accuracy,validation_accuracy,training_loss,validation_loss,test_accuracy,test_loss
|
||||
0.038811188811188814,0.019580419580419582,6.393057707616091,4.424285425524005,0.005,11.902604900906708
|
||||
0.061888111888111885,0.07132867132867132,3.9845982330615697,3.9789311938961127,,
|
||||
0.16783216783216784,0.16783216783216784,3.7523821576223106,3.73892906486598,,
|
||||
0.20104895104895104,0.2111888111888112,3.547375963126129,3.5325973891094073,,
|
||||
0.21573426573426574,0.24335664335664337,3.3889472691926317,3.3784455129417115,,
|
||||
0.23986013986013985,0.27412587412587414,3.2452346036953097,3.2397597229873654,,
|
||||
0.2765734265734266,0.3062937062937063,3.105799184604157,3.105523916826381,,
|
||||
0.30524475524475525,0.3202797202797203,2.974008278291045,2.9807005713549932,,
|
||||
0.32342657342657344,0.3412587412587413,2.851272600241783,2.8633433589565733,,
|
||||
0.34440559440559443,0.35664335664335667,2.734853740721626,2.7516118580217057,,
|
||||
0.3632867132867133,0.36643356643356645,2.621368308580227,2.6435472302556806,,
|
||||
0.38006993006993006,0.38181818181818183,2.509419887826101,2.539612409717918,,
|
||||
0.3996503496503496,0.3986013986013986,2.403726365402373,2.4406362731369544,,
|
||||
0.42202797202797204,0.413986013986014,2.308426181453028,2.3469294625743102,,
|
||||
0.436013986013986,0.42377622377622376,2.2211191866913147,2.2605116663938296,,
|
||||
0.4486013986013986,0.43776223776223777,2.1401110491667907,2.1821801077271625,,
|
||||
0.46678321678321677,0.46293706293706294,2.064477867947346,2.1105243264683984,,
|
||||
0.48391608391608393,0.4727272727272727,1.994366376479573,2.0460212523098784,,
|
||||
0.4972027972027972,0.4783216783216783,1.929564498384421,1.9876044250342433,,
|
||||
0.5087412587412588,0.4881118881118881,1.8693292144897695,1.9341425261979983,,
|
||||
0.5241258741258741,0.49230769230769234,1.8139224030421812,1.8846863061732506,,
|
||||
0.5405594405594406,0.5048951048951049,1.7622262223437983,1.83837352939355,,
|
||||
0.5534965034965035,0.5188811188811189,1.714016525703335,1.7954754044442,,
|
||||
0.563986013986014,0.5328671328671328,1.6687527498049022,1.7546248297241294,,
|
||||
0.5723776223776224,0.5482517482517483,1.6258644869700987,1.7157204875655092,,
|
||||
0.5842657342657342,0.5566433566433566,1.5853954338793133,1.6793711405187335,,
|
||||
0.5947552447552448,0.5706293706293706,1.5469014110749013,1.644938588911164,,
|
||||
0.6055944055944056,0.5818181818181818,1.5102016832585097,1.6123021607656616,,
|
||||
0.6174825174825175,0.5888111888111888,1.4753172002652277,1.581162917786896,,
|
||||
0.6342657342657343,0.6013986013986014,1.4426747107366018,1.5524952879150062,,
|
||||
0.6234265734265734,0.6,1.437213750108359,1.5687059299238513,,
|
||||
0.5961538461538461,0.5678321678321678,1.4814467405677567,1.622348542065846,,
|
||||
0.6223776223776224,0.6027972027972028,1.416683287828632,1.5488620058290425,,
|
||||
0.6384615384615384,0.6125874125874126,1.3745146293628525,1.5070317252590513,,
|
||||
0.6493006993006993,0.6265734265734266,1.3423865754262512,1.4766873403236087,,
|
||||
0.6580419580419581,0.6335664335664336,1.315826644295191,1.4525128796359559,,
|
||||
0.6636363636363637,0.6391608391608392,1.2918495064750344,1.430728196927532,,
|
||||
0.6713286713286714,0.6475524475524476,1.2673976692943372,1.4078775136297337,,
|
||||
0.6786713286713286,0.6531468531468532,1.2440141031244953,1.3860083679370314,,
|
||||
0.6828671328671329,0.6531468531468532,1.2225187419435568,1.3664157247318005,,
|
||||
0.6895104895104895,0.6671328671328671,1.199199180426684,1.3446421905043267,,
|
||||
0.6923076923076923,0.6741258741258741,1.1776850306235038,1.324060313273129,,
|
||||
0.6975524475524476,0.6783216783216783,1.157110950827814,1.3045481230790203,,
|
||||
0.7062937062937062,0.6839160839160839,1.1368607348756332,1.2847942378546264,,
|
||||
0.7118881118881119,0.6937062937062937,1.1165596106772973,1.2646542083734644,,
|
||||
0.7185314685314685,0.7034965034965035,1.0962060621974061,1.244319672831613,,
|
||||
0.7251748251748251,0.7118881118881119,1.0775863118246656,1.2259321395759932,,
|
||||
0.7307692307692307,0.7160839160839161,1.060454814393957,1.2089018768858784,,
|
||||
0.7346153846153847,0.7258741258741259,1.0439702047034873,1.1925118787450188,,
|
||||
0.7384615384615385,0.73006993006993,1.0279636945114625,1.1765035855084014,,
|
||||
0.7426573426573426,0.737062937062937,1.011891048956524,1.1603082123734547,,
|
||||
0.7468531468531469,0.7384615384615385,0.9965589565541074,1.1447441459099315,,
|
||||
0.7520979020979021,0.7412587412587412,0.9812919885739722,1.1291320261250148,,
|
||||
0.7580419580419581,0.7496503496503496,0.966097016893109,1.1136842983266404,,
|
||||
0.7618881118881119,0.7538461538461538,0.9513886433919149,1.0986762135598267,,
|
||||
0.765034965034965,0.7552447552447552,0.9375878985390935,1.0847650421521045,,
|
||||
0.7692307692307693,0.7566433566433567,0.9250631304970762,1.0725547354217941,,
|
||||
0.772027972027972,0.7636363636363637,0.9132519618891549,1.0609879143115475,,
|
||||
0.7762237762237763,0.7636363636363637,0.9015459602772413,1.0493720753096798,,
|
||||
0.7772727272727272,0.7678321678321678,0.8892439639360892,1.037002738019266,,
|
||||
0.7828671328671328,0.7706293706293706,0.8774895355842973,1.0253320508717176,,
|
||||
0.7863636363636364,0.7706293706293706,0.8658329626409835,1.01362667276598,,
|
||||
0.7916083916083916,0.7734265734265734,0.8546794043173643,1.0025531835413701,,
|
||||
0.7933566433566434,0.7734265734265734,0.8438615748964038,0.9917907963972437,,
|
||||
0.7965034965034965,0.7762237762237763,0.8333783921238097,0.9815615944170573,,
|
||||
0.7986013986013986,0.7776223776223776,0.8233494606617316,0.9718676470084168,,
|
||||
0.8,0.7776223776223776,0.8142087045062824,0.9631735404108931,,
|
||||
0.8024475524475524,0.779020979020979,0.8055572904920124,0.9550423453653094,,
|
||||
0.8038461538461539,0.7804195804195804,0.7974506602416759,0.9476143891485486,,
|
||||
0.8076923076923077,0.7832167832167832,0.7891565843722607,0.9398005770691026,,
|
||||
0.8097902097902098,0.786013986013986,0.7811394184752668,0.932329095369298,,
|
||||
0.8122377622377622,0.786013986013986,0.7734192190307038,0.9252579017858467,,
|
||||
0.8129370629370629,0.7846153846153846,0.7663496217726583,0.9189669756880081,,
|
||||
0.8160839160839161,0.7846153846153846,0.7590579658322949,0.9122104559348019,,
|
||||
0.8171328671328671,0.786013986013986,0.7519676036295828,0.9056122843196367,,
|
||||
0.8195804195804196,0.7874125874125875,0.7448404774072959,0.8989313070461413,,
|
||||
0.8213286713286714,0.7888111888111888,0.7376309962277134,0.8920762316789772,,
|
||||
0.8223776223776224,0.7902097902097902,0.7303351495565,0.8850566077378048,,
|
||||
0.8227272727272728,0.7944055944055944,0.7231511121436227,0.8781260192540412,,
|
||||
0.8248251748251748,0.7958041958041958,0.7161559885896078,0.8714681573836499,,
|
||||
0.8276223776223777,0.7986013986013986,0.7094243600002218,0.8651138719138035,,
|
||||
0.8304195804195804,0.7986013986013986,0.7029206627905166,0.8590038894788389,,
|
||||
0.8325174825174825,0.8041958041958042,0.696580610664915,0.8530319481772916,,
|
||||
0.8332167832167832,0.8055944055944056,0.6904835856070884,0.8473692501964367,,
|
||||
0.8356643356643356,0.8055944055944056,0.6845144739985044,0.84183413033758,,
|
||||
0.8367132867132867,0.8083916083916084,0.6787705799214523,0.8366698561563649,,
|
||||
0.8395104895104896,0.8111888111888111,0.673056791191927,0.8315509266005122,,
|
||||
0.8398601398601399,0.8153846153846154,0.6674282584926006,0.8265499888803137,,
|
||||
0.8402097902097903,0.8167832167832167,0.6619811367504673,0.8216820574951014,,
|
||||
0.8405594405594405,0.8181818181818182,0.6566156743804398,0.8168627874070057,,
|
||||
0.8419580419580419,0.8195804195804196,0.6513404906007424,0.8121313969550531,,
|
||||
0.8426573426573427,0.8195804195804196,0.6461879832824878,0.8075395974525217,,
|
||||
0.843006993006993,0.820979020979021,0.6411945668461844,0.8030902823578719,,
|
||||
0.8444055944055944,0.820979020979021,0.6363466944943622,0.798806941469012,,
|
||||
0.8444055944055944,0.8223776223776224,0.6315867126586852,0.794634247701298,,
|
||||
0.8454545454545455,0.8223776223776224,0.6269059404511083,0.7905163497781146,,
|
||||
0.8486013986013986,0.8237762237762237,0.6223473530856064,0.7865190153571869,,
|
||||
0.8506993006993007,0.8265734265734266,0.6178500346577955,0.7825200702889235,,
|
||||
0.8527972027972028,0.8293706293706293,0.6134392565077933,0.7786267436350723,,
|
||||
0.8534965034965035,0.8321678321678322,0.6091360668539197,0.7748232650743433,,
|
||||
0.8545454545454545,0.8335664335664336,0.6049043301108064,0.7711060437287951,,
|
||||
0.8555944055944056,0.8335664335664336,0.6007900290441675,0.7675295154191503,,
|
||||
0.855944055944056,0.8349650349650349,0.5967736590078688,0.7640587461411142,,
|
||||
0.8566433566433567,0.8349650349650349,0.5928488577155713,0.7607200279498646,,
|
||||
0.856993006993007,0.8349650349650349,0.5889981768421532,0.7575087581495752,,
|
||||
0.8573426573426574,0.8363636363636363,0.5852159828739676,0.7542851054170967,,
|
||||
0.8576923076923076,0.8363636363636363,0.5814956022369508,0.7511261697263104,,
|
||||
0.8590909090909091,0.8377622377622378,0.5778345564281717,0.7480160383795313,,
|
||||
0.8597902097902098,0.8377622377622378,0.5742345980334811,0.7449932545258477,,
|
||||
0.8604895104895105,0.8377622377622378,0.5707001226043489,0.7420538727286314,,
|
||||
0.8604895104895105,0.8377622377622378,0.5672242119522065,0.7391782783320443,,
|
||||
0.8611888111888112,0.8377622377622378,0.5638013759074683,0.7363752661699542,,
|
||||
0.8622377622377623,0.8377622377622378,0.5604299662675772,0.733604275237464,,
|
||||
0.8625874125874126,0.8391608391608392,0.5571142482263086,0.730894022004846,,
|
||||
0.8625874125874126,0.8391608391608392,0.553849399115484,0.728285085741934,,
|
||||
0.8639860139860139,0.8391608391608392,0.5506392245832176,0.7256658911191665,,
|
||||
0.8646853146853147,0.8405594405594405,0.5474755631501967,0.7231211968216628,,
|
||||
0.8646853146853147,0.8405594405594405,0.5443572500952057,0.7206277604669946,,
|
||||
0.8653846153846154,0.8405594405594405,0.5412845002956146,0.7181690102994878,,
|
||||
0.8657342657342657,0.8419580419580419,0.5382548130615198,0.7157418499673489,,
|
||||
0.8664335664335664,0.8433566433566434,0.5352669309251715,0.7133577600077438,,
|
||||
0.8685314685314686,0.8447552447552448,0.5323167199800389,0.7109988416866246,,
|
||||
0.8692307692307693,0.8461538461538461,0.5294013825862169,0.7086920098789949,,
|
||||
0.8699300699300699,0.8475524475524475,0.5265268388557007,0.7064247287167126,,
|
||||
0.8702797202797202,0.8475524475524475,0.5236924884166508,0.7041684412943551,,
|
||||
0.8716783216783217,0.8475524475524475,0.520896140989342,0.7019681432505747,,
|
||||
0.8723776223776224,0.8475524475524475,0.518135288154923,0.6998087590791549,,
|
||||
0.8730769230769231,0.8475524475524475,0.5154073462119959,0.697645904530215,,
|
||||
0.8734265734265734,0.848951048951049,0.5127118227655683,0.6955359460654298,,
|
||||
0.8741258741258742,0.848951048951049,0.5100503673283945,0.693450345407703,,
|
||||
0.8744755244755245,0.848951048951049,0.5074169591655656,0.6914004053415536,,
|
||||
0.8755244755244755,0.8517482517482518,0.5048139070950527,0.6893822908073006,,
|
||||
0.8758741258741258,0.8503496503496504,0.5022388203367849,0.6874237647178243,,
|
||||
0.8765734265734266,0.8503496503496504,0.4996961723903023,0.6854394899477907,,
|
||||
0.8765734265734266,0.8503496503496504,0.4971854634381349,0.68352695343613,,
|
||||
0.877972027972028,0.8503496503496504,0.49470493715832464,0.6816194363882497,,
|
||||
0.8786713286713287,0.8503496503496504,0.4922539434752675,0.6797341153065294,,
|
||||
0.879020979020979,0.8517482517482518,0.4898315704029924,0.6778677859835772,,
|
||||
0.8800699300699301,0.8517482517482518,0.4874376335899764,0.676024636628574,,
|
||||
0.8807692307692307,0.8517482517482518,0.48507227721831425,0.6742008994352066,,
|
||||
0.8807692307692307,0.8517482517482518,0.4827295363648105,0.6724167602449469,,
|
||||
0.8818181818181818,0.8517482517482518,0.48041554606960396,0.6706431109694873,,
|
||||
0.8825174825174825,0.8531468531468531,0.47812807566959203,0.6688950412303597,,
|
||||
0.8828671328671329,0.8531468531468531,0.47586494112641514,0.6671813634884427,,
|
||||
0.8828671328671329,0.8531468531468531,0.47362707061352943,0.6654690120247269,,
|
||||
0.8832167832167832,0.8531468531468531,0.47141449691046233,0.6637974796216513,,
|
||||
0.8835664335664336,0.8517482517482518,0.46922742141935825,0.6621791645478712,,
|
||||
0.8839160839160839,0.8531468531468531,0.4670554085026014,0.6605096170197975,,
|
||||
0.8853146853146853,0.8531468531468531,0.4649079287597316,0.6588856698181031,,
|
||||
0.8860139860139861,0.8545454545454545,0.4627844019115648,0.6573029446271528,,
|
||||
0.8863636363636364,0.8545454545454545,0.4606837656533336,0.6557227610427312,,
|
||||
0.8867132867132868,0.8545454545454545,0.4586044737529252,0.6541503632788631,,
|
||||
0.8874125874125874,0.855944055944056,0.4565457056153967,0.6526087081756855,,
|
||||
0.8874125874125874,0.855944055944056,0.45450031437603217,0.6511156807157686,,
|
||||
0.8881118881118881,0.8573426573426574,0.452469631223566,0.6496579647015781,,
|
||||
0.8881118881118881,0.8573426573426574,0.45043736119726824,0.6481775940109681,,
|
||||
0.8884615384615384,0.855944055944056,0.44842718903608775,0.6467032517142088,,
|
||||
0.8888111888111888,0.8545454545454545,0.4464307357485297,0.6452403676155243,,
|
||||
0.8895104895104895,0.8545454545454545,0.4444547818783156,0.6438218038608309,,
|
||||
0.8895104895104895,0.8545454545454545,0.4424953376969365,0.6423810112788174,,
|
||||
0.8895104895104895,0.8545454545454545,0.44055643028671193,0.6409952419489282,,
|
||||
0.8909090909090909,0.8545454545454545,0.43863776812521277,0.6396045339447246,,
|
||||
0.8916083916083916,0.855944055944056,0.43673839282594384,0.6382439837401861,,
|
||||
0.8923076923076924,0.855944055944056,0.4348567633176291,0.6368913686968546,,
|
||||
0.8926573426573426,0.855944055944056,0.4329933826878536,0.6355552678264379,,
|
||||
0.8933566433566433,0.855944055944056,0.43114814780487865,0.6342286874365796,,
|
||||
0.8937062937062937,0.855944055944056,0.42932036775475707,0.6329448679387268,,
|
||||
0.8937062937062937,0.855944055944056,0.4275090771066927,0.6316505883129272,,
|
||||
0.8944055944055944,0.855944055944056,0.42571281405848455,0.6303612498500293,,
|
||||
0.8944055944055944,0.855944055944056,0.4239325735285791,0.6290800531887386,,
|
||||
0.8944055944055944,0.855944055944056,0.42216648298555526,0.6278178449316542,,
|
||||
0.8951048951048951,0.855944055944056,0.4204148081434475,0.6265670036792441,,
|
||||
0.8951048951048951,0.855944055944056,0.4186784305858992,0.625335890551962,,
|
||||
0.8958041958041958,0.855944055944056,0.4169577559985044,0.6241198412982444,,
|
||||
0.8968531468531469,0.855944055944056,0.41525346412907493,0.62291000404537,,
|
||||
0.8968531468531469,0.855944055944056,0.41356243263670045,0.6217139501631839,,
|
||||
0.8968531468531469,0.855944055944056,0.41188512504531605,0.6205233751222432,,
|
||||
0.8989510489510489,0.855944055944056,0.4102159602499308,0.6193594557306982,,
|
||||
0.8993006993006993,0.8573426573426574,0.4085575168400502,0.6181767234483239,,
|
||||
0.9,0.8573426573426574,0.406898091460201,0.6170434103665362,,
|
||||
0.9,0.8573426573426574,0.4052500427150192,0.6158886398513813,,
|
||||
0.901048951048951,0.8573426573426574,0.4036157452162119,0.6147516539846416,,
|
||||
0.901048951048951,0.8587412587412587,0.4019939478652895,0.6136297287817403,,
|
||||
0.9013986013986014,0.8587412587412587,0.400385182512734,0.6125080100137715,,
|
||||
0.9020979020979021,0.8587412587412587,0.39879080421913493,0.6114125972892116,,
|
||||
0.9020979020979021,0.8601398601398601,0.39720657925460967,0.6103201241308411,,
|
||||
0.9024475524475525,0.8601398601398601,0.39563412527779157,0.6092339273235348,,
|
||||
0.9031468531468532,0.8601398601398601,0.3940736903916854,0.6081884836850824,,
|
||||
0.9034965034965035,0.8615384615384616,0.39252519715472944,0.6071068279271707,,
|
||||
0.9038461538461539,0.8615384615384616,0.3909877153192137,0.6060480518254954,,
|
||||
0.9038461538461539,0.862937062937063,0.3894634719418215,0.6049849581883495,,
|
||||
0.9041958041958041,0.862937062937063,0.38795354075106675,0.6039482257486838,,
|
||||
0.9041958041958041,0.862937062937063,0.38645505410023534,0.6029205510591219,,
|
||||
0.9045454545454545,0.862937062937063,0.38496941086665315,0.6018983393577093,,
|
||||
0.9055944055944056,0.8615384615384616,0.38349550107534575,0.600883117240084,,
|
||||
0.9059440559440559,0.8601398601398601,0.38203256190047524,0.5998768645013188,,
|
||||
0.9062937062937063,0.8615384615384616,0.38058017939908584,0.5988757069951127,,
|
||||
0.906993006993007,0.8615384615384616,0.3791385133872387,0.5978921137623348,,
|
||||
0.9073426573426573,0.8615384615384616,0.3777074628405843,0.5969167016464789,,
|
||||
0.9083916083916084,0.8615384615384616,0.3762867127398134,0.5959361915832547,,
|
||||
0.9087412587412588,0.8615384615384616,0.3748756581619316,0.5949720305783379,,
|
||||
0.9094405594405595,0.8615384615384616,0.3734750051104741,0.5940171252536561,,
|
||||
0.9097902097902097,0.8615384615384616,0.3720843518557027,0.5930810535764094,,
|
||||
0.9101398601398601,0.8615384615384616,0.37070386923285203,0.5921452489906264,,
|
||||
0.9104895104895104,0.8615384615384616,0.36933285928928034,0.5912052891622114,,
|
||||
0.9104895104895104,0.8615384615384616,0.36797075838619686,0.590285715917607,,
|
||||
0.9108391608391608,0.8615384615384616,0.36661633242485114,0.589347585844113,,
|
||||
0.9111888111888112,0.8615384615384616,0.365271276215104,0.5884337634993637,,
|
||||
0.9108391608391608,0.8615384615384616,0.36393404164337123,0.5875374263164678,,
|
||||
0.9115384615384615,0.862937062937063,0.36260413636138566,0.5866329908531599,,
|
||||
0.9115384615384615,0.862937062937063,0.36128325362782066,0.5857386605438457,,
|
||||
0.9118881118881119,0.8643356643356643,0.35997168402294116,0.5848474960011882,,
|
||||
0.9118881118881119,0.8643356643356643,0.35866835402869146,0.5839650252794272,,
|
||||
0.9122377622377622,0.862937062937063,0.3573746174438719,0.5830943423303864,,
|
||||
0.9122377622377622,0.862937062937063,0.3560890381179709,0.5822084242682377,,
|
||||
0.9125874125874126,0.8643356643356643,0.3548117107886054,0.5813361985195449,,
|
||||
0.9125874125874126,0.8643356643356643,0.35354272001452897,0.5804842186173711,,
|
||||
0.9132867132867133,0.8643356643356643,0.35228213118887847,0.5796388589302973,,
|
||||
0.9129370629370629,0.8657342657342657,0.3510305015289124,0.5788000920701826,,
|
||||
0.9132867132867133,0.8657342657342657,0.34978725447285536,0.5779639686260436,,
|
||||
0.9136363636363637,0.8657342657342657,0.34855258242001275,0.5771436947701198,,
|
||||
0.9136363636363637,0.8657342657342657,0.3473261962597384,0.5763246944431208,,
|
||||
0.9143356643356644,0.8643356643356643,0.34610770499571075,0.5755039778456756,,
|
||||
0.9150349650349651,0.8643356643356643,0.34489688928493745,0.5746902058065239,,
|
||||
0.9157342657342658,0.8643356643356643,0.3436939449521259,0.5738845449246318,,
|
||||
0.916083916083916,0.8643356643356643,0.34249707510223026,0.5730974997114726,,
|
||||
0.9164335664335664,0.8643356643356643,0.341305080949274,0.5722956460471083,,
|
||||
0.9171328671328671,0.8671328671328671,0.340120857750397,0.571499768418261,,
|
||||
0.9178321678321678,0.8671328671328671,0.3389439123536763,0.5707209968219016,,
|
||||
0.9181818181818182,0.8671328671328671,0.3377704311176365,0.5699469948963801,,
|
||||
0.9188811188811189,0.8671328671328671,0.33660382953040846,0.5691842260043686,,
|
||||
0.9192307692307692,0.8671328671328671,0.33544495271640934,0.5684083159088281,,
|
||||
0.9195804195804196,0.8685314685314686,0.33429209162819556,0.5676390381742121,,
|
||||
0.91993006993007,0.8699300699300699,0.33314775986454354,0.5668703391558941,,
|
||||
0.9195804195804196,0.8699300699300699,0.33201067315866495,0.5661116138429493,,
|
||||
0.9195804195804196,0.8699300699300699,0.33088039950817705,0.5653593877541289,,
|
||||
0.91993006993007,0.8699300699300699,0.32975694782205633,0.5646051010858683,,
|
||||
0.91993006993007,0.8713286713286713,0.32863735793766247,0.5638656014298777,,
|
||||
0.91993006993007,0.8727272727272727,0.32752344806047284,0.5631235535873936,,
|
||||
0.9206293706293707,0.8727272727272727,0.3264158062807729,0.5623914978511804,,
|
||||
0.920979020979021,0.8741258741258742,0.32531426100992744,0.5616637767237913,,
|
||||
0.920979020979021,0.8741258741258742,0.3242191435102792,0.5609416428156083,,
|
||||
0.920979020979021,0.8741258741258742,0.3231307871136036,0.5602363302788812,,
|
||||
0.9213286713286714,0.8741258741258742,0.32204859585828155,0.5595288798580862,,
|
||||
0.9213286713286714,0.8741258741258742,0.3209712942832012,0.5588207801008908,,
|
||||
0.9216783216783216,0.8741258741258742,0.3198989188756257,0.5581068866211645,,
|
||||
0.922027972027972,0.8727272727272727,0.31883293871712914,0.5574146127510091,,
|
||||
0.922027972027972,0.8727272727272727,0.31777292779615823,0.5567252239176785,,
|
||||
0.922027972027972,0.8727272727272727,0.31671848501341315,0.5560573190550208,,
0.9223776223776223,0.8727272727272727,0.3156702465974988,0.555373204793962,,
0.9227272727272727,0.8727272727272727,0.31462780428052806,0.5547042586488047,,
0.9223776223776223,0.8727272727272727,0.3135902329012098,0.5540355287787795,,
0.9223776223776223,0.8727272727272727,0.3125581931275951,0.5533992035494814,,
0.9227272727272727,0.8727272727272727,0.31153089988401667,0.5527303815241956,,
0.9227272727272727,0.8727272727272727,0.31050873515141525,0.5520759277277298,,
0.9234265734265734,0.8713286713286713,0.3094926960568246,0.5514250585461603,,
0.9237762237762238,0.8713286713286713,0.3084825796180311,0.5507872209482763,,
0.9241258741258741,0.8713286713286713,0.307477723363289,0.5501267275276988,,
0.9251748251748252,0.8713286713286713,0.3064775953614192,0.5494841713621249,,
0.9251748251748252,0.8713286713286713,0.30548290981153753,0.5488512614475514,,
0.9255244755244755,0.8713286713286713,0.30449320772269234,0.548219854423086,,
0.9262237762237763,0.8713286713286713,0.30350798174280336,0.5476044036385928,,
0.9262237762237763,0.8713286713286713,0.3025281337224161,0.5469665261667487,,
0.9262237762237763,0.8713286713286713,0.3015454302007661,0.5463863912708424,,
0.926923076923077,0.8713286713286713,0.3005666440823029,0.5458018715681543,,
0.926923076923077,0.8713286713286713,0.29959446711445936,0.5451988139412536,,
0.9272727272727272,0.8713286713286713,0.2986271796296898,0.544641032465172,,
0.9272727272727272,0.8713286713286713,0.29766554567136755,0.5440459770152157,,
0.9279720279720279,0.8713286713286713,0.29670601108686867,0.5434399319144567,,
0.9279720279720279,0.8713286713286713,0.2957500274674958,0.5428167553063393,,
0.9283216783216783,0.8713286713286713,0.2947972592759179,0.5422030241280202,,
0.9283216783216783,0.8713286713286713,0.29384654004475,0.5415536360666485,,
0.9286713286713286,0.8713286713286713,0.29290217944483776,0.5409500887008593,,
0.929020979020979,0.8713286713286713,0.29196186414518716,0.5403387903773804,,
0.9300699300699301,0.8727272727272727,0.29102503554400694,0.5396835525871049,,
0.9300699300699301,0.8741258741258742,0.2900943090327005,0.5391033038791131,,
0.9304195804195804,0.8741258741258742,0.28916726103636153,0.5384695911425714,,
0.9307692307692308,0.8741258741258742,0.288245724077909,0.5378678107954935,,
0.9314685314685315,0.8741258741258742,0.2873306433322575,0.5373341737005595,,
0.9318181818181818,0.8755244755244755,0.28641889350956357,0.5367329810562751,,
0.9314685314685315,0.8755244755244755,0.2855126951331183,0.536187969524412,,
0.9318181818181818,0.8755244755244755,0.2846101960810131,0.535593149771985,,
0.9321678321678322,0.8755244755244755,0.2837133434626587,0.5350530863790872,,
0.9321678321678322,0.8755244755244755,0.2828202437448496,0.5344872873754231,,
0.9321678321678322,0.8755244755244755,0.2819315358910068,0.5339255270893254,,
0.9325174825174826,0.8755244755244755,0.28104614066211225,0.5333324544262709,,
0.9325174825174826,0.8755244755244755,0.28016619883738597,0.5327824765353293,,
0.9325174825174826,0.8755244755244755,0.27929024413352416,0.5322547062820385,,
0.9328671328671329,0.8755244755244755,0.2784176884436498,0.5316715182860199,,
0.9328671328671329,0.8755244755244755,0.27755005772841435,0.5311347230580158,,
0.9328671328671329,0.8755244755244755,0.27668635250210516,0.5306187137648718,,
0.9332167832167833,0.8755244755244755,0.27582629399259717,0.5300537833912206,,
0.9335664335664335,0.8755244755244755,0.2749699735961338,0.5295114368462083,,
0.9335664335664335,0.8755244755244755,0.27411847175831655,0.529013926114464,,
0.9335664335664335,0.8755244755244755,0.27327029526896596,0.5284588667604068,,
0.9335664335664335,0.8755244755244755,0.27242610074794715,0.5279334913956336,,
0.9339160839160839,0.8755244755244755,0.2715855110330222,0.5274096769613312,,
0.9342657342657342,0.8755244755244755,0.27074833895445777,0.5268795051360914,,
0.9349650349650349,0.8755244755244755,0.2699154083134741,0.5263652439287603,,
0.9353146853146853,0.8755244755244755,0.26908656543052495,0.5258601024137561,,
0.9356643356643357,0.8755244755244755,0.268261543417423,0.5253622020806207,,
0.9356643356643357,0.8755244755244755,0.2674404547722463,0.5248796019034135,,
0.9356643356643357,0.8755244755244755,0.2666220735310435,0.5243675326894884,,
0.936013986013986,0.8755244755244755,0.2658079064059659,0.523883199285919,,
0.9367132867132867,0.8755244755244755,0.26499831429151693,0.5234273170724312,,
0.9367132867132867,0.8755244755244755,0.2641911169588695,0.5229026826834856,,
0.9370629370629371,0.8755244755244755,0.2633881167749007,0.5224164187545928,,
0.9370629370629371,0.8755244755244755,0.2625903619321901,0.5219444242008332,,
0.9370629370629371,0.8769230769230769,0.2617960390867563,0.5214690488959348,,
0.9374125874125874,0.8769230769230769,0.2610033826365655,0.5209556929099863,,
0.9384615384615385,0.8783216783216783,0.2602151883813818,0.5205016592816051,,
0.9384615384615385,0.8783216783216783,0.2594290633327102,0.5200009602170742,,
0.9384615384615385,0.8783216783216783,0.2586479112751194,0.5195603986159505,,
0.9388111888111889,0.8783216783216783,0.2578692532430515,0.5190848403296827,,
0.9391608391608391,0.8783216783216783,0.2570942154297703,0.5186164255767688,,
0.9395104895104895,0.8783216783216783,0.25632218127652484,0.5181446318685636,,
0.9395104895104895,0.8783216783216783,0.25555364795386354,0.5176767704205878,,
0.9398601398601398,0.8783216783216783,0.2547883408940661,0.517203959168206,,
0.9402097902097902,0.8783216783216783,0.2540264087633706,0.5167361903054182,,
0.9409090909090909,0.8783216783216783,0.25326802066782006,0.5162900027488889,,
0.9409090909090909,0.8783216783216783,0.2525141043308968,0.5158659485242324,,
0.9409090909090909,0.8783216783216783,0.2517626291101916,0.515419492292404,,
0.9409090909090909,0.8783216783216783,0.25101217572749074,0.5149645329145646,,
0.9412587412587412,0.8783216783216783,0.25026634915699525,0.5145151260160205,,
0.9412587412587412,0.8783216783216783,0.24952531500485747,0.5141111703028366,,
0.9416083916083916,0.8783216783216783,0.24878437957180108,0.5136431536941855,,
0.941958041958042,0.8783216783216783,0.24804875559467976,0.5132385994981842,,
0.9423076923076923,0.8783216783216783,0.24731445054638088,0.5127814691061278,,
0.9426573426573427,0.8783216783216783,0.24658531452721807,0.5123881008677809,,
0.943006993006993,0.8783216783216783,0.24585810251741305,0.5119597289008176,,
0.9433566433566434,0.8783216783216783,0.2451341999281766,0.5115366916563869,,
0.9440559440559441,0.8797202797202798,0.2444124796187053,0.5111041373032766,,
0.9447552447552447,0.8797202797202798,0.24369459955805786,0.5106730159138082,,
0.9447552447552447,0.8797202797202798,0.24297979189321878,0.5102565738991465,,
0.9451048951048951,0.8797202797202798,0.24226752435360183,0.5098373039370909,,
0.9458041958041958,0.8797202797202798,0.241558463154129,0.5094272536930845,,
0.9458041958041958,0.8797202797202798,0.24085226989327252,0.5090241317776396,,
0.9461538461538461,0.8811188811188811,0.24014876117722161,0.5086057582250901,,
0.9472027972027972,0.8811188811188811,0.23944483213705842,0.5082245948653248,,
0.9479020979020979,0.8825174825174825,0.23874465192417485,0.5078361363388749,,
0.9482517482517483,0.8825174825174825,0.23804733417015583,0.5074346124518522,,
0.9486013986013986,0.8825174825174825,0.23735308316095222,0.5070237675889876,,
0.9486013986013986,0.8825174825174825,0.23666086982687237,0.5065915256695374,,
0.9486013986013986,0.8825174825174825,0.23597283645138092,0.5061958410568818,,
0.9493006993006993,0.8825174825174825,0.23528926206892659,0.5058451555464295,,
0.9493006993006993,0.8811188811188811,0.23460577710795597,0.5054123077380466,,
0.9493006993006993,0.8811188811188811,0.2339051689620153,0.5049346451337071,,
0.9503496503496504,0.8811188811188811,0.23320987015378572,0.5045207489661953,,
0.9503496503496504,0.8811188811188811,0.2325192591121786,0.5041074209020331,,
0.951048951048951,0.8811188811188811,0.2318326574139189,0.503704096666146,,
0.951048951048951,0.8811188811188811,0.23114910168633637,0.5033033796440842,,
0.951048951048951,0.8811188811188811,0.23046941488778458,0.5029076562542335,,
0.9513986013986014,0.8811188811188811,0.22979304226854033,0.5025141136631591,,
0.9517482517482517,0.8811188811188811,0.22911874913207683,0.5021271409973774,,
0.9517482517482517,0.8811188811188811,0.2284476762093357,0.5017415194880208,,
0.9520979020979021,0.8825174825174825,0.22777980768938585,0.501357511785521,,
0.9520979020979021,0.8811188811188811,0.22711457718670486,0.5009701892858283,,
0.9520979020979021,0.8811188811188811,0.2264520578004723,0.500591569316164,,
0.9527972027972028,0.8811188811188811,0.2257923308735793,0.5002115286513854,,
0.9527972027972028,0.8811188811188811,0.22513535170931148,0.4998388248436576,,
0.9534965034965035,0.8811188811188811,0.2244801643784773,0.49946456292572083,,
0.9538461538461539,0.8811188811188811,0.22382771806159313,0.49909291664112937,,
0.9538461538461539,0.8811188811188811,0.22317780975599977,0.49872109780279966,,
0.9538461538461539,0.8811188811188811,0.222530718084159,0.49835008327672053,,
0.9541958041958042,0.8825174825174825,0.22188625733287293,0.49798575564162734,,
0.9545454545454546,0.8811188811188811,0.22124480224148343,0.49762249203759085,,
0.9545454545454546,0.8811188811188811,0.22060575976549385,0.4972555860818327,,
0.9545454545454546,0.8825174825174825,0.21996934578415048,0.4968967948999989,,
0.9545454545454546,0.8825174825174825,0.219335796199205,0.49653188350606553,,
0.9545454545454546,0.8825174825174825,0.21870476075627662,0.4961795193318407,,
0.9545454545454546,0.8825174825174825,0.21807618845996243,0.4958205407324051,,
0.9545454545454546,0.8825174825174825,0.21745036383378033,0.49546419116270235,,
0.9545454545454546,0.8825174825174825,0.21682875302698007,0.49508659829221396,,
0.9545454545454546,0.8825174825174825,0.21620867951756872,0.49473479367147044,,
0.9545454545454546,0.8825174825174825,0.21558917285809176,0.4944048029036305,,
0.9545454545454546,0.8825174825174825,0.21497292796095102,0.4940688917086719,,
0.9548951048951049,0.8825174825174825,0.2143589095370103,0.4937266980665681,,
0.9548951048951049,0.8825174825174825,0.2137472961575646,0.49335317532756523,,
0.9552447552447553,0.8825174825174825,0.2131380096141815,0.49301012384241105,,
0.9552447552447553,0.8811188811188811,0.21251040125425807,0.49253933912041986,,
0.9555944055944056,0.8811188811188811,0.2118786487354526,0.4921451679790616,,
0.9555944055944056,0.8811188811188811,0.21125287567225542,0.4917602492857801,,
0.9552447552447553,0.8811188811188811,0.21063147819042174,0.49138574069176655,,
0.9552447552447553,0.8811188811188811,0.2100147713154583,0.491024461472506,,
0.9552447552447553,0.8811188811188811,0.20940178718499622,0.4906505796110533,,
0.9552447552447553,0.8811188811188811,0.2087907375536904,0.49029555905968614,,
0.955944055944056,0.8825174825174825,0.20818658874242277,0.48993115968003975,,
0.955944055944056,0.8825174825174825,0.20758326860040371,0.4895818410748453,,
0.955944055944056,0.8825174825174825,0.20698108041128366,0.48923237967908956,,
0.955944055944056,0.8825174825174825,0.20638500731874315,0.4888777133174856,,
0.955944055944056,0.8825174825174825,0.20579041667611342,0.4885208516157874,,
0.955944055944056,0.8825174825174825,0.20519719221659524,0.4881837717290209,,
0.9562937062937062,0.8825174825174825,0.20460762555237358,0.48783513146340163,,
0.9562937062937062,0.8825174825174825,0.20401783228554077,0.48750201435589013,,
0.9566433566433566,0.8825174825174825,0.203434238022485,0.4871461636950177,,
0.9566433566433566,0.8825174825174825,0.2028517055874286,0.48680227668835635,,
0.956993006993007,0.8825174825174825,0.20227101188606883,0.486467956625879,,
0.956993006993007,0.8825174825174825,0.20169035173258237,0.4861477884105082,,
0.9576923076923077,0.8839160839160839,0.20111560472671297,0.4858101489967659,,
0.9576923076923077,0.8853146853146853,0.2005416171599293,0.4854822204570802,,
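The rows above are the tail of a per-iteration training log committed as a CSV file. As a convenience, here is a minimal sketch of how such a log could be loaded and plotted with the same pandas/matplotlib stack used in the notebooks. It assumes (not confirmed by this diff) that the file has no header row, that its first four columns are train accuracy, validation accuracy, train loss and validation loss, and that the trailing empty columns can be ignored; the filename 'cro_training_log.csv' is hypothetical.

# Minimal sketch, assuming a headerless log whose first four columns are
# train accuracy, validation accuracy, train loss, validation loss.
# The filename below is hypothetical; the diff does not show it.
import pandas as pd
from matplotlib import pyplot as plt

log = pd.read_csv("cro_training_log.csv", header=None, usecols=[0, 1, 2, 3])

fig, (ax_acc, ax_loss) = plt.subplots(1, 2, figsize=(10, 4))

# Accuracy curves (column 0 = train, column 1 = validation, assumed).
ax_acc.plot(log[0], label="train")
ax_acc.plot(log[1], label="validation")
ax_acc.set_xlabel("logged step")
ax_acc.set_ylabel("accuracy")
ax_acc.legend()

# Loss curves (column 2 = train, column 3 = validation, assumed).
ax_loss.plot(log[2], label="train")
ax_loss.plot(log[3], label="validation")
ax_loss.set_xlabel("logged step")
ax_loss.set_ylabel("loss")
ax_loss.legend()

plt.tight_layout()
plt.show()

Read this way, the logged run ends around 0.958 train accuracy and 0.885 validation accuracy, with both losses still decreasing slowly.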
BIN
weights/bel_weights.npz
Normal file
Binary file not shown.
BIN
weights/bt_1h128n_ReLU_weights.npz
Normal file
Binary file not shown.
BIN
weights/bt_weights.npz
Normal file
Binary file not shown.
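The .npz files above are binary and not rendered in the diff, so the stored array names are not visible here. A hedged sketch of how they might be restored, assuming they were written with np.savez using the parameter names from the notebooks (W1, b1, W2, b2) — if the actual keys differ, inspect weights.files first:

# Minimal sketch, assuming the archive was saved roughly as
#   np.savez('weights/bel_weights.npz', W1=W1, b1=b1, W2=W2, b2=b2)
# (the real key names are an assumption; list them with weights.files).
import numpy as np

with np.load("weights/bel_weights.npz") as weights:
    W1, b1 = weights["W1"], weights["b1"]
    W2, b2 = weights["W2"], weights["b2"]

def predict(X, W1, b1, W2, b2):
    # Same forward pass as in the notebooks: hidden ReLU layer, softmax output.
    Z1 = W1.dot(X) + b1
    A1 = np.maximum(Z1, 0)                          # ReLU
    Z2 = W2.dot(A1) + b2
    A2 = np.exp(Z2) / np.sum(np.exp(Z2), axis=0)    # column-wise softmax
    return np.argmax(A2, axis=0)                    # predicted class per sample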