diff --git a/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/simple_wrd2vec-checkpoint.ipynb b/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/simple_wrd2vec-checkpoint.ipynb
new file mode 100644
index 0000000..fc05c8a
--- /dev/null
+++ b/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/simple_wrd2vec-checkpoint.ipynb
@@ -0,0 +1,5378 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import numpy as np"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Source "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "https://nathanrooy.github.io/posts/2018-03-22/word2vec-from-scratch-with-python-and-numpy/"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Code"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "EPOCH: 0 LOSS: 115.711946046\n",
+      "EPOCH: 1 LOSS: 114.736293974\n",
+      "EPOCH: 2 LOSS: 113.832568632\n",
+      "EPOCH: 3 LOSS: 112.992428501\n",
+      "EPOCH: 4 LOSS: 112.208726729\n",
+      "[... per-epoch training log elided: loss decreases monotonically from 115.71 at epoch 0 to about 61.27 by epoch 1725 ...]\n",
+      "EPOCH: 1725 LOSS: 61.267986103\n",
+
"EPOCH: 1726 LOSS: 61.2678609891\n", + "EPOCH: 1727 LOSS: 61.2677360386\n", + "EPOCH: 1728 LOSS: 61.267611251\n", + "EPOCH: 1729 LOSS: 61.2674866259\n", + "EPOCH: 1730 LOSS: 61.267362163\n", + "EPOCH: 1731 LOSS: 61.2672378618\n", + "EPOCH: 1732 LOSS: 61.2671137219\n", + "EPOCH: 1733 LOSS: 61.266989743\n", + "EPOCH: 1734 LOSS: 61.2668659245\n", + "EPOCH: 1735 LOSS: 61.2667422663\n", + "EPOCH: 1736 LOSS: 61.2666187677\n", + "EPOCH: 1737 LOSS: 61.2664954285\n", + "EPOCH: 1738 LOSS: 61.2663722482\n", + "EPOCH: 1739 LOSS: 61.2662492264\n", + "EPOCH: 1740 LOSS: 61.2661263628\n", + "EPOCH: 1741 LOSS: 61.266003657\n", + "EPOCH: 1742 LOSS: 61.2658811085\n", + "EPOCH: 1743 LOSS: 61.2657587171\n", + "EPOCH: 1744 LOSS: 61.2656364822\n", + "EPOCH: 1745 LOSS: 61.2655144035\n", + "EPOCH: 1746 LOSS: 61.2653924806\n", + "EPOCH: 1747 LOSS: 61.2652707132\n", + "EPOCH: 1748 LOSS: 61.2651491009\n", + "EPOCH: 1749 LOSS: 61.2650276432\n", + "EPOCH: 1750 LOSS: 61.2649063398\n", + "EPOCH: 1751 LOSS: 61.2647851904\n", + "EPOCH: 1752 LOSS: 61.2646641944\n", + "EPOCH: 1753 LOSS: 61.2645433517\n", + "EPOCH: 1754 LOSS: 61.2644226617\n", + "EPOCH: 1755 LOSS: 61.2643021242\n", + "EPOCH: 1756 LOSS: 61.2641817387\n", + "EPOCH: 1757 LOSS: 61.2640615048\n", + "EPOCH: 1758 LOSS: 61.2639414223\n", + "EPOCH: 1759 LOSS: 61.2638214907\n", + "EPOCH: 1760 LOSS: 61.2637017097\n", + "EPOCH: 1761 LOSS: 61.2635820788\n", + "EPOCH: 1762 LOSS: 61.2634625978\n", + "EPOCH: 1763 LOSS: 61.2633432663\n", + "EPOCH: 1764 LOSS: 61.2632240839\n", + "EPOCH: 1765 LOSS: 61.2631050502\n", + "EPOCH: 1766 LOSS: 61.2629861649\n", + "EPOCH: 1767 LOSS: 61.2628674276\n", + "EPOCH: 1768 LOSS: 61.262748838\n", + "EPOCH: 1769 LOSS: 61.2626303957\n", + "EPOCH: 1770 LOSS: 61.2625121003\n", + "EPOCH: 1771 LOSS: 61.2623939516\n", + "EPOCH: 1772 LOSS: 61.262275949\n", + "EPOCH: 1773 LOSS: 61.2621580924\n", + "EPOCH: 1774 LOSS: 61.2620403813\n", + "EPOCH: 1775 LOSS: 61.2619228154\n", + "EPOCH: 1776 LOSS: 61.2618053943\n", + "EPOCH: 1777 LOSS: 61.2616881177\n", + "EPOCH: 1778 LOSS: 61.2615709853\n", + "EPOCH: 1779 LOSS: 61.2614539966\n", + "EPOCH: 1780 LOSS: 61.2613371514\n", + "EPOCH: 1781 LOSS: 61.2612204493\n", + "EPOCH: 1782 LOSS: 61.26110389\n", + "EPOCH: 1783 LOSS: 61.2609874731\n", + "EPOCH: 1784 LOSS: 61.2608711983\n", + "EPOCH: 1785 LOSS: 61.2607550652\n", + "EPOCH: 1786 LOSS: 61.2606390735\n", + "EPOCH: 1787 LOSS: 61.2605232228\n", + "EPOCH: 1788 LOSS: 61.2604075129\n", + "EPOCH: 1789 LOSS: 61.2602919434\n", + "EPOCH: 1790 LOSS: 61.2601765139\n", + "EPOCH: 1791 LOSS: 61.2600612242\n", + "EPOCH: 1792 LOSS: 61.2599460739\n", + "EPOCH: 1793 LOSS: 61.2598310626\n", + "EPOCH: 1794 LOSS: 61.25971619\n", + "EPOCH: 1795 LOSS: 61.2596014559\n", + "EPOCH: 1796 LOSS: 61.2594868598\n", + "EPOCH: 1797 LOSS: 61.2593724014\n", + "EPOCH: 1798 LOSS: 61.2592580805\n", + "EPOCH: 1799 LOSS: 61.2591438967\n", + "EPOCH: 1800 LOSS: 61.2590298496\n", + "EPOCH: 1801 LOSS: 61.258915939\n", + "EPOCH: 1802 LOSS: 61.2588021645\n", + "EPOCH: 1803 LOSS: 61.2586885258\n", + "EPOCH: 1804 LOSS: 61.2585750226\n", + "EPOCH: 1805 LOSS: 61.2584616546\n", + "EPOCH: 1806 LOSS: 61.2583484214\n", + "EPOCH: 1807 LOSS: 61.2582353227\n", + "EPOCH: 1808 LOSS: 61.2581223583\n", + "EPOCH: 1809 LOSS: 61.2580095277\n", + "EPOCH: 1810 LOSS: 61.2578968308\n", + "EPOCH: 1811 LOSS: 61.2577842671\n", + "EPOCH: 1812 LOSS: 61.2576718363\n", + "EPOCH: 1813 LOSS: 61.2575595382\n", + "EPOCH: 1814 LOSS: 61.2574473724\n", + "EPOCH: 1815 LOSS: 61.2573353387\n", + "EPOCH: 1816 LOSS: 61.2572234367\n", + "EPOCH: 1817 
LOSS: 61.2571116661\n", + "EPOCH: 1818 LOSS: 61.2570000266\n", + "EPOCH: 1819 LOSS: 61.2568885179\n", + "EPOCH: 1820 LOSS: 61.2567771397\n", + "EPOCH: 1821 LOSS: 61.2566658916\n", + "EPOCH: 1822 LOSS: 61.2565547735\n", + "EPOCH: 1823 LOSS: 61.2564437849\n", + "EPOCH: 1824 LOSS: 61.2563329257\n", + "EPOCH: 1825 LOSS: 61.2562221954\n", + "EPOCH: 1826 LOSS: 61.2561115938\n", + "EPOCH: 1827 LOSS: 61.2560011206\n", + "EPOCH: 1828 LOSS: 61.2558907755\n", + "EPOCH: 1829 LOSS: 61.2557805581\n", + "EPOCH: 1830 LOSS: 61.2556704683\n", + "EPOCH: 1831 LOSS: 61.2555605057\n", + "EPOCH: 1832 LOSS: 61.25545067\n", + "EPOCH: 1833 LOSS: 61.2553409609\n", + "EPOCH: 1834 LOSS: 61.2552313781\n", + "EPOCH: 1835 LOSS: 61.2551219214\n", + "EPOCH: 1836 LOSS: 61.2550125904\n", + "EPOCH: 1837 LOSS: 61.2549033848\n", + "EPOCH: 1838 LOSS: 61.2547943045\n", + "EPOCH: 1839 LOSS: 61.254685349\n", + "EPOCH: 1840 LOSS: 61.2545765181\n", + "EPOCH: 1841 LOSS: 61.2544678115\n", + "EPOCH: 1842 LOSS: 61.2543592289\n", + "EPOCH: 1843 LOSS: 61.2542507701\n", + "EPOCH: 1844 LOSS: 61.2541424347\n", + "EPOCH: 1845 LOSS: 61.2540342225\n", + "EPOCH: 1846 LOSS: 61.2539261332\n", + "EPOCH: 1847 LOSS: 61.2538181665\n", + "EPOCH: 1848 LOSS: 61.2537103222\n", + "EPOCH: 1849 LOSS: 61.2536025999\n", + "EPOCH: 1850 LOSS: 61.2534949993\n", + "EPOCH: 1851 LOSS: 61.2533875203\n", + "EPOCH: 1852 LOSS: 61.2532801625\n", + "EPOCH: 1853 LOSS: 61.2531729256\n", + "EPOCH: 1854 LOSS: 61.2530658094\n", + "EPOCH: 1855 LOSS: 61.2529588136\n", + "EPOCH: 1856 LOSS: 61.252851938\n", + "EPOCH: 1857 LOSS: 61.2527451821\n", + "EPOCH: 1858 LOSS: 61.2526385459\n", + "EPOCH: 1859 LOSS: 61.252532029\n", + "EPOCH: 1860 LOSS: 61.2524256311\n", + "EPOCH: 1861 LOSS: 61.252319352\n", + "EPOCH: 1862 LOSS: 61.2522131914\n", + "EPOCH: 1863 LOSS: 61.252107149\n", + "EPOCH: 1864 LOSS: 61.2520012246\n", + "EPOCH: 1865 LOSS: 61.2518954179\n", + "EPOCH: 1866 LOSS: 61.2517897286\n", + "EPOCH: 1867 LOSS: 61.2516841565\n", + "EPOCH: 1868 LOSS: 61.2515787014\n", + "EPOCH: 1869 LOSS: 61.2514733628\n", + "EPOCH: 1870 LOSS: 61.2513681407\n", + "EPOCH: 1871 LOSS: 61.2512630347\n", + "EPOCH: 1872 LOSS: 61.2511580445\n", + "EPOCH: 1873 LOSS: 61.25105317\n", + "EPOCH: 1874 LOSS: 61.2509484108\n", + "EPOCH: 1875 LOSS: 61.2508437667\n", + "EPOCH: 1876 LOSS: 61.2507392375\n", + "EPOCH: 1877 LOSS: 61.2506348228\n", + "EPOCH: 1878 LOSS: 61.2505305224\n", + "EPOCH: 1879 LOSS: 61.2504263361\n", + "EPOCH: 1880 LOSS: 61.2503222636\n", + "EPOCH: 1881 LOSS: 61.2502183047\n", + "EPOCH: 1882 LOSS: 61.2501144591\n", + "EPOCH: 1883 LOSS: 61.2500107265\n", + "EPOCH: 1884 LOSS: 61.2499071067\n", + "EPOCH: 1885 LOSS: 61.2498035995\n", + "EPOCH: 1886 LOSS: 61.2497002046\n", + "EPOCH: 1887 LOSS: 61.2495969218\n", + "EPOCH: 1888 LOSS: 61.2494937507\n", + "EPOCH: 1889 LOSS: 61.2493906912\n", + "EPOCH: 1890 LOSS: 61.2492877431\n", + "EPOCH: 1891 LOSS: 61.249184906\n", + "EPOCH: 1892 LOSS: 61.2490821797\n", + "EPOCH: 1893 LOSS: 61.248979564\n", + "EPOCH: 1894 LOSS: 61.2488770586\n", + "EPOCH: 1895 LOSS: 61.2487746633\n", + "EPOCH: 1896 LOSS: 61.2486723779\n", + "EPOCH: 1897 LOSS: 61.248570202\n", + "EPOCH: 1898 LOSS: 61.2484681356\n", + "EPOCH: 1899 LOSS: 61.2483661782\n", + "EPOCH: 1900 LOSS: 61.2482643298\n", + "EPOCH: 1901 LOSS: 61.24816259\n", + "EPOCH: 1902 LOSS: 61.2480609586\n", + "EPOCH: 1903 LOSS: 61.2479594353\n", + "EPOCH: 1904 LOSS: 61.24785802\n", + "EPOCH: 1905 LOSS: 61.2477567124\n", + "EPOCH: 1906 LOSS: 61.2476555123\n", + "EPOCH: 1907 LOSS: 61.2475544194\n", + "EPOCH: 1908 LOSS: 
61.2474534335\n", + "EPOCH: 1909 LOSS: 61.2473525543\n", + "EPOCH: 1910 LOSS: 61.2472517817\n", + "EPOCH: 1911 LOSS: 61.2471511153\n", + "EPOCH: 1912 LOSS: 61.2470505551\n", + "EPOCH: 1913 LOSS: 61.2469501006\n", + "EPOCH: 1914 LOSS: 61.2468497518\n", + "EPOCH: 1915 LOSS: 61.2467495084\n", + "EPOCH: 1916 LOSS: 61.2466493701\n", + "EPOCH: 1917 LOSS: 61.2465493367\n", + "EPOCH: 1918 LOSS: 61.246449408\n", + "EPOCH: 1919 LOSS: 61.2463495837\n", + "EPOCH: 1920 LOSS: 61.2462498637\n", + "EPOCH: 1921 LOSS: 61.2461502477\n", + "EPOCH: 1922 LOSS: 61.2460507355\n", + "EPOCH: 1923 LOSS: 61.2459513269\n", + "EPOCH: 1924 LOSS: 61.2458520216\n", + "EPOCH: 1925 LOSS: 61.2457528195\n", + "EPOCH: 1926 LOSS: 61.2456537202\n", + "EPOCH: 1927 LOSS: 61.2455547236\n", + "EPOCH: 1928 LOSS: 61.2454558294\n", + "EPOCH: 1929 LOSS: 61.2453570375\n", + "EPOCH: 1930 LOSS: 61.2452583476\n", + "EPOCH: 1931 LOSS: 61.2451597595\n", + "EPOCH: 1932 LOSS: 61.245061273\n", + "EPOCH: 1933 LOSS: 61.2449628878\n", + "EPOCH: 1934 LOSS: 61.2448646038\n", + "EPOCH: 1935 LOSS: 61.2447664207\n", + "EPOCH: 1936 LOSS: 61.2446683383\n", + "EPOCH: 1937 LOSS: 61.2445703564\n", + "EPOCH: 1938 LOSS: 61.2444724747\n", + "EPOCH: 1939 LOSS: 61.2443746932\n", + "EPOCH: 1940 LOSS: 61.2442770115\n", + "EPOCH: 1941 LOSS: 61.2441794294\n", + "EPOCH: 1942 LOSS: 61.2440819468\n", + "EPOCH: 1943 LOSS: 61.2439845634\n", + "EPOCH: 1944 LOSS: 61.243887279\n", + "EPOCH: 1945 LOSS: 61.2437900933\n", + "EPOCH: 1946 LOSS: 61.2436930063\n", + "EPOCH: 1947 LOSS: 61.2435960177\n", + "EPOCH: 1948 LOSS: 61.2434991272\n", + "EPOCH: 1949 LOSS: 61.2434023346\n", + "EPOCH: 1950 LOSS: 61.2433056399\n", + "EPOCH: 1951 LOSS: 61.2432090426\n", + "EPOCH: 1952 LOSS: 61.2431125427\n", + "EPOCH: 1953 LOSS: 61.24301614\n", + "EPOCH: 1954 LOSS: 61.2429198342\n", + "EPOCH: 1955 LOSS: 61.2428236251\n", + "EPOCH: 1956 LOSS: 61.2427275125\n", + "EPOCH: 1957 LOSS: 61.2426314962\n", + "EPOCH: 1958 LOSS: 61.2425355761\n", + "EPOCH: 1959 LOSS: 61.2424397518\n", + "EPOCH: 1960 LOSS: 61.2423440233\n", + "EPOCH: 1961 LOSS: 61.2422483903\n", + "EPOCH: 1962 LOSS: 61.2421528526\n", + "EPOCH: 1963 LOSS: 61.24205741\n", + "EPOCH: 1964 LOSS: 61.2419620623\n", + "EPOCH: 1965 LOSS: 61.2418668093\n", + "EPOCH: 1966 LOSS: 61.2417716508\n", + "EPOCH: 1967 LOSS: 61.2416765867\n", + "EPOCH: 1968 LOSS: 61.2415816166\n", + "EPOCH: 1969 LOSS: 61.2414867405\n", + "EPOCH: 1970 LOSS: 61.2413919582\n", + "EPOCH: 1971 LOSS: 61.2412972693\n", + "EPOCH: 1972 LOSS: 61.2412026738\n", + "EPOCH: 1973 LOSS: 61.2411081714\n", + "EPOCH: 1974 LOSS: 61.241013762\n", + "EPOCH: 1975 LOSS: 61.2409194453\n", + "EPOCH: 1976 LOSS: 61.2408252212\n", + "EPOCH: 1977 LOSS: 61.2407310895\n", + "EPOCH: 1978 LOSS: 61.2406370499\n", + "EPOCH: 1979 LOSS: 61.2405431024\n", + "EPOCH: 1980 LOSS: 61.2404492466\n", + "EPOCH: 1981 LOSS: 61.2403554824\n", + "EPOCH: 1982 LOSS: 61.2402618097\n", + "EPOCH: 1983 LOSS: 61.2401682281\n", + "EPOCH: 1984 LOSS: 61.2400747377\n", + "EPOCH: 1985 LOSS: 61.239981338\n", + "EPOCH: 1986 LOSS: 61.2398880291\n", + "EPOCH: 1987 LOSS: 61.2397948106\n", + "EPOCH: 1988 LOSS: 61.2397016824\n", + "EPOCH: 1989 LOSS: 61.2396086443\n", + "EPOCH: 1990 LOSS: 61.2395156961\n", + "EPOCH: 1991 LOSS: 61.2394228377\n", + "EPOCH: 1992 LOSS: 61.2393300688\n", + "EPOCH: 1993 LOSS: 61.2392373893\n", + "EPOCH: 1994 LOSS: 61.2391447989\n", + "EPOCH: 1995 LOSS: 61.2390522976\n", + "EPOCH: 1996 LOSS: 61.2389598851\n", + "EPOCH: 1997 LOSS: 61.2388675612\n", + "EPOCH: 1998 LOSS: 61.2387753258\n", + "EPOCH: 1999 LOSS: 
61.2386831786\n", + "EPOCH: 2000 LOSS: 61.2385911196\n", + "EPOCH: 2001 LOSS: 61.2384991485\n", + "EPOCH: 2002 LOSS: 61.2384072651\n", + "EPOCH: 2003 LOSS: 61.2383154692\n", + "EPOCH: 2004 LOSS: 61.2382237608\n", + "EPOCH: 2005 LOSS: 61.2381321395\n", + "EPOCH: 2006 LOSS: 61.2380406053\n", + "EPOCH: 2007 LOSS: 61.2379491579\n", + "EPOCH: 2008 LOSS: 61.2378577972\n", + "EPOCH: 2009 LOSS: 61.2377665229\n", + "EPOCH: 2010 LOSS: 61.237675335\n", + "EPOCH: 2011 LOSS: 61.2375842333\n", + "EPOCH: 2012 LOSS: 61.2374932175\n", + "EPOCH: 2013 LOSS: 61.2374022875\n", + "EPOCH: 2014 LOSS: 61.2373114431\n", + "EPOCH: 2015 LOSS: 61.2372206842\n", + "EPOCH: 2016 LOSS: 61.2371300106\n", + "EPOCH: 2017 LOSS: 61.237039422\n", + "EPOCH: 2018 LOSS: 61.2369489184\n", + "EPOCH: 2019 LOSS: 61.2368584996\n", + "EPOCH: 2020 LOSS: 61.2367681654\n", + "EPOCH: 2021 LOSS: 61.2366779155\n", + "EPOCH: 2022 LOSS: 61.23658775\n", + "EPOCH: 2023 LOSS: 61.2364976685\n", + "EPOCH: 2024 LOSS: 61.2364076709\n", + "EPOCH: 2025 LOSS: 61.2363177571\n", + "EPOCH: 2026 LOSS: 61.2362279268\n", + "EPOCH: 2027 LOSS: 61.23613818\n", + "EPOCH: 2028 LOSS: 61.2360485164\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2029 LOSS: 61.2359589359\n", + "EPOCH: 2030 LOSS: 61.2358694383\n", + "EPOCH: 2031 LOSS: 61.2357800234\n", + "EPOCH: 2032 LOSS: 61.2356906912\n", + "EPOCH: 2033 LOSS: 61.2356014413\n", + "EPOCH: 2034 LOSS: 61.2355122737\n", + "EPOCH: 2035 LOSS: 61.2354231882\n", + "EPOCH: 2036 LOSS: 61.2353341846\n", + "EPOCH: 2037 LOSS: 61.2352452628\n", + "EPOCH: 2038 LOSS: 61.2351564225\n", + "EPOCH: 2039 LOSS: 61.2350676637\n", + "EPOCH: 2040 LOSS: 61.2349789862\n", + "EPOCH: 2041 LOSS: 61.2348903898\n", + "EPOCH: 2042 LOSS: 61.2348018744\n", + "EPOCH: 2043 LOSS: 61.2347134397\n", + "EPOCH: 2044 LOSS: 61.2346250857\n", + "EPOCH: 2045 LOSS: 61.2345368121\n", + "EPOCH: 2046 LOSS: 61.2344486189\n", + "EPOCH: 2047 LOSS: 61.2343605058\n", + "EPOCH: 2048 LOSS: 61.2342724727\n", + "EPOCH: 2049 LOSS: 61.2341845195\n", + "EPOCH: 2050 LOSS: 61.2340966459\n", + "EPOCH: 2051 LOSS: 61.2340088519\n", + "EPOCH: 2052 LOSS: 61.2339211373\n", + "EPOCH: 2053 LOSS: 61.2338335018\n", + "EPOCH: 2054 LOSS: 61.2337459454\n", + "EPOCH: 2055 LOSS: 61.2336584679\n", + "EPOCH: 2056 LOSS: 61.2335710692\n", + "EPOCH: 2057 LOSS: 61.2334837491\n", + "EPOCH: 2058 LOSS: 61.2333965074\n", + "EPOCH: 2059 LOSS: 61.2333093439\n", + "EPOCH: 2060 LOSS: 61.2332222587\n", + "EPOCH: 2061 LOSS: 61.2331352514\n", + "EPOCH: 2062 LOSS: 61.2330483219\n", + "EPOCH: 2063 LOSS: 61.2329614701\n", + "EPOCH: 2064 LOSS: 61.2328746958\n", + "EPOCH: 2065 LOSS: 61.2327879989\n", + "EPOCH: 2066 LOSS: 61.2327013793\n", + "EPOCH: 2067 LOSS: 61.2326148366\n", + "EPOCH: 2068 LOSS: 61.232528371\n", + "EPOCH: 2069 LOSS: 61.2324419821\n", + "EPOCH: 2070 LOSS: 61.2323556698\n", + "EPOCH: 2071 LOSS: 61.232269434\n", + "EPOCH: 2072 LOSS: 61.2321832745\n", + "EPOCH: 2073 LOSS: 61.2320971912\n", + "EPOCH: 2074 LOSS: 61.2320111839\n", + "EPOCH: 2075 LOSS: 61.2319252525\n", + "EPOCH: 2076 LOSS: 61.2318393969\n", + "EPOCH: 2077 LOSS: 61.2317536168\n", + "EPOCH: 2078 LOSS: 61.2316679122\n", + "EPOCH: 2079 LOSS: 61.2315822829\n", + "EPOCH: 2080 LOSS: 61.2314967287\n", + "EPOCH: 2081 LOSS: 61.2314112496\n", + "EPOCH: 2082 LOSS: 61.2313258453\n", + "EPOCH: 2083 LOSS: 61.2312405157\n", + "EPOCH: 2084 LOSS: 61.2311552608\n", + "EPOCH: 2085 LOSS: 61.2310700802\n", + "EPOCH: 2086 LOSS: 61.2309849739\n", + "EPOCH: 2087 LOSS: 61.2308999418\n", + "EPOCH: 2088 LOSS: 61.2308149837\n", + 
"EPOCH: 2089 LOSS: 61.2307300995\n", + "EPOCH: 2090 LOSS: 61.2306452889\n", + "EPOCH: 2091 LOSS: 61.230560552\n", + "EPOCH: 2092 LOSS: 61.2304758885\n", + "EPOCH: 2093 LOSS: 61.2303912983\n", + "EPOCH: 2094 LOSS: 61.2303067813\n", + "EPOCH: 2095 LOSS: 61.2302223372\n", + "EPOCH: 2096 LOSS: 61.2301379661\n", + "EPOCH: 2097 LOSS: 61.2300536677\n", + "EPOCH: 2098 LOSS: 61.2299694419\n", + "EPOCH: 2099 LOSS: 61.2298852886\n", + "EPOCH: 2100 LOSS: 61.2298012076\n", + "EPOCH: 2101 LOSS: 61.2297171988\n", + "EPOCH: 2102 LOSS: 61.229633262\n", + "EPOCH: 2103 LOSS: 61.2295493972\n", + "EPOCH: 2104 LOSS: 61.2294656041\n", + "EPOCH: 2105 LOSS: 61.2293818827\n", + "EPOCH: 2106 LOSS: 61.2292982328\n", + "EPOCH: 2107 LOSS: 61.2292146543\n", + "EPOCH: 2108 LOSS: 61.229131147\n", + "EPOCH: 2109 LOSS: 61.2290477108\n", + "EPOCH: 2110 LOSS: 61.2289643456\n", + "EPOCH: 2111 LOSS: 61.2288810512\n", + "EPOCH: 2112 LOSS: 61.2287978275\n", + "EPOCH: 2113 LOSS: 61.2287146744\n", + "EPOCH: 2114 LOSS: 61.2286315917\n", + "EPOCH: 2115 LOSS: 61.2285485793\n", + "EPOCH: 2116 LOSS: 61.2284656371\n", + "EPOCH: 2117 LOSS: 61.228382765\n", + "EPOCH: 2118 LOSS: 61.2282999627\n", + "EPOCH: 2119 LOSS: 61.2282172302\n", + "EPOCH: 2120 LOSS: 61.2281345674\n", + "EPOCH: 2121 LOSS: 61.2280519741\n", + "EPOCH: 2122 LOSS: 61.2279694502\n", + "EPOCH: 2123 LOSS: 61.2278869955\n", + "EPOCH: 2124 LOSS: 61.2278046099\n", + "EPOCH: 2125 LOSS: 61.2277222934\n", + "EPOCH: 2126 LOSS: 61.2276400457\n", + "EPOCH: 2127 LOSS: 61.2275578667\n", + "EPOCH: 2128 LOSS: 61.2274757564\n", + "EPOCH: 2129 LOSS: 61.2273937145\n", + "EPOCH: 2130 LOSS: 61.227311741\n", + "EPOCH: 2131 LOSS: 61.2272298357\n", + "EPOCH: 2132 LOSS: 61.2271479986\n", + "EPOCH: 2133 LOSS: 61.2270662294\n", + "EPOCH: 2134 LOSS: 61.226984528\n", + "EPOCH: 2135 LOSS: 61.2269028944\n", + "EPOCH: 2136 LOSS: 61.2268213284\n", + "EPOCH: 2137 LOSS: 61.2267398298\n", + "EPOCH: 2138 LOSS: 61.2266583986\n", + "EPOCH: 2139 LOSS: 61.2265770346\n", + "EPOCH: 2140 LOSS: 61.2264957377\n", + "EPOCH: 2141 LOSS: 61.2264145078\n", + "EPOCH: 2142 LOSS: 61.2263333448\n", + "EPOCH: 2143 LOSS: 61.2262522484\n", + "EPOCH: 2144 LOSS: 61.2261712187\n", + "EPOCH: 2145 LOSS: 61.2260902554\n", + "EPOCH: 2146 LOSS: 61.2260093586\n", + "EPOCH: 2147 LOSS: 61.2259285279\n", + "EPOCH: 2148 LOSS: 61.2258477634\n", + "EPOCH: 2149 LOSS: 61.2257670648\n", + "EPOCH: 2150 LOSS: 61.2256864321\n", + "EPOCH: 2151 LOSS: 61.2256058652\n", + "EPOCH: 2152 LOSS: 61.2255253639\n", + "EPOCH: 2153 LOSS: 61.2254449281\n", + "EPOCH: 2154 LOSS: 61.2253645578\n", + "EPOCH: 2155 LOSS: 61.2252842526\n", + "EPOCH: 2156 LOSS: 61.2252040127\n", + "EPOCH: 2157 LOSS: 61.2251238377\n", + "EPOCH: 2158 LOSS: 61.2250437277\n", + "EPOCH: 2159 LOSS: 61.2249636825\n", + "EPOCH: 2160 LOSS: 61.224883702\n", + "EPOCH: 2161 LOSS: 61.224803786\n", + "EPOCH: 2162 LOSS: 61.2247239344\n", + "EPOCH: 2163 LOSS: 61.2246441472\n", + "EPOCH: 2164 LOSS: 61.2245644242\n", + "EPOCH: 2165 LOSS: 61.2244847653\n", + "EPOCH: 2166 LOSS: 61.2244051703\n", + "EPOCH: 2167 LOSS: 61.2243256392\n", + "EPOCH: 2168 LOSS: 61.2242461719\n", + "EPOCH: 2169 LOSS: 61.2241667682\n", + "EPOCH: 2170 LOSS: 61.224087428\n", + "EPOCH: 2171 LOSS: 61.2240081512\n", + "EPOCH: 2172 LOSS: 61.2239289377\n", + "EPOCH: 2173 LOSS: 61.2238497873\n", + "EPOCH: 2174 LOSS: 61.2237707\n", + "EPOCH: 2175 LOSS: 61.2236916756\n", + "EPOCH: 2176 LOSS: 61.2236127141\n", + "EPOCH: 2177 LOSS: 61.2235338153\n", + "EPOCH: 2178 LOSS: 61.2234549791\n", + "EPOCH: 2179 LOSS: 61.2233762053\n", + "EPOCH: 2180 
LOSS: 61.223297494\n", + "EPOCH: 2181 LOSS: 61.2232188449\n", + "EPOCH: 2182 LOSS: 61.223140258\n", + "EPOCH: 2183 LOSS: 61.2230617331\n", + "EPOCH: 2184 LOSS: 61.2229832701\n", + "EPOCH: 2185 LOSS: 61.222904869\n", + "EPOCH: 2186 LOSS: 61.2228265295\n", + "EPOCH: 2187 LOSS: 61.2227482517\n", + "EPOCH: 2188 LOSS: 61.2226700353\n", + "EPOCH: 2189 LOSS: 61.2225918803\n", + "EPOCH: 2190 LOSS: 61.2225137866\n", + "EPOCH: 2191 LOSS: 61.2224357541\n", + "EPOCH: 2192 LOSS: 61.2223577826\n", + "EPOCH: 2193 LOSS: 61.222279872\n", + "EPOCH: 2194 LOSS: 61.2222020223\n", + "EPOCH: 2195 LOSS: 61.2221242333\n", + "EPOCH: 2196 LOSS: 61.2220465049\n", + "EPOCH: 2197 LOSS: 61.221968837\n", + "EPOCH: 2198 LOSS: 61.2218912295\n", + "EPOCH: 2199 LOSS: 61.2218136824\n", + "EPOCH: 2200 LOSS: 61.2217361954\n", + "EPOCH: 2201 LOSS: 61.2216587684\n", + "EPOCH: 2202 LOSS: 61.2215814015\n", + "EPOCH: 2203 LOSS: 61.2215040944\n", + "EPOCH: 2204 LOSS: 61.2214268471\n", + "EPOCH: 2205 LOSS: 61.2213496594\n", + "EPOCH: 2206 LOSS: 61.2212725313\n", + "EPOCH: 2207 LOSS: 61.2211954627\n", + "EPOCH: 2208 LOSS: 61.2211184534\n", + "EPOCH: 2209 LOSS: 61.2210415033\n", + "EPOCH: 2210 LOSS: 61.2209646123\n", + "EPOCH: 2211 LOSS: 61.2208877804\n", + "EPOCH: 2212 LOSS: 61.2208110074\n", + "EPOCH: 2213 LOSS: 61.2207342932\n", + "EPOCH: 2214 LOSS: 61.2206576378\n", + "EPOCH: 2215 LOSS: 61.2205810409\n", + "EPOCH: 2216 LOSS: 61.2205045026\n", + "EPOCH: 2217 LOSS: 61.2204280227\n", + "EPOCH: 2218 LOSS: 61.220351601\n", + "EPOCH: 2219 LOSS: 61.2202752376\n", + "EPOCH: 2220 LOSS: 61.2201989323\n", + "EPOCH: 2221 LOSS: 61.220122685\n", + "EPOCH: 2222 LOSS: 61.2200464956\n", + "EPOCH: 2223 LOSS: 61.219970364\n", + "EPOCH: 2224 LOSS: 61.2198942901\n", + "EPOCH: 2225 LOSS: 61.2198182738\n", + "EPOCH: 2226 LOSS: 61.219742315\n", + "EPOCH: 2227 LOSS: 61.2196664136\n", + "EPOCH: 2228 LOSS: 61.2195905694\n", + "EPOCH: 2229 LOSS: 61.2195147825\n", + "EPOCH: 2230 LOSS: 61.2194390527\n", + "EPOCH: 2231 LOSS: 61.2193633799\n", + "EPOCH: 2232 LOSS: 61.219287764\n", + "EPOCH: 2233 LOSS: 61.2192122049\n", + "EPOCH: 2234 LOSS: 61.2191367025\n", + "EPOCH: 2235 LOSS: 61.2190612567\n", + "EPOCH: 2236 LOSS: 61.2189858674\n", + "EPOCH: 2237 LOSS: 61.2189105345\n", + "EPOCH: 2238 LOSS: 61.2188352579\n", + "EPOCH: 2239 LOSS: 61.2187600376\n", + "EPOCH: 2240 LOSS: 61.2186848733\n", + "EPOCH: 2241 LOSS: 61.2186097651\n", + "EPOCH: 2242 LOSS: 61.2185347128\n", + "EPOCH: 2243 LOSS: 61.2184597164\n", + "EPOCH: 2244 LOSS: 61.2183847756\n", + "EPOCH: 2245 LOSS: 61.2183098906\n", + "EPOCH: 2246 LOSS: 61.218235061\n", + "EPOCH: 2247 LOSS: 61.2181602869\n", + "EPOCH: 2248 LOSS: 61.2180855682\n", + "EPOCH: 2249 LOSS: 61.2180109047\n", + "EPOCH: 2250 LOSS: 61.2179362964\n", + "EPOCH: 2251 LOSS: 61.2178617431\n", + "EPOCH: 2252 LOSS: 61.2177872448\n", + "EPOCH: 2253 LOSS: 61.2177128014\n", + "EPOCH: 2254 LOSS: 61.2176384128\n", + "EPOCH: 2255 LOSS: 61.2175640789\n", + "EPOCH: 2256 LOSS: 61.2174897996\n", + "EPOCH: 2257 LOSS: 61.2174155748\n", + "EPOCH: 2258 LOSS: 61.2173414044\n", + "EPOCH: 2259 LOSS: 61.2172672883\n", + "EPOCH: 2260 LOSS: 61.2171932265\n", + "EPOCH: 2261 LOSS: 61.2171192187\n", + "EPOCH: 2262 LOSS: 61.2170452651\n", + "EPOCH: 2263 LOSS: 61.2169713654\n", + "EPOCH: 2264 LOSS: 61.2168975195\n", + "EPOCH: 2265 LOSS: 61.2168237274\n", + "EPOCH: 2266 LOSS: 61.216749989\n", + "EPOCH: 2267 LOSS: 61.2166763042\n", + "EPOCH: 2268 LOSS: 61.2166026729\n", + "EPOCH: 2269 LOSS: 61.216529095\n", + "EPOCH: 2270 LOSS: 61.2164555704\n", + "EPOCH: 2271 LOSS: 
61.216382099\n", + "EPOCH: 2272 LOSS: 61.2163086808\n", + "EPOCH: 2273 LOSS: 61.2162353156\n", + "EPOCH: 2274 LOSS: 61.2161620034\n", + "EPOCH: 2275 LOSS: 61.2160887441\n", + "EPOCH: 2276 LOSS: 61.2160155375\n", + "EPOCH: 2277 LOSS: 61.2159423836\n", + "EPOCH: 2278 LOSS: 61.2158692824\n", + "EPOCH: 2279 LOSS: 61.2157962336\n", + "EPOCH: 2280 LOSS: 61.2157232373\n", + "EPOCH: 2281 LOSS: 61.2156502933\n", + "EPOCH: 2282 LOSS: 61.2155774016\n", + "EPOCH: 2283 LOSS: 61.215504562\n", + "EPOCH: 2284 LOSS: 61.2154317745\n", + "EPOCH: 2285 LOSS: 61.215359039\n", + "EPOCH: 2286 LOSS: 61.2152863554\n", + "EPOCH: 2287 LOSS: 61.2152137236\n", + "EPOCH: 2288 LOSS: 61.2151411435\n", + "EPOCH: 2289 LOSS: 61.2150686151\n", + "EPOCH: 2290 LOSS: 61.2149961383\n", + "EPOCH: 2291 LOSS: 61.2149237129\n", + "EPOCH: 2292 LOSS: 61.2148513389\n", + "EPOCH: 2293 LOSS: 61.2147790162\n", + "EPOCH: 2294 LOSS: 61.2147067447\n", + "EPOCH: 2295 LOSS: 61.2146345244\n", + "EPOCH: 2296 LOSS: 61.2145623551\n", + "EPOCH: 2297 LOSS: 61.2144902367\n", + "EPOCH: 2298 LOSS: 61.2144181693\n", + "EPOCH: 2299 LOSS: 61.2143461526\n", + "EPOCH: 2300 LOSS: 61.2142741866\n", + "EPOCH: 2301 LOSS: 61.2142022713\n", + "EPOCH: 2302 LOSS: 61.2141304065\n", + "EPOCH: 2303 LOSS: 61.2140585922\n", + "EPOCH: 2304 LOSS: 61.2139868282\n", + "EPOCH: 2305 LOSS: 61.2139151146\n", + "EPOCH: 2306 LOSS: 61.2138434511\n", + "EPOCH: 2307 LOSS: 61.2137718378\n", + "EPOCH: 2308 LOSS: 61.2137002745\n", + "EPOCH: 2309 LOSS: 61.2136287612\n", + "EPOCH: 2310 LOSS: 61.2135572977\n", + "EPOCH: 2311 LOSS: 61.2134858841\n", + "EPOCH: 2312 LOSS: 61.2134145201\n", + "EPOCH: 2313 LOSS: 61.2133432058\n", + "EPOCH: 2314 LOSS: 61.2132719411\n", + "EPOCH: 2315 LOSS: 61.2132007258\n", + "EPOCH: 2316 LOSS: 61.2131295599\n", + "EPOCH: 2317 LOSS: 61.2130584434\n", + "EPOCH: 2318 LOSS: 61.212987376\n", + "EPOCH: 2319 LOSS: 61.2129163579\n", + "EPOCH: 2320 LOSS: 61.2128453887\n", + "EPOCH: 2321 LOSS: 61.2127744686\n", + "EPOCH: 2322 LOSS: 61.2127035974\n", + "EPOCH: 2323 LOSS: 61.212632775\n", + "EPOCH: 2324 LOSS: 61.2125620014\n", + "EPOCH: 2325 LOSS: 61.2124912764\n", + "EPOCH: 2326 LOSS: 61.2124206\n", + "EPOCH: 2327 LOSS: 61.2123499722\n", + "EPOCH: 2328 LOSS: 61.2122793927\n", + "EPOCH: 2329 LOSS: 61.2122088617\n", + "EPOCH: 2330 LOSS: 61.2121383789\n", + "EPOCH: 2331 LOSS: 61.2120679443\n", + "EPOCH: 2332 LOSS: 61.2119975578\n", + "EPOCH: 2333 LOSS: 61.2119272194\n", + "EPOCH: 2334 LOSS: 61.2118569289\n", + "EPOCH: 2335 LOSS: 61.2117866864\n", + "EPOCH: 2336 LOSS: 61.2117164916\n", + "EPOCH: 2337 LOSS: 61.2116463446\n", + "EPOCH: 2338 LOSS: 61.2115762452\n", + "EPOCH: 2339 LOSS: 61.2115061934\n", + "EPOCH: 2340 LOSS: 61.2114361892\n", + "EPOCH: 2341 LOSS: 61.2113662323\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2342 LOSS: 61.2112963228\n", + "EPOCH: 2343 LOSS: 61.2112264606\n", + "EPOCH: 2344 LOSS: 61.2111566456\n", + "EPOCH: 2345 LOSS: 61.2110868777\n", + "EPOCH: 2346 LOSS: 61.2110171568\n", + "EPOCH: 2347 LOSS: 61.210947483\n", + "EPOCH: 2348 LOSS: 61.210877856\n", + "EPOCH: 2349 LOSS: 61.2108082758\n", + "EPOCH: 2350 LOSS: 61.2107387424\n", + "EPOCH: 2351 LOSS: 61.2106692557\n", + "EPOCH: 2352 LOSS: 61.2105998156\n", + "EPOCH: 2353 LOSS: 61.210530422\n", + "EPOCH: 2354 LOSS: 61.2104610748\n", + "EPOCH: 2355 LOSS: 61.2103917741\n", + "EPOCH: 2356 LOSS: 61.2103225196\n", + "EPOCH: 2357 LOSS: 61.2102533114\n", + "EPOCH: 2358 LOSS: 61.2101841493\n", + "EPOCH: 2359 LOSS: 61.2101150333\n", + "EPOCH: 2360 LOSS: 61.2100459633\n", + 
"EPOCH: 2361 LOSS: 61.2099769393\n", + "EPOCH: 2362 LOSS: 61.2099079611\n", + "EPOCH: 2363 LOSS: 61.2098390287\n", + "EPOCH: 2364 LOSS: 61.209770142\n", + "EPOCH: 2365 LOSS: 61.209701301\n", + "EPOCH: 2366 LOSS: 61.2096325055\n", + "EPOCH: 2367 LOSS: 61.2095637556\n", + "EPOCH: 2368 LOSS: 61.2094950511\n", + "EPOCH: 2369 LOSS: 61.2094263919\n", + "EPOCH: 2370 LOSS: 61.209357778\n", + "EPOCH: 2371 LOSS: 61.2092892094\n", + "EPOCH: 2372 LOSS: 61.2092206859\n", + "EPOCH: 2373 LOSS: 61.2091522075\n", + "EPOCH: 2374 LOSS: 61.209083774\n", + "EPOCH: 2375 LOSS: 61.2090153855\n", + "EPOCH: 2376 LOSS: 61.2089470419\n", + "EPOCH: 2377 LOSS: 61.2088787431\n", + "EPOCH: 2378 LOSS: 61.208810489\n", + "EPOCH: 2379 LOSS: 61.2087422795\n", + "EPOCH: 2380 LOSS: 61.2086741146\n", + "EPOCH: 2381 LOSS: 61.2086059943\n", + "EPOCH: 2382 LOSS: 61.2085379183\n", + "EPOCH: 2383 LOSS: 61.2084698868\n", + "EPOCH: 2384 LOSS: 61.2084018995\n", + "EPOCH: 2385 LOSS: 61.2083339565\n", + "EPOCH: 2386 LOSS: 61.2082660577\n", + "EPOCH: 2387 LOSS: 61.2081982029\n", + "EPOCH: 2388 LOSS: 61.2081303922\n", + "EPOCH: 2389 LOSS: 61.2080626255\n", + "EPOCH: 2390 LOSS: 61.2079949026\n", + "EPOCH: 2391 LOSS: 61.2079272236\n", + "EPOCH: 2392 LOSS: 61.2078595883\n", + "EPOCH: 2393 LOSS: 61.2077919967\n", + "EPOCH: 2394 LOSS: 61.2077244487\n", + "EPOCH: 2395 LOSS: 61.2076569443\n", + "EPOCH: 2396 LOSS: 61.2075894834\n", + "EPOCH: 2397 LOSS: 61.2075220659\n", + "EPOCH: 2398 LOSS: 61.2074546917\n", + "EPOCH: 2399 LOSS: 61.2073873608\n", + "EPOCH: 2400 LOSS: 61.2073200732\n", + "EPOCH: 2401 LOSS: 61.2072528286\n", + "EPOCH: 2402 LOSS: 61.2071856272\n", + "EPOCH: 2403 LOSS: 61.2071184688\n", + "EPOCH: 2404 LOSS: 61.2070513534\n", + "EPOCH: 2405 LOSS: 61.2069842808\n", + "EPOCH: 2406 LOSS: 61.206917251\n", + "EPOCH: 2407 LOSS: 61.206850264\n", + "EPOCH: 2408 LOSS: 61.2067833197\n", + "EPOCH: 2409 LOSS: 61.206716418\n", + "EPOCH: 2410 LOSS: 61.2066495589\n", + "EPOCH: 2411 LOSS: 61.2065827423\n", + "EPOCH: 2412 LOSS: 61.2065159681\n", + "EPOCH: 2413 LOSS: 61.2064492362\n", + "EPOCH: 2414 LOSS: 61.2063825467\n", + "EPOCH: 2415 LOSS: 61.2063158994\n", + "EPOCH: 2416 LOSS: 61.2062492942\n", + "EPOCH: 2417 LOSS: 61.2061827312\n", + "EPOCH: 2418 LOSS: 61.2061162102\n", + "EPOCH: 2419 LOSS: 61.2060497312\n", + "EPOCH: 2420 LOSS: 61.2059832941\n", + "EPOCH: 2421 LOSS: 61.2059168988\n", + "EPOCH: 2422 LOSS: 61.2058505454\n", + "EPOCH: 2423 LOSS: 61.2057842336\n", + "EPOCH: 2424 LOSS: 61.2057179635\n", + "EPOCH: 2425 LOSS: 61.205651735\n", + "EPOCH: 2426 LOSS: 61.205585548\n", + "EPOCH: 2427 LOSS: 61.2055194026\n", + "EPOCH: 2428 LOSS: 61.2054532985\n", + "EPOCH: 2429 LOSS: 61.2053872357\n", + "EPOCH: 2430 LOSS: 61.2053212143\n", + "EPOCH: 2431 LOSS: 61.205255234\n", + "EPOCH: 2432 LOSS: 61.2051892949\n", + "EPOCH: 2433 LOSS: 61.2051233969\n", + "EPOCH: 2434 LOSS: 61.20505754\n", + "EPOCH: 2435 LOSS: 61.204991724\n", + "EPOCH: 2436 LOSS: 61.2049259489\n", + "EPOCH: 2437 LOSS: 61.2048602147\n", + "EPOCH: 2438 LOSS: 61.2047945212\n", + "EPOCH: 2439 LOSS: 61.2047288685\n", + "EPOCH: 2440 LOSS: 61.2046632564\n", + "EPOCH: 2441 LOSS: 61.204597685\n", + "EPOCH: 2442 LOSS: 61.2045321541\n", + "EPOCH: 2443 LOSS: 61.2044666636\n", + "EPOCH: 2444 LOSS: 61.2044012136\n", + "EPOCH: 2445 LOSS: 61.204335804\n", + "EPOCH: 2446 LOSS: 61.2042704346\n", + "EPOCH: 2447 LOSS: 61.2042051055\n", + "EPOCH: 2448 LOSS: 61.2041398166\n", + "EPOCH: 2449 LOSS: 61.2040745678\n", + "EPOCH: 2450 LOSS: 61.204009359\n", + "EPOCH: 2451 LOSS: 61.2039441903\n", + "EPOCH: 2452 LOSS: 
61.2038790615\n", + "EPOCH: 2453 LOSS: 61.2038139725\n", + "EPOCH: 2454 LOSS: 61.2037489234\n", + "EPOCH: 2455 LOSS: 61.2036839141\n", + "EPOCH: 2456 LOSS: 61.2036189445\n", + "EPOCH: 2457 LOSS: 61.2035540145\n", + "EPOCH: 2458 LOSS: 61.2034891241\n", + "EPOCH: 2459 LOSS: 61.2034242732\n", + "EPOCH: 2460 LOSS: 61.2033594618\n", + "EPOCH: 2461 LOSS: 61.2032946898\n", + "EPOCH: 2462 LOSS: 61.2032299571\n", + "EPOCH: 2463 LOSS: 61.2031652638\n", + "EPOCH: 2464 LOSS: 61.2031006096\n", + "EPOCH: 2465 LOSS: 61.2030359947\n", + "EPOCH: 2466 LOSS: 61.2029714189\n", + "EPOCH: 2467 LOSS: 61.2029068821\n", + "EPOCH: 2468 LOSS: 61.2028423844\n", + "EPOCH: 2469 LOSS: 61.2027779256\n", + "EPOCH: 2470 LOSS: 61.2027135057\n", + "EPOCH: 2471 LOSS: 61.2026491246\n", + "EPOCH: 2472 LOSS: 61.2025847823\n", + "EPOCH: 2473 LOSS: 61.2025204787\n", + "EPOCH: 2474 LOSS: 61.2024562138\n", + "EPOCH: 2475 LOSS: 61.2023919875\n", + "EPOCH: 2476 LOSS: 61.2023277997\n", + "EPOCH: 2477 LOSS: 61.2022636504\n", + "EPOCH: 2478 LOSS: 61.2021995396\n", + "EPOCH: 2479 LOSS: 61.2021354671\n", + "EPOCH: 2480 LOSS: 61.202071433\n", + "EPOCH: 2481 LOSS: 61.2020074372\n", + "EPOCH: 2482 LOSS: 61.2019434795\n", + "EPOCH: 2483 LOSS: 61.20187956\n", + "EPOCH: 2484 LOSS: 61.2018156786\n", + "EPOCH: 2485 LOSS: 61.2017518353\n", + "EPOCH: 2486 LOSS: 61.2016880299\n", + "EPOCH: 2487 LOSS: 61.2016242625\n", + "EPOCH: 2488 LOSS: 61.201560533\n", + "EPOCH: 2489 LOSS: 61.2014968413\n", + "EPOCH: 2490 LOSS: 61.2014331873\n", + "EPOCH: 2491 LOSS: 61.2013695711\n", + "EPOCH: 2492 LOSS: 61.2013059926\n", + "EPOCH: 2493 LOSS: 61.2012424516\n", + "EPOCH: 2494 LOSS: 61.2011789482\n", + "EPOCH: 2495 LOSS: 61.2011154823\n", + "EPOCH: 2496 LOSS: 61.2010520539\n", + "EPOCH: 2497 LOSS: 61.2009886628\n", + "EPOCH: 2498 LOSS: 61.2009253091\n", + "EPOCH: 2499 LOSS: 61.2008619926\n", + "EPOCH: 2500 LOSS: 61.2007987134\n", + "EPOCH: 2501 LOSS: 61.2007354713\n", + "EPOCH: 2502 LOSS: 61.2006722664\n", + "EPOCH: 2503 LOSS: 61.2006090986\n", + "EPOCH: 2504 LOSS: 61.2005459677\n", + "EPOCH: 2505 LOSS: 61.2004828739\n", + "EPOCH: 2506 LOSS: 61.2004198169\n", + "EPOCH: 2507 LOSS: 61.2003567968\n", + "EPOCH: 2508 LOSS: 61.2002938135\n", + "EPOCH: 2509 LOSS: 61.2002308669\n", + "EPOCH: 2510 LOSS: 61.200167957\n", + "EPOCH: 2511 LOSS: 61.2001050838\n", + "EPOCH: 2512 LOSS: 61.2000422472\n", + "EPOCH: 2513 LOSS: 61.1999794471\n", + "EPOCH: 2514 LOSS: 61.1999166835\n", + "EPOCH: 2515 LOSS: 61.1998539564\n", + "EPOCH: 2516 LOSS: 61.1997912656\n", + "EPOCH: 2517 LOSS: 61.1997286112\n", + "EPOCH: 2518 LOSS: 61.199665993\n", + "EPOCH: 2519 LOSS: 61.1996034111\n", + "EPOCH: 2520 LOSS: 61.1995408654\n", + "EPOCH: 2521 LOSS: 61.1994783557\n", + "EPOCH: 2522 LOSS: 61.1994158822\n", + "EPOCH: 2523 LOSS: 61.1993534447\n", + "EPOCH: 2524 LOSS: 61.1992910431\n", + "EPOCH: 2525 LOSS: 61.1992286775\n", + "EPOCH: 2526 LOSS: 61.1991663477\n", + "EPOCH: 2527 LOSS: 61.1991040538\n", + "EPOCH: 2528 LOSS: 61.1990417957\n", + "EPOCH: 2529 LOSS: 61.1989795732\n", + "EPOCH: 2530 LOSS: 61.1989173865\n", + "EPOCH: 2531 LOSS: 61.1988552353\n", + "EPOCH: 2532 LOSS: 61.1987931197\n", + "EPOCH: 2533 LOSS: 61.1987310397\n", + "EPOCH: 2534 LOSS: 61.1986689951\n", + "EPOCH: 2535 LOSS: 61.1986069859\n", + "EPOCH: 2536 LOSS: 61.1985450121\n", + "EPOCH: 2537 LOSS: 61.1984830736\n", + "EPOCH: 2538 LOSS: 61.1984211704\n", + "EPOCH: 2539 LOSS: 61.1983593024\n", + "EPOCH: 2540 LOSS: 61.1982974696\n", + "EPOCH: 2541 LOSS: 61.1982356719\n", + "EPOCH: 2542 LOSS: 61.1981739093\n", + "EPOCH: 2543 LOSS: 
61.1981121817\n", + "EPOCH: 2544 LOSS: 61.198050489\n", + "EPOCH: 2545 LOSS: 61.1979888313\n", + "EPOCH: 2546 LOSS: 61.1979272085\n", + "EPOCH: 2547 LOSS: 61.1978656205\n", + "EPOCH: 2548 LOSS: 61.1978040673\n", + "EPOCH: 2549 LOSS: 61.1977425488\n", + "EPOCH: 2550 LOSS: 61.197681065\n", + "EPOCH: 2551 LOSS: 61.1976196159\n", + "EPOCH: 2552 LOSS: 61.1975582013\n", + "EPOCH: 2553 LOSS: 61.1974968212\n", + "EPOCH: 2554 LOSS: 61.1974354757\n", + "EPOCH: 2555 LOSS: 61.1973741646\n", + "EPOCH: 2556 LOSS: 61.1973128879\n", + "EPOCH: 2557 LOSS: 61.1972516455\n", + "EPOCH: 2558 LOSS: 61.1971904375\n", + "EPOCH: 2559 LOSS: 61.1971292637\n", + "EPOCH: 2560 LOSS: 61.1970681241\n", + "EPOCH: 2561 LOSS: 61.1970070187\n", + "EPOCH: 2562 LOSS: 61.1969459473\n", + "EPOCH: 2563 LOSS: 61.1968849101\n", + "EPOCH: 2564 LOSS: 61.1968239068\n", + "EPOCH: 2565 LOSS: 61.1967629375\n", + "EPOCH: 2566 LOSS: 61.1967020022\n", + "EPOCH: 2567 LOSS: 61.1966411007\n", + "EPOCH: 2568 LOSS: 61.1965802331\n", + "EPOCH: 2569 LOSS: 61.1965193992\n", + "EPOCH: 2570 LOSS: 61.1964585991\n", + "EPOCH: 2571 LOSS: 61.1963978327\n", + "EPOCH: 2572 LOSS: 61.1963370999\n", + "EPOCH: 2573 LOSS: 61.1962764007\n", + "EPOCH: 2574 LOSS: 61.1962157351\n", + "EPOCH: 2575 LOSS: 61.196155103\n", + "EPOCH: 2576 LOSS: 61.1960945043\n", + "EPOCH: 2577 LOSS: 61.1960339391\n", + "EPOCH: 2578 LOSS: 61.1959734072\n", + "EPOCH: 2579 LOSS: 61.1959129086\n", + "EPOCH: 2580 LOSS: 61.1958524434\n", + "EPOCH: 2581 LOSS: 61.1957920113\n", + "EPOCH: 2582 LOSS: 61.1957316125\n", + "EPOCH: 2583 LOSS: 61.1956712468\n", + "EPOCH: 2584 LOSS: 61.1956109142\n", + "EPOCH: 2585 LOSS: 61.1955506146\n", + "EPOCH: 2586 LOSS: 61.1954903481\n", + "EPOCH: 2587 LOSS: 61.1954301145\n", + "EPOCH: 2588 LOSS: 61.1953699138\n", + "EPOCH: 2589 LOSS: 61.1953097461\n", + "EPOCH: 2590 LOSS: 61.1952496111\n", + "EPOCH: 2591 LOSS: 61.1951895089\n", + "EPOCH: 2592 LOSS: 61.1951294395\n", + "EPOCH: 2593 LOSS: 61.1950694028\n", + "EPOCH: 2594 LOSS: 61.1950093987\n", + "EPOCH: 2595 LOSS: 61.1949494272\n", + "EPOCH: 2596 LOSS: 61.1948894883\n", + "EPOCH: 2597 LOSS: 61.194829582\n", + "EPOCH: 2598 LOSS: 61.1947697081\n", + "EPOCH: 2599 LOSS: 61.1947098666\n", + "EPOCH: 2600 LOSS: 61.1946500575\n", + "EPOCH: 2601 LOSS: 61.1945902808\n", + "EPOCH: 2602 LOSS: 61.1945305364\n", + "EPOCH: 2603 LOSS: 61.1944708242\n", + "EPOCH: 2604 LOSS: 61.1944111442\n", + "EPOCH: 2605 LOSS: 61.1943514965\n", + "EPOCH: 2606 LOSS: 61.1942918808\n", + "EPOCH: 2607 LOSS: 61.1942322973\n", + "EPOCH: 2608 LOSS: 61.1941727457\n", + "EPOCH: 2609 LOSS: 61.1941132262\n", + "EPOCH: 2610 LOSS: 61.1940537387\n", + "EPOCH: 2611 LOSS: 61.193994283\n", + "EPOCH: 2612 LOSS: 61.1939348593\n", + "EPOCH: 2613 LOSS: 61.1938754673\n", + "EPOCH: 2614 LOSS: 61.1938161072\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2615 LOSS: 61.1937567788\n", + "EPOCH: 2616 LOSS: 61.1936974821\n", + "EPOCH: 2617 LOSS: 61.1936382171\n", + "EPOCH: 2618 LOSS: 61.1935789837\n", + "EPOCH: 2619 LOSS: 61.1935197818\n", + "EPOCH: 2620 LOSS: 61.1934606115\n", + "EPOCH: 2621 LOSS: 61.1934014727\n", + "EPOCH: 2622 LOSS: 61.1933423654\n", + "EPOCH: 2623 LOSS: 61.1932832895\n", + "EPOCH: 2624 LOSS: 61.1932242449\n", + "EPOCH: 2625 LOSS: 61.1931652316\n", + "EPOCH: 2626 LOSS: 61.1931062497\n", + "EPOCH: 2627 LOSS: 61.193047299\n", + "EPOCH: 2628 LOSS: 61.1929883795\n", + "EPOCH: 2629 LOSS: 61.1929294912\n", + "EPOCH: 2630 LOSS: 61.1928706339\n", + "EPOCH: 2631 LOSS: 61.1928118078\n", + "EPOCH: 2632 LOSS: 61.1927530127\n", 
+ "EPOCH: 2633 LOSS: 61.1926942486\n", + "EPOCH: 2634 LOSS: 61.1926355155\n", + "EPOCH: 2635 LOSS: 61.1925768133\n", + "EPOCH: 2636 LOSS: 61.1925181419\n", + "EPOCH: 2637 LOSS: 61.1924595014\n", + "EPOCH: 2638 LOSS: 61.1924008917\n", + "EPOCH: 2639 LOSS: 61.1923423128\n", + "EPOCH: 2640 LOSS: 61.1922837646\n", + "EPOCH: 2641 LOSS: 61.192225247\n", + "EPOCH: 2642 LOSS: 61.1921667601\n", + "EPOCH: 2643 LOSS: 61.1921083038\n", + "EPOCH: 2644 LOSS: 61.192049878\n", + "EPOCH: 2645 LOSS: 61.1919914827\n", + "EPOCH: 2646 LOSS: 61.191933118\n", + "EPOCH: 2647 LOSS: 61.1918747836\n", + "EPOCH: 2648 LOSS: 61.1918164797\n", + "EPOCH: 2649 LOSS: 61.1917582061\n", + "EPOCH: 2650 LOSS: 61.1916999629\n", + "EPOCH: 2651 LOSS: 61.1916417499\n", + "EPOCH: 2652 LOSS: 61.1915835671\n", + "EPOCH: 2653 LOSS: 61.1915254146\n", + "EPOCH: 2654 LOSS: 61.1914672922\n", + "EPOCH: 2655 LOSS: 61.1914092\n", + "EPOCH: 2656 LOSS: 61.1913511378\n", + "EPOCH: 2657 LOSS: 61.1912931057\n", + "EPOCH: 2658 LOSS: 61.1912351035\n", + "EPOCH: 2659 LOSS: 61.1911771314\n", + "EPOCH: 2660 LOSS: 61.1911191892\n", + "EPOCH: 2661 LOSS: 61.1910612768\n", + "EPOCH: 2662 LOSS: 61.1910033943\n", + "EPOCH: 2663 LOSS: 61.1909455417\n", + "EPOCH: 2664 LOSS: 61.1908877188\n", + "EPOCH: 2665 LOSS: 61.1908299256\n", + "EPOCH: 2666 LOSS: 61.1907721621\n", + "EPOCH: 2667 LOSS: 61.1907144283\n", + "EPOCH: 2668 LOSS: 61.1906567242\n", + "EPOCH: 2669 LOSS: 61.1905990496\n", + "EPOCH: 2670 LOSS: 61.1905414045\n", + "EPOCH: 2671 LOSS: 61.190483789\n", + "EPOCH: 2672 LOSS: 61.1904262029\n", + "EPOCH: 2673 LOSS: 61.1903686462\n", + "EPOCH: 2674 LOSS: 61.190311119\n", + "EPOCH: 2675 LOSS: 61.1902536211\n", + "EPOCH: 2676 LOSS: 61.1901961525\n", + "EPOCH: 2677 LOSS: 61.1901387133\n", + "EPOCH: 2678 LOSS: 61.1900813032\n", + "EPOCH: 2679 LOSS: 61.1900239224\n", + "EPOCH: 2680 LOSS: 61.1899665707\n", + "EPOCH: 2681 LOSS: 61.1899092482\n", + "EPOCH: 2682 LOSS: 61.1898519548\n", + "EPOCH: 2683 LOSS: 61.1897946904\n", + "EPOCH: 2684 LOSS: 61.1897374551\n", + "EPOCH: 2685 LOSS: 61.1896802487\n", + "EPOCH: 2686 LOSS: 61.1896230713\n", + "EPOCH: 2687 LOSS: 61.1895659228\n", + "EPOCH: 2688 LOSS: 61.1895088032\n", + "EPOCH: 2689 LOSS: 61.1894517124\n", + "EPOCH: 2690 LOSS: 61.1893946504\n", + "EPOCH: 2691 LOSS: 61.1893376172\n", + "EPOCH: 2692 LOSS: 61.1892806127\n", + "EPOCH: 2693 LOSS: 61.189223637\n", + "EPOCH: 2694 LOSS: 61.1891666898\n", + "EPOCH: 2695 LOSS: 61.1891097713\n", + "EPOCH: 2696 LOSS: 61.1890528814\n", + "EPOCH: 2697 LOSS: 61.1889960201\n", + "EPOCH: 2698 LOSS: 61.1889391872\n", + "EPOCH: 2699 LOSS: 61.1888823828\n", + "EPOCH: 2700 LOSS: 61.1888256069\n", + "EPOCH: 2701 LOSS: 61.1887688594\n", + "EPOCH: 2702 LOSS: 61.1887121402\n", + "EPOCH: 2703 LOSS: 61.1886554494\n", + "EPOCH: 2704 LOSS: 61.1885987869\n", + "EPOCH: 2705 LOSS: 61.1885421526\n", + "EPOCH: 2706 LOSS: 61.1884855466\n", + "EPOCH: 2707 LOSS: 61.1884289688\n", + "EPOCH: 2708 LOSS: 61.1883724191\n", + "EPOCH: 2709 LOSS: 61.1883158976\n", + "EPOCH: 2710 LOSS: 61.1882594041\n", + "EPOCH: 2711 LOSS: 61.1882029387\n", + "EPOCH: 2712 LOSS: 61.1881465014\n", + "EPOCH: 2713 LOSS: 61.188090092\n", + "EPOCH: 2714 LOSS: 61.1880337105\n", + "EPOCH: 2715 LOSS: 61.187977357\n", + "EPOCH: 2716 LOSS: 61.1879210313\n", + "EPOCH: 2717 LOSS: 61.1878647335\n", + "EPOCH: 2718 LOSS: 61.1878084635\n", + "EPOCH: 2719 LOSS: 61.1877522213\n", + "EPOCH: 2720 LOSS: 61.1876960068\n", + "EPOCH: 2721 LOSS: 61.1876398201\n", + "EPOCH: 2722 LOSS: 61.187583661\n", + "EPOCH: 2723 LOSS: 61.1875275295\n", + "EPOCH: 2724 
LOSS: 61.1874714257\n", + "EPOCH: 2725 LOSS: 61.1874153494\n", + "EPOCH: 2726 LOSS: 61.1873593007\n", + "EPOCH: 2727 LOSS: 61.1873032794\n", + "EPOCH: 2728 LOSS: 61.1872472857\n", + "EPOCH: 2729 LOSS: 61.1871913193\n", + "EPOCH: 2730 LOSS: 61.1871353804\n", + "EPOCH: 2731 LOSS: 61.1870794689\n", + "EPOCH: 2732 LOSS: 61.1870235846\n", + "EPOCH: 2733 LOSS: 61.1869677277\n", + "EPOCH: 2734 LOSS: 61.1869118981\n", + "EPOCH: 2735 LOSS: 61.1868560956\n", + "EPOCH: 2736 LOSS: 61.1868003204\n", + "EPOCH: 2737 LOSS: 61.1867445724\n", + "EPOCH: 2738 LOSS: 61.1866888515\n", + "EPOCH: 2739 LOSS: 61.1866331576\n", + "EPOCH: 2740 LOSS: 61.1865774909\n", + "EPOCH: 2741 LOSS: 61.1865218512\n", + "EPOCH: 2742 LOSS: 61.1864662385\n", + "EPOCH: 2743 LOSS: 61.1864106527\n", + "EPOCH: 2744 LOSS: 61.1863550939\n", + "EPOCH: 2745 LOSS: 61.186299562\n", + "EPOCH: 2746 LOSS: 61.186244057\n", + "EPOCH: 2747 LOSS: 61.1861885788\n", + "EPOCH: 2748 LOSS: 61.1861331274\n", + "EPOCH: 2749 LOSS: 61.1860777028\n", + "EPOCH: 2750 LOSS: 61.1860223049\n", + "EPOCH: 2751 LOSS: 61.1859669338\n", + "EPOCH: 2752 LOSS: 61.1859115893\n", + "EPOCH: 2753 LOSS: 61.1858562714\n", + "EPOCH: 2754 LOSS: 61.1858009802\n", + "EPOCH: 2755 LOSS: 61.1857457156\n", + "EPOCH: 2756 LOSS: 61.1856904774\n", + "EPOCH: 2757 LOSS: 61.1856352659\n", + "EPOCH: 2758 LOSS: 61.1855800808\n", + "EPOCH: 2759 LOSS: 61.1855249221\n", + "EPOCH: 2760 LOSS: 61.1854697899\n", + "EPOCH: 2761 LOSS: 61.185414684\n", + "EPOCH: 2762 LOSS: 61.1853596045\n", + "EPOCH: 2763 LOSS: 61.1853045514\n", + "EPOCH: 2764 LOSS: 61.1852495245\n", + "EPOCH: 2765 LOSS: 61.1851945239\n", + "EPOCH: 2766 LOSS: 61.1851395495\n", + "EPOCH: 2767 LOSS: 61.1850846013\n", + "EPOCH: 2768 LOSS: 61.1850296793\n", + "EPOCH: 2769 LOSS: 61.1849747834\n", + "EPOCH: 2770 LOSS: 61.1849199136\n", + "EPOCH: 2771 LOSS: 61.1848650699\n", + "EPOCH: 2772 LOSS: 61.1848102522\n", + "EPOCH: 2773 LOSS: 61.1847554606\n", + "EPOCH: 2774 LOSS: 61.1847006949\n", + "EPOCH: 2775 LOSS: 61.1846459552\n", + "EPOCH: 2776 LOSS: 61.1845912414\n", + "EPOCH: 2777 LOSS: 61.1845365534\n", + "EPOCH: 2778 LOSS: 61.1844818914\n", + "EPOCH: 2779 LOSS: 61.1844272551\n", + "EPOCH: 2780 LOSS: 61.1843726447\n", + "EPOCH: 2781 LOSS: 61.18431806\n", + "EPOCH: 2782 LOSS: 61.1842635011\n", + "EPOCH: 2783 LOSS: 61.1842089678\n", + "EPOCH: 2784 LOSS: 61.1841544602\n", + "EPOCH: 2785 LOSS: 61.1840999783\n", + "EPOCH: 2786 LOSS: 61.184045522\n", + "EPOCH: 2787 LOSS: 61.1839910912\n", + "EPOCH: 2788 LOSS: 61.1839366861\n", + "EPOCH: 2789 LOSS: 61.1838823064\n", + "EPOCH: 2790 LOSS: 61.1838279522\n", + "EPOCH: 2791 LOSS: 61.1837736235\n", + "EPOCH: 2792 LOSS: 61.1837193202\n", + "EPOCH: 2793 LOSS: 61.1836650423\n", + "EPOCH: 2794 LOSS: 61.1836107898\n", + "EPOCH: 2795 LOSS: 61.1835565627\n", + "EPOCH: 2796 LOSS: 61.1835023608\n", + "EPOCH: 2797 LOSS: 61.1834481842\n", + "EPOCH: 2798 LOSS: 61.1833940329\n", + "EPOCH: 2799 LOSS: 61.1833399068\n", + "EPOCH: 2800 LOSS: 61.1832858059\n", + "EPOCH: 2801 LOSS: 61.1832317301\n", + "EPOCH: 2802 LOSS: 61.1831776795\n", + "EPOCH: 2803 LOSS: 61.183123654\n", + "EPOCH: 2804 LOSS: 61.1830696536\n", + "EPOCH: 2805 LOSS: 61.1830156782\n", + "EPOCH: 2806 LOSS: 61.1829617278\n", + "EPOCH: 2807 LOSS: 61.1829078025\n", + "EPOCH: 2808 LOSS: 61.182853902\n", + "EPOCH: 2809 LOSS: 61.1828000265\n", + "EPOCH: 2810 LOSS: 61.1827461759\n", + "EPOCH: 2811 LOSS: 61.1826923502\n", + "EPOCH: 2812 LOSS: 61.1826385493\n", + "EPOCH: 2813 LOSS: 61.1825847733\n", + "EPOCH: 2814 LOSS: 61.182531022\n", + "EPOCH: 2815 LOSS: 
61.1824772955\n", + "EPOCH: 2816 LOSS: 61.1824235937\n", + "EPOCH: 2817 LOSS: 61.1823699166\n", + "EPOCH: 2818 LOSS: 61.1823162641\n", + "EPOCH: 2819 LOSS: 61.1822626363\n", + "EPOCH: 2820 LOSS: 61.1822090332\n", + "EPOCH: 2821 LOSS: 61.1821554546\n", + "EPOCH: 2822 LOSS: 61.1821019005\n", + "EPOCH: 2823 LOSS: 61.182048371\n", + "EPOCH: 2824 LOSS: 61.181994866\n", + "EPOCH: 2825 LOSS: 61.1819413855\n", + "EPOCH: 2826 LOSS: 61.1818879294\n", + "EPOCH: 2827 LOSS: 61.1818344977\n", + "EPOCH: 2828 LOSS: 61.1817810904\n", + "EPOCH: 2829 LOSS: 61.1817277075\n", + "EPOCH: 2830 LOSS: 61.1816743489\n", + "EPOCH: 2831 LOSS: 61.1816210146\n", + "EPOCH: 2832 LOSS: 61.1815677046\n", + "EPOCH: 2833 LOSS: 61.1815144188\n", + "EPOCH: 2834 LOSS: 61.1814611572\n", + "EPOCH: 2835 LOSS: 61.1814079199\n", + "EPOCH: 2836 LOSS: 61.1813547067\n", + "EPOCH: 2837 LOSS: 61.1813015176\n", + "EPOCH: 2838 LOSS: 61.1812483527\n", + "EPOCH: 2839 LOSS: 61.1811952118\n", + "EPOCH: 2840 LOSS: 61.181142095\n", + "EPOCH: 2841 LOSS: 61.1810890022\n", + "EPOCH: 2842 LOSS: 61.1810359334\n", + "EPOCH: 2843 LOSS: 61.1809828886\n", + "EPOCH: 2844 LOSS: 61.1809298677\n", + "EPOCH: 2845 LOSS: 61.1808768707\n", + "EPOCH: 2846 LOSS: 61.1808238977\n", + "EPOCH: 2847 LOSS: 61.1807709485\n", + "EPOCH: 2848 LOSS: 61.1807180231\n", + "EPOCH: 2849 LOSS: 61.1806651215\n", + "EPOCH: 2850 LOSS: 61.1806122437\n", + "EPOCH: 2851 LOSS: 61.1805593897\n", + "EPOCH: 2852 LOSS: 61.1805065594\n", + "EPOCH: 2853 LOSS: 61.1804537528\n", + "EPOCH: 2854 LOSS: 61.1804009699\n", + "EPOCH: 2855 LOSS: 61.1803482107\n", + "EPOCH: 2856 LOSS: 61.180295475\n", + "EPOCH: 2857 LOSS: 61.1802427629\n", + "EPOCH: 2858 LOSS: 61.1801900745\n", + "EPOCH: 2859 LOSS: 61.1801374095\n", + "EPOCH: 2860 LOSS: 61.1800847681\n", + "EPOCH: 2861 LOSS: 61.1800321502\n", + "EPOCH: 2862 LOSS: 61.1799795557\n", + "EPOCH: 2863 LOSS: 61.1799269846\n", + "EPOCH: 2864 LOSS: 61.179874437\n", + "EPOCH: 2865 LOSS: 61.1798219128\n", + "EPOCH: 2866 LOSS: 61.1797694119\n", + "EPOCH: 2867 LOSS: 61.1797169343\n", + "EPOCH: 2868 LOSS: 61.1796644801\n", + "EPOCH: 2869 LOSS: 61.1796120491\n", + "EPOCH: 2870 LOSS: 61.1795596414\n", + "EPOCH: 2871 LOSS: 61.1795072569\n", + "EPOCH: 2872 LOSS: 61.1794548956\n", + "EPOCH: 2873 LOSS: 61.1794025574\n", + "EPOCH: 2874 LOSS: 61.1793502425\n", + "EPOCH: 2875 LOSS: 61.1792979506\n", + "EPOCH: 2876 LOSS: 61.1792456819\n", + "EPOCH: 2877 LOSS: 61.1791934362\n", + "EPOCH: 2878 LOSS: 61.1791412136\n", + "EPOCH: 2879 LOSS: 61.179089014\n", + "EPOCH: 2880 LOSS: 61.1790368374\n", + "EPOCH: 2881 LOSS: 61.1789846837\n", + "EPOCH: 2882 LOSS: 61.178932553\n", + "EPOCH: 2883 LOSS: 61.1788804452\n", + "EPOCH: 2884 LOSS: 61.1788283603\n", + "EPOCH: 2885 LOSS: 61.1787762983\n", + "EPOCH: 2886 LOSS: 61.1787242592\n", + "EPOCH: 2887 LOSS: 61.1786722428\n", + "EPOCH: 2888 LOSS: 61.1786202492\n", + "EPOCH: 2889 LOSS: 61.1785682784\n", + "EPOCH: 2890 LOSS: 61.1785163304\n", + "EPOCH: 2891 LOSS: 61.178464405\n", + "EPOCH: 2892 LOSS: 61.1784125024\n", + "EPOCH: 2893 LOSS: 61.1783606224\n", + "EPOCH: 2894 LOSS: 61.1783087651\n", + "EPOCH: 2895 LOSS: 61.1782569304\n", + "EPOCH: 2896 LOSS: 61.1782051182\n", + "EPOCH: 2897 LOSS: 61.1781533287\n", + "EPOCH: 2898 LOSS: 61.1781015617\n", + "EPOCH: 2899 LOSS: 61.1780498172\n", + "EPOCH: 2900 LOSS: 61.1779980952\n", + "EPOCH: 2901 LOSS: 61.1779463957\n", + "EPOCH: 2902 LOSS: 61.1778947186\n", + "EPOCH: 2903 LOSS: 61.1778430639\n", + "EPOCH: 2904 LOSS: 61.1777914316\n", + "EPOCH: 2905 LOSS: 61.1777398217\n", + "EPOCH: 2906 LOSS: 
61.1776882342\n", + "EPOCH: 2907 LOSS: 61.1776366689\n", + "EPOCH: 2908 LOSS: 61.177585126\n", + "EPOCH: 2909 LOSS: 61.1775336054\n", + "EPOCH: 2910 LOSS: 61.177482107\n", + "EPOCH: 2911 LOSS: 61.1774306308\n", + "EPOCH: 2912 LOSS: 61.1773791768\n", + "EPOCH: 2913 LOSS: 61.177327745\n", + "EPOCH: 2914 LOSS: 61.1772763353\n", + "EPOCH: 2915 LOSS: 61.1772249478\n", + "EPOCH: 2916 LOSS: 61.1771735824\n", + "EPOCH: 2917 LOSS: 61.1771222391\n", + "EPOCH: 2918 LOSS: 61.1770709178\n", + "EPOCH: 2919 LOSS: 61.1770196185\n", + "EPOCH: 2920 LOSS: 61.1769683413\n", + "EPOCH: 2921 LOSS: 61.1769170861\n", + "EPOCH: 2922 LOSS: 61.1768658528\n", + "EPOCH: 2923 LOSS: 61.1768146414\n", + "EPOCH: 2924 LOSS: 61.176763452\n", + "EPOCH: 2925 LOSS: 61.1767122845\n", + "EPOCH: 2926 LOSS: 61.1766611388\n", + "EPOCH: 2927 LOSS: 61.176610015\n", + "EPOCH: 2928 LOSS: 61.176558913\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2929 LOSS: 61.1765078328\n", + "EPOCH: 2930 LOSS: 61.1764567743\n", + "EPOCH: 2931 LOSS: 61.1764057376\n", + "EPOCH: 2932 LOSS: 61.1763547227\n", + "EPOCH: 2933 LOSS: 61.1763037294\n", + "EPOCH: 2934 LOSS: 61.1762527579\n", + "EPOCH: 2935 LOSS: 61.176201808\n", + "EPOCH: 2936 LOSS: 61.1761508797\n", + "EPOCH: 2937 LOSS: 61.176099973\n", + "EPOCH: 2938 LOSS: 61.1760490879\n", + "EPOCH: 2939 LOSS: 61.1759982244\n", + "EPOCH: 2940 LOSS: 61.1759473825\n", + "EPOCH: 2941 LOSS: 61.175896562\n", + "EPOCH: 2942 LOSS: 61.1758457631\n", + "EPOCH: 2943 LOSS: 61.1757949856\n", + "EPOCH: 2944 LOSS: 61.1757442295\n", + "EPOCH: 2945 LOSS: 61.1756934949\n", + "EPOCH: 2946 LOSS: 61.1756427818\n", + "EPOCH: 2947 LOSS: 61.1755920899\n", + "EPOCH: 2948 LOSS: 61.1755414195\n", + "EPOCH: 2949 LOSS: 61.1754907704\n", + "EPOCH: 2950 LOSS: 61.1754401426\n", + "EPOCH: 2951 LOSS: 61.1753895361\n", + "EPOCH: 2952 LOSS: 61.1753389508\n", + "EPOCH: 2953 LOSS: 61.1752883869\n", + "EPOCH: 2954 LOSS: 61.1752378441\n", + "EPOCH: 2955 LOSS: 61.1751873225\n", + "EPOCH: 2956 LOSS: 61.1751368222\n", + "EPOCH: 2957 LOSS: 61.1750863429\n", + "EPOCH: 2958 LOSS: 61.1750358849\n", + "EPOCH: 2959 LOSS: 61.1749854479\n", + "EPOCH: 2960 LOSS: 61.174935032\n", + "EPOCH: 2961 LOSS: 61.1748846372\n", + "EPOCH: 2962 LOSS: 61.1748342634\n", + "EPOCH: 2963 LOSS: 61.1747839107\n", + "EPOCH: 2964 LOSS: 61.174733579\n", + "EPOCH: 2965 LOSS: 61.1746832682\n", + "EPOCH: 2966 LOSS: 61.1746329784\n", + "EPOCH: 2967 LOSS: 61.1745827095\n", + "EPOCH: 2968 LOSS: 61.1745324616\n", + "EPOCH: 2969 LOSS: 61.1744822345\n", + "EPOCH: 2970 LOSS: 61.1744320284\n", + "EPOCH: 2971 LOSS: 61.174381843\n", + "EPOCH: 2972 LOSS: 61.1743316785\n", + "EPOCH: 2973 LOSS: 61.1742815348\n", + "EPOCH: 2974 LOSS: 61.1742314119\n", + "EPOCH: 2975 LOSS: 61.1741813098\n", + "EPOCH: 2976 LOSS: 61.1741312284\n", + "EPOCH: 2977 LOSS: 61.1740811677\n", + "EPOCH: 2978 LOSS: 61.1740311277\n", + "EPOCH: 2979 LOSS: 61.1739811084\n", + "EPOCH: 2980 LOSS: 61.1739311097\n", + "EPOCH: 2981 LOSS: 61.1738811317\n", + "EPOCH: 2982 LOSS: 61.1738311743\n", + "EPOCH: 2983 LOSS: 61.1737812375\n", + "EPOCH: 2984 LOSS: 61.1737313213\n", + "EPOCH: 2985 LOSS: 61.1736814256\n", + "EPOCH: 2986 LOSS: 61.1736315504\n", + "EPOCH: 2987 LOSS: 61.1735816957\n", + "EPOCH: 2988 LOSS: 61.1735318615\n", + "EPOCH: 2989 LOSS: 61.1734820478\n", + "EPOCH: 2990 LOSS: 61.1734322546\n", + "EPOCH: 2991 LOSS: 61.1733824817\n", + "EPOCH: 2992 LOSS: 61.1733327293\n", + "EPOCH: 2993 LOSS: 61.1732829972\n", + "EPOCH: 2994 LOSS: 61.1732332855\n", + "EPOCH: 2995 LOSS: 61.1731835941\n", + 
"EPOCH: 2996 LOSS: 61.173133923\n", + "EPOCH: 2997 LOSS: 61.1730842723\n", + "EPOCH: 2998 LOSS: 61.1730346418\n", + "EPOCH: 2999 LOSS: 61.1729850316\n", + "EPOCH: 3000 LOSS: 61.1729354416\n", + "EPOCH: 3001 LOSS: 61.1728858718\n", + "EPOCH: 3002 LOSS: 61.1728363222\n", + "EPOCH: 3003 LOSS: 61.1727867928\n", + "EPOCH: 3004 LOSS: 61.1727372835\n", + "EPOCH: 3005 LOSS: 61.1726877944\n", + "EPOCH: 3006 LOSS: 61.1726383254\n", + "EPOCH: 3007 LOSS: 61.1725888765\n", + "EPOCH: 3008 LOSS: 61.1725394476\n", + "EPOCH: 3009 LOSS: 61.1724900388\n", + "EPOCH: 3010 LOSS: 61.17244065\n", + "EPOCH: 3011 LOSS: 61.1723912812\n", + "EPOCH: 3012 LOSS: 61.1723419324\n", + "EPOCH: 3013 LOSS: 61.1722926036\n", + "EPOCH: 3014 LOSS: 61.1722432947\n", + "EPOCH: 3015 LOSS: 61.1721940058\n", + "EPOCH: 3016 LOSS: 61.1721447368\n", + "EPOCH: 3017 LOSS: 61.1720954876\n", + "EPOCH: 3018 LOSS: 61.1720462583\n", + "EPOCH: 3019 LOSS: 61.1719970489\n", + "EPOCH: 3020 LOSS: 61.1719478593\n", + "EPOCH: 3021 LOSS: 61.1718986895\n", + "EPOCH: 3022 LOSS: 61.1718495395\n", + "EPOCH: 3023 LOSS: 61.1718004092\n", + "EPOCH: 3024 LOSS: 61.1717512987\n", + "EPOCH: 3025 LOSS: 61.1717022079\n", + "EPOCH: 3026 LOSS: 61.1716531369\n", + "EPOCH: 3027 LOSS: 61.1716040855\n", + "EPOCH: 3028 LOSS: 61.1715550538\n", + "EPOCH: 3029 LOSS: 61.1715060417\n", + "EPOCH: 3030 LOSS: 61.1714570493\n", + "EPOCH: 3031 LOSS: 61.1714080765\n", + "EPOCH: 3032 LOSS: 61.1713591232\n", + "EPOCH: 3033 LOSS: 61.1713101896\n", + "EPOCH: 3034 LOSS: 61.1712612755\n", + "EPOCH: 3035 LOSS: 61.1712123809\n", + "EPOCH: 3036 LOSS: 61.1711635058\n", + "EPOCH: 3037 LOSS: 61.1711146502\n", + "EPOCH: 3038 LOSS: 61.1710658141\n", + "EPOCH: 3039 LOSS: 61.1710169974\n", + "EPOCH: 3040 LOSS: 61.1709682002\n", + "EPOCH: 3041 LOSS: 61.1709194224\n", + "EPOCH: 3042 LOSS: 61.170870664\n", + "EPOCH: 3043 LOSS: 61.1708219249\n", + "EPOCH: 3044 LOSS: 61.1707732053\n", + "EPOCH: 3045 LOSS: 61.1707245049\n", + "EPOCH: 3046 LOSS: 61.1706758239\n", + "EPOCH: 3047 LOSS: 61.1706271621\n", + "EPOCH: 3048 LOSS: 61.1705785197\n", + "EPOCH: 3049 LOSS: 61.1705298965\n", + "EPOCH: 3050 LOSS: 61.1704812926\n", + "EPOCH: 3051 LOSS: 61.1704327078\n", + "EPOCH: 3052 LOSS: 61.1703841423\n", + "EPOCH: 3053 LOSS: 61.170335596\n", + "EPOCH: 3054 LOSS: 61.1702870688\n", + "EPOCH: 3055 LOSS: 61.1702385607\n", + "EPOCH: 3056 LOSS: 61.1701900718\n", + "EPOCH: 3057 LOSS: 61.1701416021\n", + "EPOCH: 3058 LOSS: 61.1700931514\n", + "EPOCH: 3059 LOSS: 61.1700447197\n", + "EPOCH: 3060 LOSS: 61.1699963071\n", + "EPOCH: 3061 LOSS: 61.1699479136\n", + "EPOCH: 3062 LOSS: 61.1698995391\n", + "EPOCH: 3063 LOSS: 61.1698511835\n", + "EPOCH: 3064 LOSS: 61.169802847\n", + "EPOCH: 3065 LOSS: 61.1697545294\n", + "EPOCH: 3066 LOSS: 61.1697062307\n", + "EPOCH: 3067 LOSS: 61.169657951\n", + "EPOCH: 3068 LOSS: 61.1696096901\n", + "EPOCH: 3069 LOSS: 61.1695614482\n", + "EPOCH: 3070 LOSS: 61.1695132251\n", + "EPOCH: 3071 LOSS: 61.1694650209\n", + "EPOCH: 3072 LOSS: 61.1694168355\n", + "EPOCH: 3073 LOSS: 61.1693686689\n", + "EPOCH: 3074 LOSS: 61.1693205211\n", + "EPOCH: 3075 LOSS: 61.1692723921\n", + "EPOCH: 3076 LOSS: 61.1692242818\n", + "EPOCH: 3077 LOSS: 61.1691761903\n", + "EPOCH: 3078 LOSS: 61.1691281175\n", + "EPOCH: 3079 LOSS: 61.1690800634\n", + "EPOCH: 3080 LOSS: 61.169032028\n", + "EPOCH: 3081 LOSS: 61.1689840112\n", + "EPOCH: 3082 LOSS: 61.1689360131\n", + "EPOCH: 3083 LOSS: 61.1688880336\n", + "EPOCH: 3084 LOSS: 61.1688400728\n", + "EPOCH: 3085 LOSS: 61.1687921305\n", + "EPOCH: 3086 LOSS: 61.1687442068\n", + "EPOCH: 3087 
LOSS: 61.1686963017\n", + "EPOCH: 3088 LOSS: 61.1686484151\n", + "EPOCH: 3089 LOSS: 61.168600547\n", + "EPOCH: 3090 LOSS: 61.1685526974\n", + "EPOCH: 3091 LOSS: 61.1685048663\n", + "EPOCH: 3092 LOSS: 61.1684570537\n", + "EPOCH: 3093 LOSS: 61.1684092595\n", + "EPOCH: 3094 LOSS: 61.1683614838\n", + "EPOCH: 3095 LOSS: 61.1683137265\n", + "EPOCH: 3096 LOSS: 61.1682659876\n", + "EPOCH: 3097 LOSS: 61.168218267\n", + "EPOCH: 3098 LOSS: 61.1681705649\n", + "EPOCH: 3099 LOSS: 61.168122881\n", + "EPOCH: 3100 LOSS: 61.1680752155\n", + "EPOCH: 3101 LOSS: 61.1680275683\n", + "EPOCH: 3102 LOSS: 61.1679799394\n", + "EPOCH: 3103 LOSS: 61.1679323288\n", + "EPOCH: 3104 LOSS: 61.1678847364\n", + "EPOCH: 3105 LOSS: 61.1678371623\n", + "EPOCH: 3106 LOSS: 61.1677896064\n", + "EPOCH: 3107 LOSS: 61.1677420687\n", + "EPOCH: 3108 LOSS: 61.1676945492\n", + "EPOCH: 3109 LOSS: 61.1676470479\n", + "EPOCH: 3110 LOSS: 61.1675995647\n", + "EPOCH: 3111 LOSS: 61.1675520996\n", + "EPOCH: 3112 LOSS: 61.1675046527\n", + "EPOCH: 3113 LOSS: 61.1674572238\n", + "EPOCH: 3114 LOSS: 61.1674098131\n", + "EPOCH: 3115 LOSS: 61.1673624204\n", + "EPOCH: 3116 LOSS: 61.1673150458\n", + "EPOCH: 3117 LOSS: 61.1672676892\n", + "EPOCH: 3118 LOSS: 61.1672203506\n", + "EPOCH: 3119 LOSS: 61.16717303\n", + "EPOCH: 3120 LOSS: 61.1671257274\n", + "EPOCH: 3121 LOSS: 61.1670784428\n", + "EPOCH: 3122 LOSS: 61.1670311761\n", + "EPOCH: 3123 LOSS: 61.1669839274\n", + "EPOCH: 3124 LOSS: 61.1669366965\n", + "EPOCH: 3125 LOSS: 61.1668894836\n", + "EPOCH: 3126 LOSS: 61.1668422885\n", + "EPOCH: 3127 LOSS: 61.1667951113\n", + "EPOCH: 3128 LOSS: 61.166747952\n", + "EPOCH: 3129 LOSS: 61.1667008105\n", + "EPOCH: 3130 LOSS: 61.1666536868\n", + "EPOCH: 3131 LOSS: 61.1666065809\n", + "EPOCH: 3132 LOSS: 61.1665594927\n", + "EPOCH: 3133 LOSS: 61.1665124224\n", + "EPOCH: 3134 LOSS: 61.1664653698\n", + "EPOCH: 3135 LOSS: 61.1664183349\n", + "EPOCH: 3136 LOSS: 61.1663713177\n", + "EPOCH: 3137 LOSS: 61.1663243182\n", + "EPOCH: 3138 LOSS: 61.1662773364\n", + "EPOCH: 3139 LOSS: 61.1662303723\n", + "EPOCH: 3140 LOSS: 61.1661834258\n", + "EPOCH: 3141 LOSS: 61.166136497\n", + "EPOCH: 3142 LOSS: 61.1660895857\n", + "EPOCH: 3143 LOSS: 61.1660426921\n", + "EPOCH: 3144 LOSS: 61.1659958161\n", + "EPOCH: 3145 LOSS: 61.1659489576\n", + "EPOCH: 3146 LOSS: 61.1659021166\n", + "EPOCH: 3147 LOSS: 61.1658552932\n", + "EPOCH: 3148 LOSS: 61.1658084873\n", + "EPOCH: 3149 LOSS: 61.165761699\n", + "EPOCH: 3150 LOSS: 61.1657149281\n", + "EPOCH: 3151 LOSS: 61.1656681746\n", + "EPOCH: 3152 LOSS: 61.1656214387\n", + "EPOCH: 3153 LOSS: 61.1655747201\n", + "EPOCH: 3154 LOSS: 61.165528019\n", + "EPOCH: 3155 LOSS: 61.1654813353\n", + "EPOCH: 3156 LOSS: 61.1654346689\n", + "EPOCH: 3157 LOSS: 61.16538802\n", + "EPOCH: 3158 LOSS: 61.1653413884\n", + "EPOCH: 3159 LOSS: 61.1652947741\n", + "EPOCH: 3160 LOSS: 61.1652481772\n", + "EPOCH: 3161 LOSS: 61.1652015976\n", + "EPOCH: 3162 LOSS: 61.1651550352\n", + "EPOCH: 3163 LOSS: 61.1651084902\n", + "EPOCH: 3164 LOSS: 61.1650619624\n", + "EPOCH: 3165 LOSS: 61.1650154518\n", + "EPOCH: 3166 LOSS: 61.1649689585\n", + "EPOCH: 3167 LOSS: 61.1649224824\n", + "EPOCH: 3168 LOSS: 61.1648760235\n", + "EPOCH: 3169 LOSS: 61.1648295817\n", + "EPOCH: 3170 LOSS: 61.1647831572\n", + "EPOCH: 3171 LOSS: 61.1647367498\n", + "EPOCH: 3172 LOSS: 61.1646903595\n", + "EPOCH: 3173 LOSS: 61.1646439863\n", + "EPOCH: 3174 LOSS: 61.1645976303\n", + "EPOCH: 3175 LOSS: 61.1645512913\n", + "EPOCH: 3176 LOSS: 61.1645049694\n", + "EPOCH: 3177 LOSS: 61.1644586646\n", + "EPOCH: 3178 LOSS: 
61.1644123768\n", + "EPOCH: 3179 LOSS: 61.164366106\n", + "EPOCH: 3180 LOSS: 61.1643198522\n", + "EPOCH: 3181 LOSS: 61.1642736155\n", + "EPOCH: 3182 LOSS: 61.1642273957\n", + "EPOCH: 3183 LOSS: 61.1641811928\n", + "EPOCH: 3184 LOSS: 61.164135007\n", + "EPOCH: 3185 LOSS: 61.164088838\n", + "EPOCH: 3186 LOSS: 61.164042686\n", + "EPOCH: 3187 LOSS: 61.1639965509\n", + "EPOCH: 3188 LOSS: 61.1639504327\n", + "EPOCH: 3189 LOSS: 61.1639043313\n", + "EPOCH: 3190 LOSS: 61.1638582468\n", + "EPOCH: 3191 LOSS: 61.1638121791\n", + "EPOCH: 3192 LOSS: 61.1637661283\n", + "EPOCH: 3193 LOSS: 61.1637200943\n", + "EPOCH: 3194 LOSS: 61.1636740771\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 3195 LOSS: 61.1636280766\n", + "EPOCH: 3196 LOSS: 61.163582093\n", + "EPOCH: 3197 LOSS: 61.163536126\n", + "EPOCH: 3198 LOSS: 61.1634901758\n", + "EPOCH: 3199 LOSS: 61.1634442424\n", + "EPOCH: 3200 LOSS: 61.1633983256\n", + "EPOCH: 3201 LOSS: 61.1633524256\n", + "EPOCH: 3202 LOSS: 61.1633065422\n", + "EPOCH: 3203 LOSS: 61.1632606754\n", + "EPOCH: 3204 LOSS: 61.1632148254\n", + "EPOCH: 3205 LOSS: 61.1631689919\n", + "EPOCH: 3206 LOSS: 61.1631231751\n", + "EPOCH: 3207 LOSS: 61.1630773748\n", + "EPOCH: 3208 LOSS: 61.1630315912\n", + "EPOCH: 3209 LOSS: 61.1629858241\n", + "EPOCH: 3210 LOSS: 61.1629400736\n", + "EPOCH: 3211 LOSS: 61.1628943397\n", + "EPOCH: 3212 LOSS: 61.1628486222\n", + "EPOCH: 3213 LOSS: 61.1628029213\n", + "EPOCH: 3214 LOSS: 61.1627572369\n", + "EPOCH: 3215 LOSS: 61.1627115689\n", + "EPOCH: 3216 LOSS: 61.1626659174\n", + "EPOCH: 3217 LOSS: 61.1626202824\n", + "EPOCH: 3218 LOSS: 61.1625746639\n", + "EPOCH: 3219 LOSS: 61.1625290617\n", + "EPOCH: 3220 LOSS: 61.162483476\n", + "EPOCH: 3221 LOSS: 61.1624379066\n", + "EPOCH: 3222 LOSS: 61.1623923537\n", + "EPOCH: 3223 LOSS: 61.1623468171\n", + "EPOCH: 3224 LOSS: 61.1623012969\n", + "EPOCH: 3225 LOSS: 61.162255793\n", + "EPOCH: 3226 LOSS: 61.1622103054\n", + "EPOCH: 3227 LOSS: 61.1621648341\n", + "EPOCH: 3228 LOSS: 61.1621193792\n", + "EPOCH: 3229 LOSS: 61.1620739405\n", + "EPOCH: 3230 LOSS: 61.1620285181\n", + "EPOCH: 3231 LOSS: 61.1619831119\n", + "EPOCH: 3232 LOSS: 61.161937722\n", + "EPOCH: 3233 LOSS: 61.1618923483\n", + "EPOCH: 3234 LOSS: 61.1618469908\n", + "EPOCH: 3235 LOSS: 61.1618016495\n", + "EPOCH: 3236 LOSS: 61.1617563244\n", + "EPOCH: 3237 LOSS: 61.1617110155\n", + "EPOCH: 3238 LOSS: 61.1616657227\n", + "EPOCH: 3239 LOSS: 61.1616204461\n", + "EPOCH: 3240 LOSS: 61.1615751855\n", + "EPOCH: 3241 LOSS: 61.1615299411\n", + "EPOCH: 3242 LOSS: 61.1614847128\n", + "EPOCH: 3243 LOSS: 61.1614395006\n", + "EPOCH: 3244 LOSS: 61.1613943045\n", + "EPOCH: 3245 LOSS: 61.1613491244\n", + "EPOCH: 3246 LOSS: 61.1613039603\n", + "EPOCH: 3247 LOSS: 61.1612588123\n", + "EPOCH: 3248 LOSS: 61.1612136803\n", + "EPOCH: 3249 LOSS: 61.1611685643\n", + "EPOCH: 3250 LOSS: 61.1611234642\n", + "EPOCH: 3251 LOSS: 61.1610783802\n", + "EPOCH: 3252 LOSS: 61.1610333121\n", + "EPOCH: 3253 LOSS: 61.1609882599\n", + "EPOCH: 3254 LOSS: 61.1609432237\n", + "EPOCH: 3255 LOSS: 61.1608982034\n", + "EPOCH: 3256 LOSS: 61.160853199\n", + "EPOCH: 3257 LOSS: 61.1608082105\n", + "EPOCH: 3258 LOSS: 61.1607632379\n", + "EPOCH: 3259 LOSS: 61.1607182811\n", + "EPOCH: 3260 LOSS: 61.1606733402\n", + "EPOCH: 3261 LOSS: 61.1606284151\n", + "EPOCH: 3262 LOSS: 61.1605835059\n", + "EPOCH: 3263 LOSS: 61.1605386124\n", + "EPOCH: 3264 LOSS: 61.1604937347\n", + "EPOCH: 3265 LOSS: 61.1604488729\n", + "EPOCH: 3266 LOSS: 61.1604040267\n", + "EPOCH: 3267 LOSS: 61.1603591964\n", + 
"EPOCH: 3268 LOSS: 61.1603143818\n", + "EPOCH: 3269 LOSS: 61.1602695829\n", + "EPOCH: 3270 LOSS: 61.1602247997\n", + "EPOCH: 3271 LOSS: 61.1601800322\n", + "EPOCH: 3272 LOSS: 61.1601352804\n", + "EPOCH: 3273 LOSS: 61.1600905443\n", + "EPOCH: 3274 LOSS: 61.1600458238\n", + "EPOCH: 3275 LOSS: 61.160001119\n", + "EPOCH: 3276 LOSS: 61.1599564299\n", + "EPOCH: 3277 LOSS: 61.1599117563\n", + "EPOCH: 3278 LOSS: 61.1598670983\n", + "EPOCH: 3279 LOSS: 61.159822456\n", + "EPOCH: 3280 LOSS: 61.1597778292\n", + "EPOCH: 3281 LOSS: 61.159733218\n", + "EPOCH: 3282 LOSS: 61.1596886223\n", + "EPOCH: 3283 LOSS: 61.1596440422\n", + "EPOCH: 3284 LOSS: 61.1595994776\n", + "EPOCH: 3285 LOSS: 61.1595549285\n", + "EPOCH: 3286 LOSS: 61.1595103949\n", + "EPOCH: 3287 LOSS: 61.1594658769\n", + "EPOCH: 3288 LOSS: 61.1594213742\n", + "EPOCH: 3289 LOSS: 61.1593768871\n", + "EPOCH: 3290 LOSS: 61.1593324154\n", + "EPOCH: 3291 LOSS: 61.1592879591\n", + "EPOCH: 3292 LOSS: 61.1592435183\n", + "EPOCH: 3293 LOSS: 61.1591990928\n", + "EPOCH: 3294 LOSS: 61.1591546828\n", + "EPOCH: 3295 LOSS: 61.1591102881\n", + "EPOCH: 3296 LOSS: 61.1590659088\n", + "EPOCH: 3297 LOSS: 61.1590215449\n", + "EPOCH: 3298 LOSS: 61.1589771963\n", + "EPOCH: 3299 LOSS: 61.158932863\n", + "EPOCH: 3300 LOSS: 61.1588885451\n", + "EPOCH: 3301 LOSS: 61.1588442425\n", + "EPOCH: 3302 LOSS: 61.1587999551\n", + "EPOCH: 3303 LOSS: 61.1587556831\n", + "EPOCH: 3304 LOSS: 61.1587114263\n", + "EPOCH: 3305 LOSS: 61.1586671847\n", + "EPOCH: 3306 LOSS: 61.1586229585\n", + "EPOCH: 3307 LOSS: 61.1585787474\n", + "EPOCH: 3308 LOSS: 61.1585345515\n", + "EPOCH: 3309 LOSS: 61.1584903709\n", + "EPOCH: 3310 LOSS: 61.1584462054\n", + "EPOCH: 3311 LOSS: 61.1584020552\n", + "EPOCH: 3312 LOSS: 61.1583579201\n", + "EPOCH: 3313 LOSS: 61.1583138001\n", + "EPOCH: 3314 LOSS: 61.1582696953\n", + "EPOCH: 3315 LOSS: 61.1582256056\n", + "EPOCH: 3316 LOSS: 61.158181531\n", + "EPOCH: 3317 LOSS: 61.1581374716\n", + "EPOCH: 3318 LOSS: 61.1580934272\n", + "EPOCH: 3319 LOSS: 61.1580493979\n", + "EPOCH: 3320 LOSS: 61.1580053837\n", + "EPOCH: 3321 LOSS: 61.1579613845\n", + "EPOCH: 3322 LOSS: 61.1579174003\n", + "EPOCH: 3323 LOSS: 61.1578734312\n", + "EPOCH: 3324 LOSS: 61.1578294771\n", + "EPOCH: 3325 LOSS: 61.157785538\n", + "EPOCH: 3326 LOSS: 61.1577416139\n", + "EPOCH: 3327 LOSS: 61.1576977048\n", + "EPOCH: 3328 LOSS: 61.1576538107\n", + "EPOCH: 3329 LOSS: 61.1576099314\n", + "EPOCH: 3330 LOSS: 61.1575660672\n", + "EPOCH: 3331 LOSS: 61.1575222179\n", + "EPOCH: 3332 LOSS: 61.1574783834\n", + "EPOCH: 3333 LOSS: 61.1574345639\n", + "EPOCH: 3334 LOSS: 61.1573907593\n", + "EPOCH: 3335 LOSS: 61.1573469696\n", + "EPOCH: 3336 LOSS: 61.1573031947\n", + "EPOCH: 3337 LOSS: 61.1572594347\n", + "EPOCH: 3338 LOSS: 61.1572156895\n", + "EPOCH: 3339 LOSS: 61.1571719592\n", + "EPOCH: 3340 LOSS: 61.1571282436\n", + "EPOCH: 3341 LOSS: 61.1570845429\n", + "EPOCH: 3342 LOSS: 61.157040857\n", + "EPOCH: 3343 LOSS: 61.1569971859\n", + "EPOCH: 3344 LOSS: 61.1569535295\n", + "EPOCH: 3345 LOSS: 61.1569098879\n", + "EPOCH: 3346 LOSS: 61.1568662611\n", + "EPOCH: 3347 LOSS: 61.1568226489\n", + "EPOCH: 3348 LOSS: 61.1567790515\n", + "EPOCH: 3349 LOSS: 61.1567354688\n", + "EPOCH: 3350 LOSS: 61.1566919008\n", + "EPOCH: 3351 LOSS: 61.1566483475\n", + "EPOCH: 3352 LOSS: 61.1566048089\n", + "EPOCH: 3353 LOSS: 61.156561285\n", + "EPOCH: 3354 LOSS: 61.1565177756\n", + "EPOCH: 3355 LOSS: 61.156474281\n", + "EPOCH: 3356 LOSS: 61.1564308009\n", + "EPOCH: 3357 LOSS: 61.1563873355\n", + "EPOCH: 3358 LOSS: 61.1563438847\n", + "EPOCH: 3359 
LOSS: 61.1563004484\n", + "EPOCH: 3360 LOSS: 61.1562570268\n", + "EPOCH: 3361 LOSS: 61.1562136197\n", + "EPOCH: 3362 LOSS: 61.1561702272\n", + "EPOCH: 3363 LOSS: 61.1561268492\n", + "EPOCH: 3364 LOSS: 61.1560834857\n", + "EPOCH: 3365 LOSS: 61.1560401368\n", + "EPOCH: 3366 LOSS: 61.1559968024\n", + "EPOCH: 3367 LOSS: 61.1559534825\n", + "EPOCH: 3368 LOSS: 61.155910177\n", + "EPOCH: 3369 LOSS: 61.1558668861\n", + "EPOCH: 3370 LOSS: 61.1558236096\n", + "EPOCH: 3371 LOSS: 61.1557803475\n", + "EPOCH: 3372 LOSS: 61.1557370999\n", + "EPOCH: 3373 LOSS: 61.1556938668\n", + "EPOCH: 3374 LOSS: 61.155650648\n", + "EPOCH: 3375 LOSS: 61.1556074437\n", + "EPOCH: 3376 LOSS: 61.1555642537\n", + "EPOCH: 3377 LOSS: 61.1555210781\n", + "EPOCH: 3378 LOSS: 61.1554779169\n", + "EPOCH: 3379 LOSS: 61.1554347701\n", + "EPOCH: 3380 LOSS: 61.1553916376\n", + "EPOCH: 3381 LOSS: 61.1553485194\n", + "EPOCH: 3382 LOSS: 61.1553054156\n", + "EPOCH: 3383 LOSS: 61.1552623261\n", + "EPOCH: 3384 LOSS: 61.1552192509\n", + "EPOCH: 3385 LOSS: 61.1551761899\n", + "EPOCH: 3386 LOSS: 61.1551331433\n", + "EPOCH: 3387 LOSS: 61.1550901109\n", + "EPOCH: 3388 LOSS: 61.1550470928\n", + "EPOCH: 3389 LOSS: 61.1550040889\n", + "EPOCH: 3390 LOSS: 61.1549610993\n", + "EPOCH: 3391 LOSS: 61.1549181239\n", + "EPOCH: 3392 LOSS: 61.1548751627\n", + "EPOCH: 3393 LOSS: 61.1548322157\n", + "EPOCH: 3394 LOSS: 61.1547892829\n", + "EPOCH: 3395 LOSS: 61.1547463642\n", + "EPOCH: 3396 LOSS: 61.1547034598\n", + "EPOCH: 3397 LOSS: 61.1546605694\n", + "EPOCH: 3398 LOSS: 61.1546176933\n", + "EPOCH: 3399 LOSS: 61.1545748312\n", + "EPOCH: 3400 LOSS: 61.1545319833\n", + "EPOCH: 3401 LOSS: 61.1544891495\n", + "EPOCH: 3402 LOSS: 61.1544463298\n", + "EPOCH: 3403 LOSS: 61.1544035242\n", + "EPOCH: 3404 LOSS: 61.1543607327\n", + "EPOCH: 3405 LOSS: 61.1543179552\n", + "EPOCH: 3406 LOSS: 61.1542751918\n", + "EPOCH: 3407 LOSS: 61.1542324425\n", + "EPOCH: 3408 LOSS: 61.1541897071\n", + "EPOCH: 3409 LOSS: 61.1541469858\n", + "EPOCH: 3410 LOSS: 61.1541042785\n", + "EPOCH: 3411 LOSS: 61.1540615853\n", + "EPOCH: 3412 LOSS: 61.154018906\n", + "EPOCH: 3413 LOSS: 61.1539762406\n", + "EPOCH: 3414 LOSS: 61.1539335893\n", + "EPOCH: 3415 LOSS: 61.1538909519\n", + "EPOCH: 3416 LOSS: 61.1538483284\n", + "EPOCH: 3417 LOSS: 61.1538057189\n", + "EPOCH: 3418 LOSS: 61.1537631233\n", + "EPOCH: 3419 LOSS: 61.1537205417\n", + "EPOCH: 3420 LOSS: 61.1536779739\n", + "EPOCH: 3421 LOSS: 61.15363542\n", + "EPOCH: 3422 LOSS: 61.15359288\n", + "EPOCH: 3423 LOSS: 61.1535503539\n", + "EPOCH: 3424 LOSS: 61.1535078416\n", + "EPOCH: 3425 LOSS: 61.1534653432\n", + "EPOCH: 3426 LOSS: 61.1534228586\n", + "EPOCH: 3427 LOSS: 61.1533803879\n", + "EPOCH: 3428 LOSS: 61.153337931\n", + "EPOCH: 3429 LOSS: 61.1532954878\n", + "EPOCH: 3430 LOSS: 61.1532530585\n", + "EPOCH: 3431 LOSS: 61.153210643\n", + "EPOCH: 3432 LOSS: 61.1531682412\n", + "EPOCH: 3433 LOSS: 61.1531258532\n", + "EPOCH: 3434 LOSS: 61.1530834789\n", + "EPOCH: 3435 LOSS: 61.1530411184\n", + "EPOCH: 3436 LOSS: 61.1529987716\n", + "EPOCH: 3437 LOSS: 61.1529564386\n", + "EPOCH: 3438 LOSS: 61.1529141192\n", + "EPOCH: 3439 LOSS: 61.1528718136\n", + "EPOCH: 3440 LOSS: 61.1528295216\n", + "EPOCH: 3441 LOSS: 61.1527872434\n", + "EPOCH: 3442 LOSS: 61.1527449788\n", + "EPOCH: 3443 LOSS: 61.1527027278\n", + "EPOCH: 3444 LOSS: 61.1526604905\n", + "EPOCH: 3445 LOSS: 61.1526182669\n", + "EPOCH: 3446 LOSS: 61.1525760568\n", + "EPOCH: 3447 LOSS: 61.1525338604\n", + "EPOCH: 3448 LOSS: 61.1524916776\n", + "EPOCH: 3449 LOSS: 61.1524495084\n", + "EPOCH: 3450 LOSS: 
61.1524073527\n", + "EPOCH: 3451 LOSS: 61.1523652107\n", + "EPOCH: 3452 LOSS: 61.1523230822\n", + "EPOCH: 3453 LOSS: 61.1522809673\n", + "EPOCH: 3454 LOSS: 61.1522388659\n", + "EPOCH: 3455 LOSS: 61.152196778\n", + "EPOCH: 3456 LOSS: 61.1521547037\n", + "EPOCH: 3457 LOSS: 61.1521126429\n", + "EPOCH: 3458 LOSS: 61.1520705956\n", + "EPOCH: 3459 LOSS: 61.1520285617\n", + "EPOCH: 3460 LOSS: 61.1519865414\n", + "EPOCH: 3461 LOSS: 61.1519445345\n", + "EPOCH: 3462 LOSS: 61.1519025411\n", + "EPOCH: 3463 LOSS: 61.1518605612\n", + "EPOCH: 3464 LOSS: 61.1518185947\n", + "EPOCH: 3465 LOSS: 61.1517766416\n", + "EPOCH: 3466 LOSS: 61.1517347019\n", + "EPOCH: 3467 LOSS: 61.1516927757\n", + "EPOCH: 3468 LOSS: 61.1516508628\n", + "EPOCH: 3469 LOSS: 61.1516089634\n", + "EPOCH: 3470 LOSS: 61.1515670773\n", + "EPOCH: 3471 LOSS: 61.1515252046\n", + "EPOCH: 3472 LOSS: 61.1514833453\n", + "EPOCH: 3473 LOSS: 61.1514414993\n", + "EPOCH: 3474 LOSS: 61.1513996666\n", + "EPOCH: 3475 LOSS: 61.1513578473\n", + "EPOCH: 3476 LOSS: 61.1513160413\n", + "EPOCH: 3477 LOSS: 61.1512742486\n", + "EPOCH: 3478 LOSS: 61.1512324692\n", + "EPOCH: 3479 LOSS: 61.1511907031\n", + "EPOCH: 3480 LOSS: 61.1511489503\n", + "EPOCH: 3481 LOSS: 61.1511072108\n", + "EPOCH: 3482 LOSS: 61.1510654845\n", + "EPOCH: 3483 LOSS: 61.1510237715\n", + "EPOCH: 3484 LOSS: 61.1509820717\n", + "EPOCH: 3485 LOSS: 61.1509403851\n", + "EPOCH: 3486 LOSS: 61.1508987118\n", + "EPOCH: 3487 LOSS: 61.1508570516\n", + "EPOCH: 3488 LOSS: 61.1508154047\n", + "EPOCH: 3489 LOSS: 61.150773771\n", + "EPOCH: 3490 LOSS: 61.1507321504\n", + "EPOCH: 3491 LOSS: 61.1506905431\n", + "EPOCH: 3492 LOSS: 61.1506489488\n", + "EPOCH: 3493 LOSS: 61.1506073678\n", + "EPOCH: 3494 LOSS: 61.1505657999\n", + "EPOCH: 3495 LOSS: 61.1505242451\n", + "EPOCH: 3496 LOSS: 61.1504827034\n", + "EPOCH: 3497 LOSS: 61.1504411749\n", + "EPOCH: 3498 LOSS: 61.1503996594\n", + "EPOCH: 3499 LOSS: 61.1503581571\n", + "EPOCH: 3500 LOSS: 61.1503166678\n", + "EPOCH: 3501 LOSS: 61.1502751916\n", + "EPOCH: 3502 LOSS: 61.1502337285\n", + "EPOCH: 3503 LOSS: 61.1501922784\n", + "EPOCH: 3504 LOSS: 61.1501508414\n", + "EPOCH: 3505 LOSS: 61.1501094174\n", + "EPOCH: 3506 LOSS: 61.1500680064\n", + "EPOCH: 3507 LOSS: 61.1500266085\n", + "EPOCH: 3508 LOSS: 61.1499852236\n", + "EPOCH: 3509 LOSS: 61.1499438516\n", + "EPOCH: 3510 LOSS: 61.1499024927\n", + "EPOCH: 3511 LOSS: 61.1498611467\n", + "EPOCH: 3512 LOSS: 61.1498198137\n", + "EPOCH: 3513 LOSS: 61.1497784937\n", + "EPOCH: 3514 LOSS: 61.1497371866\n", + "EPOCH: 3515 LOSS: 61.1496958924\n", + "EPOCH: 3516 LOSS: 61.1496546112\n", + "EPOCH: 3517 LOSS: 61.1496133429\n", + "EPOCH: 3518 LOSS: 61.1495720875\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 3519 LOSS: 61.149530845\n", + "EPOCH: 3520 LOSS: 61.1494896154\n", + "EPOCH: 3521 LOSS: 61.1494483987\n", + "EPOCH: 3522 LOSS: 61.1494071949\n", + "EPOCH: 3523 LOSS: 61.1493660039\n", + "EPOCH: 3524 LOSS: 61.1493248258\n", + "EPOCH: 3525 LOSS: 61.1492836606\n", + "EPOCH: 3526 LOSS: 61.1492425082\n", + "EPOCH: 3527 LOSS: 61.1492013686\n", + "EPOCH: 3528 LOSS: 61.1491602418\n", + "EPOCH: 3529 LOSS: 61.1491191278\n", + "EPOCH: 3530 LOSS: 61.1490780266\n", + "EPOCH: 3531 LOSS: 61.1490369383\n", + "EPOCH: 3532 LOSS: 61.1489958627\n", + "EPOCH: 3533 LOSS: 61.1489547998\n", + "EPOCH: 3534 LOSS: 61.1489137498\n", + "EPOCH: 3535 LOSS: 61.1488727125\n", + "EPOCH: 3536 LOSS: 61.1488316879\n", + "EPOCH: 3537 LOSS: 61.1487906761\n", + "EPOCH: 3538 LOSS: 61.148749677\n", + "EPOCH: 3539 LOSS: 
61.1487086906\n", + "EPOCH: 3540 LOSS: 61.1486677169\n", + "EPOCH: 3541 LOSS: 61.1486267559\n", + "EPOCH: 3542 LOSS: 61.1485858077\n", + "EPOCH: 3543 LOSS: 61.148544872\n", + "EPOCH: 3544 LOSS: 61.1485039491\n", + "EPOCH: 3545 LOSS: 61.1484630388\n", + "EPOCH: 3546 LOSS: 61.1484221412\n", + "EPOCH: 3547 LOSS: 61.1483812562\n", + "EPOCH: 3548 LOSS: 61.1483403839\n", + "EPOCH: 3549 LOSS: 61.1482995242\n", + "EPOCH: 3550 LOSS: 61.1482586771\n", + "EPOCH: 3551 LOSS: 61.1482178426\n", + "EPOCH: 3552 LOSS: 61.1481770207\n", + "EPOCH: 3553 LOSS: 61.1481362113\n", + "EPOCH: 3554 LOSS: 61.1480954146\n", + "EPOCH: 3555 LOSS: 61.1480546305\n", + "EPOCH: 3556 LOSS: 61.1480138589\n", + "EPOCH: 3557 LOSS: 61.1479730998\n", + "EPOCH: 3558 LOSS: 61.1479323533\n", + "EPOCH: 3559 LOSS: 61.1478916194\n", + "EPOCH: 3560 LOSS: 61.1478508979\n", + "EPOCH: 3561 LOSS: 61.147810189\n", + "EPOCH: 3562 LOSS: 61.1477694926\n", + "EPOCH: 3563 LOSS: 61.1477288087\n", + "EPOCH: 3564 LOSS: 61.1476881373\n", + "EPOCH: 3565 LOSS: 61.1476474783\n", + "EPOCH: 3566 LOSS: 61.1476068319\n", + "EPOCH: 3567 LOSS: 61.1475661979\n", + "EPOCH: 3568 LOSS: 61.1475255763\n", + "EPOCH: 3569 LOSS: 61.1474849672\n", + "EPOCH: 3570 LOSS: 61.1474443706\n", + "EPOCH: 3571 LOSS: 61.1474037863\n", + "EPOCH: 3572 LOSS: 61.1473632145\n", + "EPOCH: 3573 LOSS: 61.1473226551\n", + "EPOCH: 3574 LOSS: 61.1472821081\n", + "EPOCH: 3575 LOSS: 61.1472415735\n", + "EPOCH: 3576 LOSS: 61.1472010513\n", + "EPOCH: 3577 LOSS: 61.1471605415\n", + "EPOCH: 3578 LOSS: 61.147120044\n", + "EPOCH: 3579 LOSS: 61.1470795589\n", + "EPOCH: 3580 LOSS: 61.1470390861\n", + "EPOCH: 3581 LOSS: 61.1469986257\n", + "EPOCH: 3582 LOSS: 61.1469581776\n", + "EPOCH: 3583 LOSS: 61.1469177418\n", + "EPOCH: 3584 LOSS: 61.1468773184\n", + "EPOCH: 3585 LOSS: 61.1468369072\n", + "EPOCH: 3586 LOSS: 61.1467965084\n", + "EPOCH: 3587 LOSS: 61.1467561218\n", + "EPOCH: 3588 LOSS: 61.1467157476\n", + "EPOCH: 3589 LOSS: 61.1466753856\n", + "EPOCH: 3590 LOSS: 61.1466350358\n", + "EPOCH: 3591 LOSS: 61.1465946983\n", + "EPOCH: 3592 LOSS: 61.1465543731\n", + "EPOCH: 3593 LOSS: 61.1465140601\n", + "EPOCH: 3594 LOSS: 61.1464737593\n", + "EPOCH: 3595 LOSS: 61.1464334708\n", + "EPOCH: 3596 LOSS: 61.1463931944\n", + "EPOCH: 3597 LOSS: 61.1463529303\n", + "EPOCH: 3598 LOSS: 61.1463126783\n", + "EPOCH: 3599 LOSS: 61.1462724386\n", + "EPOCH: 3600 LOSS: 61.146232211\n", + "EPOCH: 3601 LOSS: 61.1461919956\n", + "EPOCH: 3602 LOSS: 61.1461517924\n", + "EPOCH: 3603 LOSS: 61.1461116013\n", + "EPOCH: 3604 LOSS: 61.1460714223\n", + "EPOCH: 3605 LOSS: 61.1460312555\n", + "EPOCH: 3606 LOSS: 61.1459911008\n", + "EPOCH: 3607 LOSS: 61.1459509583\n", + "EPOCH: 3608 LOSS: 61.1459108278\n", + "EPOCH: 3609 LOSS: 61.1458707095\n", + "EPOCH: 3610 LOSS: 61.1458306032\n", + "EPOCH: 3611 LOSS: 61.145790509\n", + "EPOCH: 3612 LOSS: 61.1457504269\n", + "EPOCH: 3613 LOSS: 61.1457103569\n", + "EPOCH: 3614 LOSS: 61.145670299\n", + "EPOCH: 3615 LOSS: 61.145630253\n", + "EPOCH: 3616 LOSS: 61.1455902192\n", + "EPOCH: 3617 LOSS: 61.1455501973\n", + "EPOCH: 3618 LOSS: 61.1455101875\n", + "EPOCH: 3619 LOSS: 61.1454701898\n", + "EPOCH: 3620 LOSS: 61.145430204\n", + "EPOCH: 3621 LOSS: 61.1453902302\n", + "EPOCH: 3622 LOSS: 61.1453502684\n", + "EPOCH: 3623 LOSS: 61.1453103186\n", + "EPOCH: 3624 LOSS: 61.1452703808\n", + "EPOCH: 3625 LOSS: 61.145230455\n", + "EPOCH: 3626 LOSS: 61.1451905411\n", + "EPOCH: 3627 LOSS: 61.1451506392\n", + "EPOCH: 3628 LOSS: 61.1451107492\n", + "EPOCH: 3629 LOSS: 61.1450708711\n", + "EPOCH: 3630 LOSS: 
61.145031005\n", + "EPOCH: 3631 LOSS: 61.1449911508\n", + "EPOCH: 3632 LOSS: 61.1449513085\n", + "EPOCH: 3633 LOSS: 61.1449114781\n", + "EPOCH: 3634 LOSS: 61.1448716596\n", + "EPOCH: 3635 LOSS: 61.144831853\n", + "EPOCH: 3636 LOSS: 61.1447920583\n", + "EPOCH: 3637 LOSS: 61.1447522755\n", + "EPOCH: 3638 LOSS: 61.1447125045\n", + "EPOCH: 3639 LOSS: 61.1446727454\n", + "EPOCH: 3640 LOSS: 61.1446329981\n", + "EPOCH: 3641 LOSS: 61.1445932627\n", + "EPOCH: 3642 LOSS: 61.144553539\n", + "EPOCH: 3643 LOSS: 61.1445138273\n", + "EPOCH: 3644 LOSS: 61.1444741273\n", + "EPOCH: 3645 LOSS: 61.1444344391\n", + "EPOCH: 3646 LOSS: 61.1443947628\n", + "EPOCH: 3647 LOSS: 61.1443550982\n", + "EPOCH: 3648 LOSS: 61.1443154454\n", + "EPOCH: 3649 LOSS: 61.1442758044\n", + "EPOCH: 3650 LOSS: 61.1442361751\n", + "EPOCH: 3651 LOSS: 61.1441965577\n", + "EPOCH: 3652 LOSS: 61.1441569519\n", + "EPOCH: 3653 LOSS: 61.1441173579\n", + "EPOCH: 3654 LOSS: 61.1440777757\n", + "EPOCH: 3655 LOSS: 61.1440382052\n", + "EPOCH: 3656 LOSS: 61.1439986463\n", + "EPOCH: 3657 LOSS: 61.1439590992\n", + "EPOCH: 3658 LOSS: 61.1439195639\n", + "EPOCH: 3659 LOSS: 61.1438800402\n", + "EPOCH: 3660 LOSS: 61.1438405282\n", + "EPOCH: 3661 LOSS: 61.1438010278\n", + "EPOCH: 3662 LOSS: 61.1437615392\n", + "EPOCH: 3663 LOSS: 61.1437220622\n", + "EPOCH: 3664 LOSS: 61.1436825969\n", + "EPOCH: 3665 LOSS: 61.1436431432\n", + "EPOCH: 3666 LOSS: 61.1436037011\n", + "EPOCH: 3667 LOSS: 61.1435642707\n", + "EPOCH: 3668 LOSS: 61.143524852\n", + "EPOCH: 3669 LOSS: 61.1434854448\n", + "EPOCH: 3670 LOSS: 61.1434460492\n", + "EPOCH: 3671 LOSS: 61.1434066653\n", + "EPOCH: 3672 LOSS: 61.1433672929\n", + "EPOCH: 3673 LOSS: 61.1433279322\n", + "EPOCH: 3674 LOSS: 61.143288583\n", + "EPOCH: 3675 LOSS: 61.1432492454\n", + "EPOCH: 3676 LOSS: 61.1432099193\n", + "EPOCH: 3677 LOSS: 61.1431706048\n", + "EPOCH: 3678 LOSS: 61.1431313019\n", + "EPOCH: 3679 LOSS: 61.1430920105\n", + "EPOCH: 3680 LOSS: 61.1430527306\n", + "EPOCH: 3681 LOSS: 61.1430134623\n", + "EPOCH: 3682 LOSS: 61.1429742054\n", + "EPOCH: 3683 LOSS: 61.1429349601\n", + "EPOCH: 3684 LOSS: 61.1428957263\n", + "EPOCH: 3685 LOSS: 61.142856504\n", + "EPOCH: 3686 LOSS: 61.1428172931\n", + "EPOCH: 3687 LOSS: 61.1427780938\n", + "EPOCH: 3688 LOSS: 61.1427389059\n", + "EPOCH: 3689 LOSS: 61.1426997295\n", + "EPOCH: 3690 LOSS: 61.1426605645\n", + "EPOCH: 3691 LOSS: 61.142621411\n", + "EPOCH: 3692 LOSS: 61.142582269\n", + "EPOCH: 3693 LOSS: 61.1425431383\n", + "EPOCH: 3694 LOSS: 61.1425040191\n", + "EPOCH: 3695 LOSS: 61.1424649113\n", + "EPOCH: 3696 LOSS: 61.142425815\n", + "EPOCH: 3697 LOSS: 61.14238673\n", + "EPOCH: 3698 LOSS: 61.1423476564\n", + "EPOCH: 3699 LOSS: 61.1423085942\n", + "EPOCH: 3700 LOSS: 61.1422695434\n", + "EPOCH: 3701 LOSS: 61.142230504\n", + "EPOCH: 3702 LOSS: 61.142191476\n", + "EPOCH: 3703 LOSS: 61.1421524593\n", + "EPOCH: 3704 LOSS: 61.1421134539\n", + "EPOCH: 3705 LOSS: 61.1420744599\n", + "EPOCH: 3706 LOSS: 61.1420354772\n", + "EPOCH: 3707 LOSS: 61.1419965059\n", + "EPOCH: 3708 LOSS: 61.1419575459\n", + "EPOCH: 3709 LOSS: 61.1419185972\n", + "EPOCH: 3710 LOSS: 61.1418796598\n", + "EPOCH: 3711 LOSS: 61.1418407337\n", + "EPOCH: 3712 LOSS: 61.1418018189\n", + "EPOCH: 3713 LOSS: 61.1417629154\n", + "EPOCH: 3714 LOSS: 61.1417240232\n", + "EPOCH: 3715 LOSS: 61.1416851422\n", + "EPOCH: 3716 LOSS: 61.1416462725\n", + "EPOCH: 3717 LOSS: 61.1416074141\n", + "EPOCH: 3718 LOSS: 61.1415685669\n", + "EPOCH: 3719 LOSS: 61.1415297309\n", + "EPOCH: 3720 LOSS: 61.1414909062\n", + "EPOCH: 3721 LOSS: 61.1414520927\n", 
+ "EPOCH: 3722 LOSS: 61.1414132904\n", + "EPOCH: 3723 LOSS: 61.1413744994\n", + "EPOCH: 3724 LOSS: 61.1413357195\n", + "EPOCH: 3725 LOSS: 61.1412969509\n", + "EPOCH: 3726 LOSS: 61.1412581934\n", + "EPOCH: 3727 LOSS: 61.1412194471\n", + "EPOCH: 3728 LOSS: 61.141180712\n", + "EPOCH: 3729 LOSS: 61.1411419881\n", + "EPOCH: 3730 LOSS: 61.1411032753\n", + "EPOCH: 3731 LOSS: 61.1410645737\n", + "EPOCH: 3732 LOSS: 61.1410258832\n", + "EPOCH: 3733 LOSS: 61.1409872039\n", + "EPOCH: 3734 LOSS: 61.1409485357\n", + "EPOCH: 3735 LOSS: 61.1409098786\n", + "EPOCH: 3736 LOSS: 61.1408712327\n", + "EPOCH: 3737 LOSS: 61.1408325978\n", + "EPOCH: 3738 LOSS: 61.1407939741\n", + "EPOCH: 3739 LOSS: 61.1407553614\n", + "EPOCH: 3740 LOSS: 61.1407167599\n", + "EPOCH: 3741 LOSS: 61.1406781694\n", + "EPOCH: 3742 LOSS: 61.14063959\n", + "EPOCH: 3743 LOSS: 61.1406010217\n", + "EPOCH: 3744 LOSS: 61.1405624644\n", + "EPOCH: 3745 LOSS: 61.1405239182\n", + "EPOCH: 3746 LOSS: 61.1404853831\n", + "EPOCH: 3747 LOSS: 61.140446859\n", + "EPOCH: 3748 LOSS: 61.1404083459\n", + "EPOCH: 3749 LOSS: 61.1403698438\n", + "EPOCH: 3750 LOSS: 61.1403313528\n", + "EPOCH: 3751 LOSS: 61.1402928728\n", + "EPOCH: 3752 LOSS: 61.1402544037\n", + "EPOCH: 3753 LOSS: 61.1402159457\n", + "EPOCH: 3754 LOSS: 61.1401774987\n", + "EPOCH: 3755 LOSS: 61.1401390626\n", + "EPOCH: 3756 LOSS: 61.1401006376\n", + "EPOCH: 3757 LOSS: 61.1400622235\n", + "EPOCH: 3758 LOSS: 61.1400238203\n", + "EPOCH: 3759 LOSS: 61.1399854282\n", + "EPOCH: 3760 LOSS: 61.1399470469\n", + "EPOCH: 3761 LOSS: 61.1399086767\n", + "EPOCH: 3762 LOSS: 61.1398703173\n", + "EPOCH: 3763 LOSS: 61.1398319689\n", + "EPOCH: 3764 LOSS: 61.1397936314\n", + "EPOCH: 3765 LOSS: 61.1397553048\n", + "EPOCH: 3766 LOSS: 61.1397169891\n", + "EPOCH: 3767 LOSS: 61.1396786844\n", + "EPOCH: 3768 LOSS: 61.1396403905\n", + "EPOCH: 3769 LOSS: 61.1396021075\n", + "EPOCH: 3770 LOSS: 61.1395638354\n", + "EPOCH: 3771 LOSS: 61.1395255741\n", + "EPOCH: 3772 LOSS: 61.1394873238\n", + "EPOCH: 3773 LOSS: 61.1394490843\n", + "EPOCH: 3774 LOSS: 61.1394108556\n", + "EPOCH: 3775 LOSS: 61.1393726378\n", + "EPOCH: 3776 LOSS: 61.1393344308\n", + "EPOCH: 3777 LOSS: 61.1392962347\n", + "EPOCH: 3778 LOSS: 61.1392580494\n", + "EPOCH: 3779 LOSS: 61.1392198749\n", + "EPOCH: 3780 LOSS: 61.1391817112\n", + "EPOCH: 3781 LOSS: 61.1391435583\n", + "EPOCH: 3782 LOSS: 61.1391054162\n", + "EPOCH: 3783 LOSS: 61.1390672849\n", + "EPOCH: 3784 LOSS: 61.1390291644\n", + "EPOCH: 3785 LOSS: 61.1389910547\n", + "EPOCH: 3786 LOSS: 61.1389529558\n", + "EPOCH: 3787 LOSS: 61.1389148676\n", + "EPOCH: 3788 LOSS: 61.1388767902\n", + "EPOCH: 3789 LOSS: 61.1388387235\n", + "EPOCH: 3790 LOSS: 61.1388006676\n", + "EPOCH: 3791 LOSS: 61.1387626224\n", + "EPOCH: 3792 LOSS: 61.1387245879\n", + "EPOCH: 3793 LOSS: 61.1386865642\n", + "EPOCH: 3794 LOSS: 61.1386485512\n", + "EPOCH: 3795 LOSS: 61.1386105488\n", + "EPOCH: 3796 LOSS: 61.1385725572\n", + "EPOCH: 3797 LOSS: 61.1385345763\n", + "EPOCH: 3798 LOSS: 61.1384966061\n", + "EPOCH: 3799 LOSS: 61.1384586466\n", + "EPOCH: 3800 LOSS: 61.1384206978\n", + "EPOCH: 3801 LOSS: 61.1383827596\n", + "EPOCH: 3802 LOSS: 61.1383448321\n", + "EPOCH: 3803 LOSS: 61.1383069152\n", + "EPOCH: 3804 LOSS: 61.138269009\n", + "EPOCH: 3805 LOSS: 61.1382311135\n", + "EPOCH: 3806 LOSS: 61.1381932285\n", + "EPOCH: 3807 LOSS: 61.1381553543\n", + "EPOCH: 3808 LOSS: 61.1381174906\n", + "EPOCH: 3809 LOSS: 61.1380796376\n", + "EPOCH: 3810 LOSS: 61.1380417951\n", + "EPOCH: 3811 LOSS: 61.1380039633\n", + "EPOCH: 3812 LOSS: 61.1379661421\n", + "EPOCH: 
3813 LOSS: 61.1379283315\n", + "EPOCH: 3814 LOSS: 61.1378905314\n", + "EPOCH: 3815 LOSS: 61.137852742\n", + "EPOCH: 3816 LOSS: 61.1378149631\n", + "EPOCH: 3817 LOSS: 61.1377771947\n", + "EPOCH: 3818 LOSS: 61.137739437\n", + "EPOCH: 3819 LOSS: 61.1377016898\n", + "EPOCH: 3820 LOSS: 61.1376639531\n", + "EPOCH: 3821 LOSS: 61.137626227\n", + "EPOCH: 3822 LOSS: 61.1375885114\n", + "EPOCH: 3823 LOSS: 61.1375508063\n", + "EPOCH: 3824 LOSS: 61.1375131118\n", + "EPOCH: 3825 LOSS: 61.1374754278\n", + "EPOCH: 3826 LOSS: 61.1374377543\n", + "EPOCH: 3827 LOSS: 61.1374000913\n", + "EPOCH: 3828 LOSS: 61.1373624387\n", + "EPOCH: 3829 LOSS: 61.1373247967\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 3830 LOSS: 61.1372871652\n", + "EPOCH: 3831 LOSS: 61.1372495441\n", + "EPOCH: 3832 LOSS: 61.1372119335\n", + "EPOCH: 3833 LOSS: 61.1371743334\n", + "EPOCH: 3834 LOSS: 61.1371367437\n", + "EPOCH: 3835 LOSS: 61.1370991645\n", + "EPOCH: 3836 LOSS: 61.1370615957\n", + "EPOCH: 3837 LOSS: 61.1370240374\n", + "EPOCH: 3838 LOSS: 61.1369864894\n", + "EPOCH: 3839 LOSS: 61.136948952\n", + "EPOCH: 3840 LOSS: 61.1369114249\n", + "EPOCH: 3841 LOSS: 61.1368739083\n", + "EPOCH: 3842 LOSS: 61.136836402\n", + "EPOCH: 3843 LOSS: 61.1367989062\n", + "EPOCH: 3844 LOSS: 61.1367614207\n", + "EPOCH: 3845 LOSS: 61.1367239457\n", + "EPOCH: 3846 LOSS: 61.136686481\n", + "EPOCH: 3847 LOSS: 61.1366490267\n", + "EPOCH: 3848 LOSS: 61.1366115827\n", + "EPOCH: 3849 LOSS: 61.1365741492\n", + "EPOCH: 3850 LOSS: 61.136536726\n", + "EPOCH: 3851 LOSS: 61.1364993131\n", + "EPOCH: 3852 LOSS: 61.1364619106\n", + "EPOCH: 3853 LOSS: 61.1364245184\n", + "EPOCH: 3854 LOSS: 61.1363871365\n", + "EPOCH: 3855 LOSS: 61.136349765\n", + "EPOCH: 3856 LOSS: 61.1363124038\n", + "EPOCH: 3857 LOSS: 61.1362750529\n", + "EPOCH: 3858 LOSS: 61.1362377123\n", + "EPOCH: 3859 LOSS: 61.136200382\n", + "EPOCH: 3860 LOSS: 61.1361630621\n", + "EPOCH: 3861 LOSS: 61.1361257523\n", + "EPOCH: 3862 LOSS: 61.1360884529\n", + "EPOCH: 3863 LOSS: 61.1360511638\n", + "EPOCH: 3864 LOSS: 61.1360138849\n", + "EPOCH: 3865 LOSS: 61.1359766163\n", + "EPOCH: 3866 LOSS: 61.1359393579\n", + "EPOCH: 3867 LOSS: 61.1359021098\n", + "EPOCH: 3868 LOSS: 61.135864872\n", + "EPOCH: 3869 LOSS: 61.1358276444\n", + "EPOCH: 3870 LOSS: 61.135790427\n", + "EPOCH: 3871 LOSS: 61.1357532198\n", + "EPOCH: 3872 LOSS: 61.1357160229\n", + "EPOCH: 3873 LOSS: 61.1356788362\n", + "EPOCH: 3874 LOSS: 61.1356416597\n", + "EPOCH: 3875 LOSS: 61.1356044933\n", + "EPOCH: 3876 LOSS: 61.1355673372\n", + "EPOCH: 3877 LOSS: 61.1355301913\n", + "EPOCH: 3878 LOSS: 61.1354930556\n", + "EPOCH: 3879 LOSS: 61.13545593\n", + "EPOCH: 3880 LOSS: 61.1354188146\n", + "EPOCH: 3881 LOSS: 61.1353817094\n", + "EPOCH: 3882 LOSS: 61.1353446143\n", + "EPOCH: 3883 LOSS: 61.1353075294\n", + "EPOCH: 3884 LOSS: 61.1352704547\n", + "EPOCH: 3885 LOSS: 61.13523339\n", + "EPOCH: 3886 LOSS: 61.1351963356\n", + "EPOCH: 3887 LOSS: 61.1351592912\n", + "EPOCH: 3888 LOSS: 61.135122257\n", + "EPOCH: 3889 LOSS: 61.1350852329\n", + "EPOCH: 3890 LOSS: 61.1350482189\n", + "EPOCH: 3891 LOSS: 61.135011215\n", + "EPOCH: 3892 LOSS: 61.1349742212\n", + "EPOCH: 3893 LOSS: 61.1349372375\n", + "EPOCH: 3894 LOSS: 61.1349002639\n", + "EPOCH: 3895 LOSS: 61.1348633004\n", + "EPOCH: 3896 LOSS: 61.1348263469\n", + "EPOCH: 3897 LOSS: 61.1347894035\n", + "EPOCH: 3898 LOSS: 61.1347524702\n", + "EPOCH: 3899 LOSS: 61.134715547\n", + "EPOCH: 3900 LOSS: 61.1346786338\n", + "EPOCH: 3901 LOSS: 61.1346417306\n", + "EPOCH: 3902 LOSS: 61.1346048375\n", + 
"EPOCH: 3903 LOSS: 61.1345679544\n", + "EPOCH: 3904 LOSS: 61.1345310814\n", + "EPOCH: 3905 LOSS: 61.1344942184\n", + "EPOCH: 3906 LOSS: 61.1344573654\n", + "EPOCH: 3907 LOSS: 61.1344205224\n", + "EPOCH: 3908 LOSS: 61.1343836894\n", + "EPOCH: 3909 LOSS: 61.1343468664\n", + "EPOCH: 3910 LOSS: 61.1343100534\n", + "EPOCH: 3911 LOSS: 61.1342732504\n", + "EPOCH: 3912 LOSS: 61.1342364574\n", + "EPOCH: 3913 LOSS: 61.1341996743\n", + "EPOCH: 3914 LOSS: 61.1341629012\n", + "EPOCH: 3915 LOSS: 61.1341261381\n", + "EPOCH: 3916 LOSS: 61.134089385\n", + "EPOCH: 3917 LOSS: 61.1340526418\n", + "EPOCH: 3918 LOSS: 61.1340159085\n", + "EPOCH: 3919 LOSS: 61.1339791852\n", + "EPOCH: 3920 LOSS: 61.1339424719\n", + "EPOCH: 3921 LOSS: 61.1339057684\n", + "EPOCH: 3922 LOSS: 61.1338690749\n", + "EPOCH: 3923 LOSS: 61.1338323913\n", + "EPOCH: 3924 LOSS: 61.1337957176\n", + "EPOCH: 3925 LOSS: 61.1337590538\n", + "EPOCH: 3926 LOSS: 61.1337224\n", + "EPOCH: 3927 LOSS: 61.133685756\n", + "EPOCH: 3928 LOSS: 61.1336491219\n", + "EPOCH: 3929 LOSS: 61.1336124977\n", + "EPOCH: 3930 LOSS: 61.1335758834\n", + "EPOCH: 3931 LOSS: 61.1335392789\n", + "EPOCH: 3932 LOSS: 61.1335026843\n", + "EPOCH: 3933 LOSS: 61.1334660996\n", + "EPOCH: 3934 LOSS: 61.1334295247\n", + "EPOCH: 3935 LOSS: 61.1333929597\n", + "EPOCH: 3936 LOSS: 61.1333564045\n", + "EPOCH: 3937 LOSS: 61.1333198592\n", + "EPOCH: 3938 LOSS: 61.1332833237\n", + "EPOCH: 3939 LOSS: 61.133246798\n", + "EPOCH: 3940 LOSS: 61.1332102822\n", + "EPOCH: 3941 LOSS: 61.1331737761\n", + "EPOCH: 3942 LOSS: 61.1331372799\n", + "EPOCH: 3943 LOSS: 61.1331007935\n", + "EPOCH: 3944 LOSS: 61.1330643169\n", + "EPOCH: 3945 LOSS: 61.13302785\n", + "EPOCH: 3946 LOSS: 61.132991393\n", + "EPOCH: 3947 LOSS: 61.1329549458\n", + "EPOCH: 3948 LOSS: 61.1329185083\n", + "EPOCH: 3949 LOSS: 61.1328820806\n", + "EPOCH: 3950 LOSS: 61.1328456626\n", + "EPOCH: 3951 LOSS: 61.1328092544\n", + "EPOCH: 3952 LOSS: 61.132772856\n", + "EPOCH: 3953 LOSS: 61.1327364673\n", + "EPOCH: 3954 LOSS: 61.1327000884\n", + "EPOCH: 3955 LOSS: 61.1326637192\n", + "EPOCH: 3956 LOSS: 61.1326273598\n", + "EPOCH: 3957 LOSS: 61.13259101\n", + "EPOCH: 3958 LOSS: 61.13255467\n", + "EPOCH: 3959 LOSS: 61.1325183397\n", + "EPOCH: 3960 LOSS: 61.1324820191\n", + "EPOCH: 3961 LOSS: 61.1324457082\n", + "EPOCH: 3962 LOSS: 61.1324094071\n", + "EPOCH: 3963 LOSS: 61.1323731156\n", + "EPOCH: 3964 LOSS: 61.1323368338\n", + "EPOCH: 3965 LOSS: 61.1323005616\n", + "EPOCH: 3966 LOSS: 61.1322642992\n", + "EPOCH: 3967 LOSS: 61.1322280464\n", + "EPOCH: 3968 LOSS: 61.1321918033\n", + "EPOCH: 3969 LOSS: 61.1321555699\n", + "EPOCH: 3970 LOSS: 61.1321193461\n", + "EPOCH: 3971 LOSS: 61.132083132\n", + "EPOCH: 3972 LOSS: 61.1320469275\n", + "EPOCH: 3973 LOSS: 61.1320107326\n", + "EPOCH: 3974 LOSS: 61.1319745474\n", + "EPOCH: 3975 LOSS: 61.1319383718\n", + "EPOCH: 3976 LOSS: 61.1319022058\n", + "EPOCH: 3977 LOSS: 61.1318660495\n", + "EPOCH: 3978 LOSS: 61.1318299027\n", + "EPOCH: 3979 LOSS: 61.1317937656\n", + "EPOCH: 3980 LOSS: 61.1317576381\n", + "EPOCH: 3981 LOSS: 61.1317215201\n", + "EPOCH: 3982 LOSS: 61.1316854118\n", + "EPOCH: 3983 LOSS: 61.131649313\n", + "EPOCH: 3984 LOSS: 61.1316132238\n", + "EPOCH: 3985 LOSS: 61.1315771442\n", + "EPOCH: 3986 LOSS: 61.1315410742\n", + "EPOCH: 3987 LOSS: 61.1315050137\n", + "EPOCH: 3988 LOSS: 61.1314689628\n", + "EPOCH: 3989 LOSS: 61.1314329214\n", + "EPOCH: 3990 LOSS: 61.1313968896\n", + "EPOCH: 3991 LOSS: 61.1313608673\n", + "EPOCH: 3992 LOSS: 61.1313248546\n", + "EPOCH: 3993 LOSS: 61.1312888514\n", + "EPOCH: 3994 LOSS: 
61.1312528577\n", + "EPOCH: 3995 LOSS: 61.1312168736\n", + "EPOCH: 3996 LOSS: 61.1311808989\n", + "EPOCH: 3997 LOSS: 61.1311449338\n", + "EPOCH: 3998 LOSS: 61.1311089781\n", + "EPOCH: 3999 LOSS: 61.131073032\n", + "EPOCH: 4000 LOSS: 61.1310370954\n", + "EPOCH: 4001 LOSS: 61.1310011682\n", + "EPOCH: 4002 LOSS: 61.1309652506\n", + "EPOCH: 4003 LOSS: 61.1309293424\n", + "EPOCH: 4004 LOSS: 61.1308934437\n", + "EPOCH: 4005 LOSS: 61.1308575544\n", + "EPOCH: 4006 LOSS: 61.1308216746\n", + "EPOCH: 4007 LOSS: 61.1307858043\n", + "EPOCH: 4008 LOSS: 61.1307499434\n", + "EPOCH: 4009 LOSS: 61.130714092\n", + "EPOCH: 4010 LOSS: 61.13067825\n", + "EPOCH: 4011 LOSS: 61.1306424175\n", + "EPOCH: 4012 LOSS: 61.1306065943\n", + "EPOCH: 4013 LOSS: 61.1305707807\n", + "EPOCH: 4014 LOSS: 61.1305349764\n", + "EPOCH: 4015 LOSS: 61.1304991815\n", + "EPOCH: 4016 LOSS: 61.1304633961\n", + "EPOCH: 4017 LOSS: 61.13042762\n", + "EPOCH: 4018 LOSS: 61.1303918534\n", + "EPOCH: 4019 LOSS: 61.1303560962\n", + "EPOCH: 4020 LOSS: 61.1303203483\n", + "EPOCH: 4021 LOSS: 61.1302846098\n", + "EPOCH: 4022 LOSS: 61.1302488807\n", + "EPOCH: 4023 LOSS: 61.130213161\n", + "EPOCH: 4024 LOSS: 61.1301774507\n", + "EPOCH: 4025 LOSS: 61.1301417497\n", + "EPOCH: 4026 LOSS: 61.130106058\n", + "EPOCH: 4027 LOSS: 61.1300703758\n", + "EPOCH: 4028 LOSS: 61.1300347028\n", + "EPOCH: 4029 LOSS: 61.1299990393\n", + "EPOCH: 4030 LOSS: 61.129963385\n", + "EPOCH: 4031 LOSS: 61.1299277401\n", + "EPOCH: 4032 LOSS: 61.1298921045\n", + "EPOCH: 4033 LOSS: 61.1298564783\n", + "EPOCH: 4034 LOSS: 61.1298208613\n", + "EPOCH: 4035 LOSS: 61.1297852537\n", + "EPOCH: 4036 LOSS: 61.1297496554\n", + "EPOCH: 4037 LOSS: 61.1297140663\n", + "EPOCH: 4038 LOSS: 61.1296784866\n", + "EPOCH: 4039 LOSS: 61.1296429162\n", + "EPOCH: 4040 LOSS: 61.129607355\n", + "EPOCH: 4041 LOSS: 61.1295718032\n", + "EPOCH: 4042 LOSS: 61.1295362606\n", + "EPOCH: 4043 LOSS: 61.1295007273\n", + "EPOCH: 4044 LOSS: 61.1294652032\n", + "EPOCH: 4045 LOSS: 61.1294296884\n", + "EPOCH: 4046 LOSS: 61.1293941829\n", + "EPOCH: 4047 LOSS: 61.1293586866\n", + "EPOCH: 4048 LOSS: 61.1293231996\n", + "EPOCH: 4049 LOSS: 61.1292877218\n", + "EPOCH: 4050 LOSS: 61.1292522533\n", + "EPOCH: 4051 LOSS: 61.1292167939\n", + "EPOCH: 4052 LOSS: 61.1291813438\n", + "EPOCH: 4053 LOSS: 61.129145903\n", + "EPOCH: 4054 LOSS: 61.1291104713\n", + "EPOCH: 4055 LOSS: 61.1290750489\n", + "EPOCH: 4056 LOSS: 61.1290396356\n", + "EPOCH: 4057 LOSS: 61.1290042316\n", + "EPOCH: 4058 LOSS: 61.1289688367\n", + "EPOCH: 4059 LOSS: 61.1289334511\n", + "EPOCH: 4060 LOSS: 61.1288980746\n", + "EPOCH: 4061 LOSS: 61.1288627074\n", + "EPOCH: 4062 LOSS: 61.1288273493\n", + "EPOCH: 4063 LOSS: 61.1287920003\n", + "EPOCH: 4064 LOSS: 61.1287566606\n", + "EPOCH: 4065 LOSS: 61.12872133\n", + "EPOCH: 4066 LOSS: 61.1286860085\n", + "EPOCH: 4067 LOSS: 61.1286506962\n", + "EPOCH: 4068 LOSS: 61.1286153931\n", + "EPOCH: 4069 LOSS: 61.1285800991\n", + "EPOCH: 4070 LOSS: 61.1285448142\n", + "EPOCH: 4071 LOSS: 61.1285095385\n", + "EPOCH: 4072 LOSS: 61.1284742719\n", + "EPOCH: 4073 LOSS: 61.1284390144\n", + "EPOCH: 4074 LOSS: 61.128403766\n", + "EPOCH: 4075 LOSS: 61.1283685268\n", + "EPOCH: 4076 LOSS: 61.1283332966\n", + "EPOCH: 4077 LOSS: 61.1282980756\n", + "EPOCH: 4078 LOSS: 61.1282628636\n", + "EPOCH: 4079 LOSS: 61.1282276608\n", + "EPOCH: 4080 LOSS: 61.128192467\n", + "EPOCH: 4081 LOSS: 61.1281572824\n", + "EPOCH: 4082 LOSS: 61.1281221068\n", + "EPOCH: 4083 LOSS: 61.1280869402\n", + "EPOCH: 4084 LOSS: 61.1280517828\n", + "EPOCH: 4085 LOSS: 61.1280166344\n", + 
"EPOCH: 4086 LOSS: 61.1279814951\n", + "EPOCH: 4087 LOSS: 61.1279463648\n", + "EPOCH: 4088 LOSS: 61.1279112436\n", + "EPOCH: 4089 LOSS: 61.1278761314\n", + "EPOCH: 4090 LOSS: 61.1278410282\n", + "EPOCH: 4091 LOSS: 61.1278059341\n", + "EPOCH: 4092 LOSS: 61.127770849\n", + "EPOCH: 4093 LOSS: 61.127735773\n", + "EPOCH: 4094 LOSS: 61.127700706\n", + "EPOCH: 4095 LOSS: 61.127665648\n", + "EPOCH: 4096 LOSS: 61.127630599\n", + "EPOCH: 4097 LOSS: 61.127595559\n", + "EPOCH: 4098 LOSS: 61.127560528\n", + "EPOCH: 4099 LOSS: 61.127525506\n", + "EPOCH: 4100 LOSS: 61.127490493\n", + "EPOCH: 4101 LOSS: 61.1274554889\n", + "EPOCH: 4102 LOSS: 61.1274204939\n", + "EPOCH: 4103 LOSS: 61.1273855079\n", + "EPOCH: 4104 LOSS: 61.1273505308\n", + "EPOCH: 4105 LOSS: 61.1273155627\n", + "EPOCH: 4106 LOSS: 61.1272806035\n", + "EPOCH: 4107 LOSS: 61.1272456533\n", + "EPOCH: 4108 LOSS: 61.1272107121\n", + "EPOCH: 4109 LOSS: 61.1271757798\n", + "EPOCH: 4110 LOSS: 61.1271408565\n", + "EPOCH: 4111 LOSS: 61.1271059421\n", + "EPOCH: 4112 LOSS: 61.1270710366\n", + "EPOCH: 4113 LOSS: 61.1270361401\n", + "EPOCH: 4114 LOSS: 61.1270012524\n", + "EPOCH: 4115 LOSS: 61.1269663738\n", + "EPOCH: 4116 LOSS: 61.126931504\n", + "EPOCH: 4117 LOSS: 61.1268966431\n", + "EPOCH: 4118 LOSS: 61.1268617912\n", + "EPOCH: 4119 LOSS: 61.1268269481\n", + "EPOCH: 4120 LOSS: 61.126792114\n", + "EPOCH: 4121 LOSS: 61.1267572887\n", + "EPOCH: 4122 LOSS: 61.1267224724\n", + "EPOCH: 4123 LOSS: 61.1266876649\n", + "EPOCH: 4124 LOSS: 61.1266528663\n", + "EPOCH: 4125 LOSS: 61.1266180765\n", + "EPOCH: 4126 LOSS: 61.1265832957\n", + "EPOCH: 4127 LOSS: 61.1265485237\n", + "EPOCH: 4128 LOSS: 61.1265137605\n", + "EPOCH: 4129 LOSS: 61.1264790063\n", + "EPOCH: 4130 LOSS: 61.1264442608\n", + "EPOCH: 4131 LOSS: 61.1264095243\n", + "EPOCH: 4132 LOSS: 61.1263747965\n", + "EPOCH: 4133 LOSS: 61.1263400776\n", + "EPOCH: 4134 LOSS: 61.1263053676\n", + "EPOCH: 4135 LOSS: 61.1262706663\n", + "EPOCH: 4136 LOSS: 61.1262359739\n", + "EPOCH: 4137 LOSS: 61.1262012903\n", + "EPOCH: 4138 LOSS: 61.1261666155\n", + "EPOCH: 4139 LOSS: 61.1261319496\n", + "EPOCH: 4140 LOSS: 61.1260972924\n", + "EPOCH: 4141 LOSS: 61.126062644\n", + "EPOCH: 4142 LOSS: 61.1260280045\n", + "EPOCH: 4143 LOSS: 61.1259933737\n", + "EPOCH: 4144 LOSS: 61.1259587517\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 4145 LOSS: 61.1259241385\n", + "EPOCH: 4146 LOSS: 61.1258895341\n", + "EPOCH: 4147 LOSS: 61.1258549384\n", + "EPOCH: 4148 LOSS: 61.1258203516\n", + "EPOCH: 4149 LOSS: 61.1257857734\n", + "EPOCH: 4150 LOSS: 61.1257512041\n", + "EPOCH: 4151 LOSS: 61.1257166435\n", + "EPOCH: 4152 LOSS: 61.1256820916\n", + "EPOCH: 4153 LOSS: 61.1256475485\n", + "EPOCH: 4154 LOSS: 61.1256130142\n", + "EPOCH: 4155 LOSS: 61.1255784885\n", + "EPOCH: 4156 LOSS: 61.1255439716\n", + "EPOCH: 4157 LOSS: 61.1255094635\n", + "EPOCH: 4158 LOSS: 61.125474964\n", + "EPOCH: 4159 LOSS: 61.1254404733\n", + "EPOCH: 4160 LOSS: 61.1254059913\n", + "EPOCH: 4161 LOSS: 61.125371518\n", + "EPOCH: 4162 LOSS: 61.1253370534\n", + "EPOCH: 4163 LOSS: 61.1253025976\n", + "EPOCH: 4164 LOSS: 61.1252681504\n", + "EPOCH: 4165 LOSS: 61.1252337119\n", + "EPOCH: 4166 LOSS: 61.1251992821\n", + "EPOCH: 4167 LOSS: 61.1251648609\n", + "EPOCH: 4168 LOSS: 61.1251304485\n", + "EPOCH: 4169 LOSS: 61.1250960447\n", + "EPOCH: 4170 LOSS: 61.1250616496\n", + "EPOCH: 4171 LOSS: 61.1250272632\n", + "EPOCH: 4172 LOSS: 61.1249928854\n", + "EPOCH: 4173 LOSS: 61.1249585163\n", + "EPOCH: 4174 LOSS: 61.1249241559\n", + "EPOCH: 4175 LOSS: 
61.124889804\n", + "EPOCH: 4176 LOSS: 61.1248554609\n", + "EPOCH: 4177 LOSS: 61.1248211263\n", + "EPOCH: 4178 LOSS: 61.1247868005\n", + "EPOCH: 4179 LOSS: 61.1247524832\n", + "EPOCH: 4180 LOSS: 61.1247181746\n", + "EPOCH: 4181 LOSS: 61.1246838745\n", + "EPOCH: 4182 LOSS: 61.1246495831\n", + "EPOCH: 4183 LOSS: 61.1246153004\n", + "EPOCH: 4184 LOSS: 61.1245810262\n", + "EPOCH: 4185 LOSS: 61.1245467606\n", + "EPOCH: 4186 LOSS: 61.1245125036\n", + "EPOCH: 4187 LOSS: 61.1244782553\n", + "EPOCH: 4188 LOSS: 61.1244440155\n", + "EPOCH: 4189 LOSS: 61.1244097843\n", + "EPOCH: 4190 LOSS: 61.1243755617\n", + "EPOCH: 4191 LOSS: 61.1243413476\n", + "EPOCH: 4192 LOSS: 61.1243071422\n", + "EPOCH: 4193 LOSS: 61.1242729453\n", + "EPOCH: 4194 LOSS: 61.124238757\n", + "EPOCH: 4195 LOSS: 61.1242045772\n", + "EPOCH: 4196 LOSS: 61.124170406\n", + "EPOCH: 4197 LOSS: 61.1241362433\n", + "EPOCH: 4198 LOSS: 61.1241020892\n", + "EPOCH: 4199 LOSS: 61.1240679437\n", + "EPOCH: 4200 LOSS: 61.1240338066\n", + "EPOCH: 4201 LOSS: 61.1239996781\n", + "EPOCH: 4202 LOSS: 61.1239655582\n", + "EPOCH: 4203 LOSS: 61.1239314467\n", + "EPOCH: 4204 LOSS: 61.1238973438\n", + "EPOCH: 4205 LOSS: 61.1238632494\n", + "EPOCH: 4206 LOSS: 61.1238291636\n", + "EPOCH: 4207 LOSS: 61.1237950862\n", + "EPOCH: 4208 LOSS: 61.1237610173\n", + "EPOCH: 4209 LOSS: 61.123726957\n", + "EPOCH: 4210 LOSS: 61.1236929051\n", + "EPOCH: 4211 LOSS: 61.1236588617\n", + "EPOCH: 4212 LOSS: 61.1236248268\n", + "EPOCH: 4213 LOSS: 61.1235908004\n", + "EPOCH: 4214 LOSS: 61.1235567825\n", + "EPOCH: 4215 LOSS: 61.123522773\n", + "EPOCH: 4216 LOSS: 61.1234887721\n", + "EPOCH: 4217 LOSS: 61.1234547795\n", + "EPOCH: 4218 LOSS: 61.1234207955\n", + "EPOCH: 4219 LOSS: 61.1233868199\n", + "EPOCH: 4220 LOSS: 61.1233528528\n", + "EPOCH: 4221 LOSS: 61.1233188941\n", + "EPOCH: 4222 LOSS: 61.1232849438\n", + "EPOCH: 4223 LOSS: 61.123251002\n", + "EPOCH: 4224 LOSS: 61.1232170687\n", + "EPOCH: 4225 LOSS: 61.1231831437\n", + "EPOCH: 4226 LOSS: 61.1231492272\n", + "EPOCH: 4227 LOSS: 61.1231153192\n", + "EPOCH: 4228 LOSS: 61.1230814195\n", + "EPOCH: 4229 LOSS: 61.1230475283\n", + "EPOCH: 4230 LOSS: 61.1230136454\n", + "EPOCH: 4231 LOSS: 61.122979771\n", + "EPOCH: 4232 LOSS: 61.122945905\n", + "EPOCH: 4233 LOSS: 61.1229120474\n", + "EPOCH: 4234 LOSS: 61.1228781981\n", + "EPOCH: 4235 LOSS: 61.1228443573\n", + "EPOCH: 4236 LOSS: 61.1228105249\n", + "EPOCH: 4237 LOSS: 61.1227767008\n", + "EPOCH: 4238 LOSS: 61.1227428851\n", + "EPOCH: 4239 LOSS: 61.1227090778\n", + "EPOCH: 4240 LOSS: 61.1226752789\n", + "EPOCH: 4241 LOSS: 61.1226414883\n", + "EPOCH: 4242 LOSS: 61.1226077061\n", + "EPOCH: 4243 LOSS: 61.1225739322\n", + "EPOCH: 4244 LOSS: 61.1225401667\n", + "EPOCH: 4245 LOSS: 61.1225064096\n", + "EPOCH: 4246 LOSS: 61.1224726608\n", + "EPOCH: 4247 LOSS: 61.1224389203\n", + "EPOCH: 4248 LOSS: 61.1224051882\n", + "EPOCH: 4249 LOSS: 61.1223714644\n", + "EPOCH: 4250 LOSS: 61.1223377489\n", + "EPOCH: 4251 LOSS: 61.1223040418\n", + "EPOCH: 4252 LOSS: 61.122270343\n", + "EPOCH: 4253 LOSS: 61.1222366525\n", + "EPOCH: 4254 LOSS: 61.1222029703\n", + "EPOCH: 4255 LOSS: 61.1221692964\n", + "EPOCH: 4256 LOSS: 61.1221356308\n", + "EPOCH: 4257 LOSS: 61.1221019736\n", + "EPOCH: 4258 LOSS: 61.1220683246\n", + "EPOCH: 4259 LOSS: 61.1220346839\n", + "EPOCH: 4260 LOSS: 61.1220010515\n", + "EPOCH: 4261 LOSS: 61.1219674274\n", + "EPOCH: 4262 LOSS: 61.1219338115\n", + "EPOCH: 4263 LOSS: 61.121900204\n", + "EPOCH: 4264 LOSS: 61.1218666047\n", + "EPOCH: 4265 LOSS: 61.1218330137\n", + "EPOCH: 4266 LOSS: 
61.1217994309\n", + "EPOCH: 4267 LOSS: 61.1217658564\n", + "EPOCH: 4268 LOSS: 61.1217322902\n", + "EPOCH: 4269 LOSS: 61.1216987322\n", + "EPOCH: 4270 LOSS: 61.1216651825\n", + "EPOCH: 4271 LOSS: 61.121631641\n", + "EPOCH: 4272 LOSS: 61.1215981077\n", + "EPOCH: 4273 LOSS: 61.1215645827\n", + "EPOCH: 4274 LOSS: 61.1215310659\n", + "EPOCH: 4275 LOSS: 61.1214975573\n", + "EPOCH: 4276 LOSS: 61.121464057\n", + "EPOCH: 4277 LOSS: 61.1214305649\n", + "EPOCH: 4278 LOSS: 61.121397081\n", + "EPOCH: 4279 LOSS: 61.1213636053\n", + "EPOCH: 4280 LOSS: 61.1213301378\n", + "EPOCH: 4281 LOSS: 61.1212966785\n", + "EPOCH: 4282 LOSS: 61.1212632274\n", + "EPOCH: 4283 LOSS: 61.1212297846\n", + "EPOCH: 4284 LOSS: 61.1211963499\n", + "EPOCH: 4285 LOSS: 61.1211629234\n", + "EPOCH: 4286 LOSS: 61.121129505\n", + "EPOCH: 4287 LOSS: 61.1210960949\n", + "EPOCH: 4288 LOSS: 61.1210626929\n", + "EPOCH: 4289 LOSS: 61.1210292991\n", + "EPOCH: 4290 LOSS: 61.1209959135\n", + "EPOCH: 4291 LOSS: 61.1209625361\n", + "EPOCH: 4292 LOSS: 61.1209291668\n", + "EPOCH: 4293 LOSS: 61.1208958056\n", + "EPOCH: 4294 LOSS: 61.1208624526\n", + "EPOCH: 4295 LOSS: 61.1208291078\n", + "EPOCH: 4296 LOSS: 61.1207957711\n", + "EPOCH: 4297 LOSS: 61.1207624425\n", + "EPOCH: 4298 LOSS: 61.1207291221\n", + "EPOCH: 4299 LOSS: 61.1206958098\n", + "EPOCH: 4300 LOSS: 61.1206625056\n", + "EPOCH: 4301 LOSS: 61.1206292096\n", + "EPOCH: 4302 LOSS: 61.1205959217\n", + "EPOCH: 4303 LOSS: 61.1205626419\n", + "EPOCH: 4304 LOSS: 61.1205293702\n", + "EPOCH: 4305 LOSS: 61.1204961066\n", + "EPOCH: 4306 LOSS: 61.1204628511\n", + "EPOCH: 4307 LOSS: 61.1204296038\n", + "EPOCH: 4308 LOSS: 61.1203963645\n", + "EPOCH: 4309 LOSS: 61.1203631333\n", + "EPOCH: 4310 LOSS: 61.1203299102\n", + "EPOCH: 4311 LOSS: 61.1202966952\n", + "EPOCH: 4312 LOSS: 61.1202634883\n", + "EPOCH: 4313 LOSS: 61.1202302894\n", + "EPOCH: 4314 LOSS: 61.1201970986\n", + "EPOCH: 4315 LOSS: 61.1201639159\n", + "EPOCH: 4316 LOSS: 61.1201307413\n", + "EPOCH: 4317 LOSS: 61.1200975747\n", + "EPOCH: 4318 LOSS: 61.1200644162\n", + "EPOCH: 4319 LOSS: 61.1200312658\n", + "EPOCH: 4320 LOSS: 61.1199981233\n", + "EPOCH: 4321 LOSS: 61.119964989\n", + "EPOCH: 4322 LOSS: 61.1199318627\n", + "EPOCH: 4323 LOSS: 61.1198987444\n", + "EPOCH: 4324 LOSS: 61.1198656341\n", + "EPOCH: 4325 LOSS: 61.1198325319\n", + "EPOCH: 4326 LOSS: 61.1197994377\n", + "EPOCH: 4327 LOSS: 61.1197663516\n", + "EPOCH: 4328 LOSS: 61.1197332734\n", + "EPOCH: 4329 LOSS: 61.1197002033\n", + "EPOCH: 4330 LOSS: 61.1196671412\n", + "EPOCH: 4331 LOSS: 61.1196340871\n", + "EPOCH: 4332 LOSS: 61.119601041\n", + "EPOCH: 4333 LOSS: 61.1195680029\n", + "EPOCH: 4334 LOSS: 61.1195349728\n", + "EPOCH: 4335 LOSS: 61.1195019507\n", + "EPOCH: 4336 LOSS: 61.1194689366\n", + "EPOCH: 4337 LOSS: 61.1194359305\n", + "EPOCH: 4338 LOSS: 61.1194029323\n", + "EPOCH: 4339 LOSS: 61.1193699422\n", + "EPOCH: 4340 LOSS: 61.11933696\n", + "EPOCH: 4341 LOSS: 61.1193039857\n", + "EPOCH: 4342 LOSS: 61.1192710195\n", + "EPOCH: 4343 LOSS: 61.1192380612\n", + "EPOCH: 4344 LOSS: 61.1192051109\n", + "EPOCH: 4345 LOSS: 61.1191721685\n", + "EPOCH: 4346 LOSS: 61.1191392341\n", + "EPOCH: 4347 LOSS: 61.1191063076\n", + "EPOCH: 4348 LOSS: 61.1190733891\n", + "EPOCH: 4349 LOSS: 61.1190404785\n", + "EPOCH: 4350 LOSS: 61.1190075758\n", + "EPOCH: 4351 LOSS: 61.1189746811\n", + "EPOCH: 4352 LOSS: 61.1189417943\n", + "EPOCH: 4353 LOSS: 61.1189089154\n", + "EPOCH: 4354 LOSS: 61.1188760445\n", + "EPOCH: 4355 LOSS: 61.1188431815\n", + "EPOCH: 4356 LOSS: 61.1188103264\n", + "EPOCH: 4357 LOSS: 
61.1187774792\n", + "EPOCH: 4358 LOSS: 61.1187446399\n", + "EPOCH: 4359 LOSS: 61.1187118085\n", + "EPOCH: 4360 LOSS: 61.118678985\n", + "EPOCH: 4361 LOSS: 61.1186461694\n", + "EPOCH: 4362 LOSS: 61.1186133617\n", + "EPOCH: 4363 LOSS: 61.1185805619\n", + "EPOCH: 4364 LOSS: 61.11854777\n", + "EPOCH: 4365 LOSS: 61.118514986\n", + "EPOCH: 4366 LOSS: 61.1184822098\n", + "EPOCH: 4367 LOSS: 61.1184494415\n", + "EPOCH: 4368 LOSS: 61.1184166811\n", + "EPOCH: 4369 LOSS: 61.1183839285\n", + "EPOCH: 4370 LOSS: 61.1183511839\n", + "EPOCH: 4371 LOSS: 61.118318447\n", + "EPOCH: 4372 LOSS: 61.1182857181\n", + "EPOCH: 4373 LOSS: 61.1182529969\n", + "EPOCH: 4374 LOSS: 61.1182202837\n", + "EPOCH: 4375 LOSS: 61.1181875782\n", + "EPOCH: 4376 LOSS: 61.1181548806\n", + "EPOCH: 4377 LOSS: 61.1181221909\n", + "EPOCH: 4378 LOSS: 61.118089509\n", + "EPOCH: 4379 LOSS: 61.1180568349\n", + "EPOCH: 4380 LOSS: 61.1180241686\n", + "EPOCH: 4381 LOSS: 61.1179915102\n", + "EPOCH: 4382 LOSS: 61.1179588596\n", + "EPOCH: 4383 LOSS: 61.1179262168\n", + "EPOCH: 4384 LOSS: 61.1178935818\n", + "EPOCH: 4385 LOSS: 61.1178609546\n", + "EPOCH: 4386 LOSS: 61.1178283352\n", + "EPOCH: 4387 LOSS: 61.1177957237\n", + "EPOCH: 4388 LOSS: 61.1177631199\n", + "EPOCH: 4389 LOSS: 61.1177305239\n", + "EPOCH: 4390 LOSS: 61.1176979357\n", + "EPOCH: 4391 LOSS: 61.1176653553\n", + "EPOCH: 4392 LOSS: 61.1176327827\n", + "EPOCH: 4393 LOSS: 61.1176002178\n", + "EPOCH: 4394 LOSS: 61.1175676608\n", + "EPOCH: 4395 LOSS: 61.1175351115\n", + "EPOCH: 4396 LOSS: 61.1175025699\n", + "EPOCH: 4397 LOSS: 61.1174700362\n", + "EPOCH: 4398 LOSS: 61.1174375102\n", + "EPOCH: 4399 LOSS: 61.1174049919\n", + "EPOCH: 4400 LOSS: 61.1173724814\n", + "EPOCH: 4401 LOSS: 61.1173399787\n", + "EPOCH: 4402 LOSS: 61.1173074837\n", + "EPOCH: 4403 LOSS: 61.1172749965\n", + "EPOCH: 4404 LOSS: 61.1172425169\n", + "EPOCH: 4405 LOSS: 61.1172100452\n", + "EPOCH: 4406 LOSS: 61.1171775811\n", + "EPOCH: 4407 LOSS: 61.1171451248\n", + "EPOCH: 4408 LOSS: 61.1171126762\n", + "EPOCH: 4409 LOSS: 61.1170802354\n", + "EPOCH: 4410 LOSS: 61.1170478022\n", + "EPOCH: 4411 LOSS: 61.1170153768\n", + "EPOCH: 4412 LOSS: 61.1169829591\n", + "EPOCH: 4413 LOSS: 61.1169505491\n", + "EPOCH: 4414 LOSS: 61.1169181468\n", + "EPOCH: 4415 LOSS: 61.1168857522\n", + "EPOCH: 4416 LOSS: 61.1168533653\n", + "EPOCH: 4417 LOSS: 61.1168209861\n", + "EPOCH: 4418 LOSS: 61.1167886146\n", + "EPOCH: 4419 LOSS: 61.1167562507\n", + "EPOCH: 4420 LOSS: 61.1167238946\n", + "EPOCH: 4421 LOSS: 61.1166915461\n", + "EPOCH: 4422 LOSS: 61.1166592053\n", + "EPOCH: 4423 LOSS: 61.1166268722\n", + "EPOCH: 4424 LOSS: 61.1165945468\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 4425 LOSS: 61.116562229\n", + "EPOCH: 4426 LOSS: 61.1165299189\n", + "EPOCH: 4427 LOSS: 61.1164976164\n", + "EPOCH: 4428 LOSS: 61.1164653216\n", + "EPOCH: 4429 LOSS: 61.1164330344\n", + "EPOCH: 4430 LOSS: 61.1164007549\n", + "EPOCH: 4431 LOSS: 61.1163684831\n", + "EPOCH: 4432 LOSS: 61.1163362188\n", + "EPOCH: 4433 LOSS: 61.1163039623\n", + "EPOCH: 4434 LOSS: 61.1162717133\n", + "EPOCH: 4435 LOSS: 61.116239472\n", + "EPOCH: 4436 LOSS: 61.1162072383\n", + "EPOCH: 4437 LOSS: 61.1161750123\n", + "EPOCH: 4438 LOSS: 61.1161427938\n", + "EPOCH: 4439 LOSS: 61.116110583\n", + "EPOCH: 4440 LOSS: 61.1160783798\n", + "EPOCH: 4441 LOSS: 61.1160461842\n", + "EPOCH: 4442 LOSS: 61.1160139962\n", + "EPOCH: 4443 LOSS: 61.1159818158\n", + "EPOCH: 4444 LOSS: 61.115949643\n", + "EPOCH: 4445 LOSS: 61.1159174778\n", + "EPOCH: 4446 LOSS: 61.1158853202\n", + 
"EPOCH: 4447 LOSS: 61.1158531702\n", + "EPOCH: 4448 LOSS: 61.1158210278\n", + "EPOCH: 4449 LOSS: 61.1157888929\n", + "EPOCH: 4450 LOSS: 61.1157567657\n", + "EPOCH: 4451 LOSS: 61.115724646\n", + "EPOCH: 4452 LOSS: 61.1156925339\n", + "EPOCH: 4453 LOSS: 61.1156604293\n", + "EPOCH: 4454 LOSS: 61.1156283323\n", + "EPOCH: 4455 LOSS: 61.1155962429\n", + "EPOCH: 4456 LOSS: 61.1155641611\n", + "EPOCH: 4457 LOSS: 61.1155320868\n", + "EPOCH: 4458 LOSS: 61.11550002\n", + "EPOCH: 4459 LOSS: 61.1154679608\n", + "EPOCH: 4460 LOSS: 61.1154359092\n", + "EPOCH: 4461 LOSS: 61.115403865\n", + "EPOCH: 4462 LOSS: 61.1153718285\n", + "EPOCH: 4463 LOSS: 61.1153397994\n", + "EPOCH: 4464 LOSS: 61.1153077779\n", + "EPOCH: 4465 LOSS: 61.1152757639\n", + "EPOCH: 4466 LOSS: 61.1152437575\n", + "EPOCH: 4467 LOSS: 61.1152117586\n", + "EPOCH: 4468 LOSS: 61.1151797671\n", + "EPOCH: 4469 LOSS: 61.1151477833\n", + "EPOCH: 4470 LOSS: 61.1151158069\n", + "EPOCH: 4471 LOSS: 61.115083838\n", + "EPOCH: 4472 LOSS: 61.1150518766\n", + "EPOCH: 4473 LOSS: 61.1150199227\n", + "EPOCH: 4474 LOSS: 61.1149879764\n", + "EPOCH: 4475 LOSS: 61.1149560375\n", + "EPOCH: 4476 LOSS: 61.1149241061\n", + "EPOCH: 4477 LOSS: 61.1148921822\n", + "EPOCH: 4478 LOSS: 61.1148602658\n", + "EPOCH: 4479 LOSS: 61.1148283569\n", + "EPOCH: 4480 LOSS: 61.1147964554\n", + "EPOCH: 4481 LOSS: 61.1147645614\n", + "EPOCH: 4482 LOSS: 61.1147326749\n", + "EPOCH: 4483 LOSS: 61.1147007959\n", + "EPOCH: 4484 LOSS: 61.1146689243\n", + "EPOCH: 4485 LOSS: 61.1146370602\n", + "EPOCH: 4486 LOSS: 61.1146052035\n", + "EPOCH: 4487 LOSS: 61.1145733543\n", + "EPOCH: 4488 LOSS: 61.1145415126\n", + "EPOCH: 4489 LOSS: 61.1145096783\n", + "EPOCH: 4490 LOSS: 61.1144778514\n", + "EPOCH: 4491 LOSS: 61.114446032\n", + "EPOCH: 4492 LOSS: 61.11441422\n", + "EPOCH: 4493 LOSS: 61.1143824155\n", + "EPOCH: 4494 LOSS: 61.1143506184\n", + "EPOCH: 4495 LOSS: 61.1143188287\n", + "EPOCH: 4496 LOSS: 61.1142870464\n", + "EPOCH: 4497 LOSS: 61.1142552716\n", + "EPOCH: 4498 LOSS: 61.1142235042\n", + "EPOCH: 4499 LOSS: 61.1141917442\n", + "EPOCH: 4500 LOSS: 61.1141599916\n", + "EPOCH: 4501 LOSS: 61.1141282464\n", + "EPOCH: 4502 LOSS: 61.1140965086\n", + "EPOCH: 4503 LOSS: 61.1140647783\n", + "EPOCH: 4504 LOSS: 61.1140330553\n", + "EPOCH: 4505 LOSS: 61.1140013397\n", + "EPOCH: 4506 LOSS: 61.1139696315\n", + "EPOCH: 4507 LOSS: 61.1139379307\n", + "EPOCH: 4508 LOSS: 61.1139062373\n", + "EPOCH: 4509 LOSS: 61.1138745513\n", + "EPOCH: 4510 LOSS: 61.1138428727\n", + "EPOCH: 4511 LOSS: 61.1138112014\n", + "EPOCH: 4512 LOSS: 61.1137795375\n", + "EPOCH: 4513 LOSS: 61.113747881\n", + "EPOCH: 4514 LOSS: 61.1137162318\n", + "EPOCH: 4515 LOSS: 61.11368459\n", + "EPOCH: 4516 LOSS: 61.1136529556\n", + "EPOCH: 4517 LOSS: 61.1136213285\n", + "EPOCH: 4518 LOSS: 61.1135897088\n", + "EPOCH: 4519 LOSS: 61.1135580964\n", + "EPOCH: 4520 LOSS: 61.1135264913\n", + "EPOCH: 4521 LOSS: 61.1134948937\n", + "EPOCH: 4522 LOSS: 61.1134633033\n", + "EPOCH: 4523 LOSS: 61.1134317203\n", + "EPOCH: 4524 LOSS: 61.1134001446\n", + "EPOCH: 4525 LOSS: 61.1133685763\n", + "EPOCH: 4526 LOSS: 61.1133370153\n", + "EPOCH: 4527 LOSS: 61.1133054616\n", + "EPOCH: 4528 LOSS: 61.1132739152\n", + "EPOCH: 4529 LOSS: 61.1132423761\n", + "EPOCH: 4530 LOSS: 61.1132108444\n", + "EPOCH: 4531 LOSS: 61.11317932\n", + "EPOCH: 4532 LOSS: 61.1131478029\n", + "EPOCH: 4533 LOSS: 61.1131162931\n", + "EPOCH: 4534 LOSS: 61.1130847905\n", + "EPOCH: 4535 LOSS: 61.1130532953\n", + "EPOCH: 4536 LOSS: 61.1130218074\n", + "EPOCH: 4537 LOSS: 61.1129903268\n", + "EPOCH: 4538 LOSS: 
61.1129588535\n", + " ... [output truncated: EPOCHs 4539-4991 omitted; the loss continues to decrease slowly and monotonically] ...\n", + "EPOCH: 4992 LOSS: 
61.0993772561\n", + "EPOCH: 4993 LOSS: 61.0993488066\n", + "EPOCH: 4994 LOSS: 61.0993203631\n", + "EPOCH: 4995 LOSS: 61.0992919257\n", + "EPOCH: 4996 LOSS: 61.0992634944\n", + "EPOCH: 4997 LOSS: 61.0992350692\n", + "EPOCH: 4998 LOSS: 61.09920665\n", + "EPOCH: 4999 LOSS: 61.0991782368\n" + ] + } + ], + "source": [ + "#------------------------------------------------------------------------------+\n", + "#\n", + "# Nathan A. Rooy\n", + "# Simple word2vec from scratch with Python\n", + "# 2018-FEB\n", + "#\n", + "#------------------------------------------------------------------------------+\n", + "\n", + "#--- IMPORT DEPENDENCIES ------------------------------------------------------+\n", + "\n", + "import numpy as np\n", + "import re\n", + "from collections import defaultdict\n", + "\n", + "#--- CONSTANTS ----------------------------------------------------------------+\n", + "\n", + "\n", + "class word2vec():\n", + " def __init__ (self):\n", + " self.n = settings['n']\n", + " self.eta = settings['learning_rate']\n", + " self.epochs = settings['epochs']\n", + " self.window = settings['window_size']\n", + " pass\n", + " \n", + " \n", + " # GENERATE TRAINING DATA\n", + " def generate_training_data(self, settings, corpus):\n", + "\n", + " # GENERATE WORD COUNTS\n", + " word_counts = defaultdict(int)\n", + " for row in corpus:\n", + " for word in row:\n", + " word_counts[word] += 1\n", + "\n", + " self.v_count = len(word_counts.keys())\n", + "\n", + " # GENERATE LOOKUP DICTIONARIES\n", + " self.words_list = sorted(list(word_counts.keys()),reverse=False)\n", + " self.word_index = dict((word, i) for i, word in enumerate(self.words_list))\n", + " self.index_word = dict((i, word) for i, word in enumerate(self.words_list))\n", + "\n", + " training_data = []\n", + " # CYCLE THROUGH EACH SENTENCE IN CORPUS\n", + " for sentence in corpus:\n", + " sent_len = len(sentence)\n", + "\n", + " # CYCLE THROUGH EACH WORD IN SENTENCE\n", + " for i, word in enumerate(sentence):\n", + " \n", + " #w_target = sentence[i]\n", + " w_target = self.word2onehot(sentence[i])\n", + "\n", + " # CYCLE THROUGH CONTEXT WINDOW\n", + " w_context = []\n", + " for j in range(i-self.window, i+self.window+1):\n", + " if j!=i and j<=sent_len-1 and j>=0:\n", + " w_context.append(self.word2onehot(sentence[j]))\n", + " training_data.append([w_target, w_context])\n", + " return np.array(training_data)\n", + "\n", + "\n", + " # SOFTMAX ACTIVATION FUNCTION\n", + " def softmax(self, x):\n", + " e_x = np.exp(x - np.max(x))\n", + " return e_x / e_x.sum(axis=0)\n", + "\n", + "\n", + " # CONVERT WORD TO ONE HOT ENCODING\n", + " def word2onehot(self, word):\n", + " word_vec = [0 for i in range(0, self.v_count)]\n", + " word_index = self.word_index[word]\n", + " word_vec[word_index] = 1\n", + " return word_vec\n", + "\n", + "\n", + " # FORWARD PASS\n", + " def forward_pass(self, x):\n", + " h = np.dot(self.w1.T, x)\n", + " u = np.dot(self.w2.T, h)\n", + " y_c = self.softmax(u)\n", + " return y_c, h, u\n", + " \n", + "\n", + " # BACKPROPAGATION\n", + " def backprop(self, e, h, x):\n", + " dl_dw2 = np.outer(h, e) \n", + " dl_dw1 = np.outer(x, np.dot(self.w2, e.T))\n", + "\n", + " # UPDATE WEIGHTS\n", + " self.w1 = self.w1 - (self.eta * dl_dw1)\n", + " self.w2 = self.w2 - (self.eta * dl_dw2)\n", + " pass\n", + "\n", + "\n", + " # TRAIN W2V model\n", + " def train(self, training_data):\n", + " # INITIALIZE WEIGHT MATRICES\n", + " self.w1 = np.random.uniform(-0.8, 0.8, (self.v_count, self.n)) # context matrix\n", + " self.w2 = np.random.uniform(-0.8, 
0.8, (self.n, self.v_count)) # output weight matrix (n x v_count): hidden layer -> vocabulary scores\n",
+ " \n",
+ "        # CYCLE THROUGH EACH EPOCH\n",
+ "        for i in range(0, self.epochs):\n",
+ "\n",
+ "            self.loss = 0\n",
+ "\n",
+ "            # CYCLE THROUGH EACH TRAINING SAMPLE\n",
+ "            for w_t, w_c in training_data:\n",
+ "\n",
+ "                # FORWARD PASS\n",
+ "                y_pred, h, u = self.forward_pass(w_t)\n",
+ "\n",
+ "                # CALCULATE ERROR\n",
+ "                EI = np.sum([np.subtract(y_pred, word) for word in w_c], axis=0)\n",
+ "\n",
+ "                # BACKPROPAGATION\n",
+ "                self.backprop(EI, h, w_t)\n",
+ "\n",
+ "                # CALCULATE LOSS (skip-gram negative log-likelihood, accumulated once per training sample)\n",
+ "                self.loss += -np.sum([u[word.index(1)] for word in w_c]) + len(w_c) * np.log(np.sum(np.exp(u)))\n",
+ "\n",
+ "            print('EPOCH:', i, 'LOSS:', self.loss)\n",
+ "        pass\n",
+ "\n",
+ "\n",
+ "    # input a word, returns a vector (if available)\n",
+ "    def word_vec(self, word):\n",
+ "        w_index = self.word_index[word]\n",
+ "        v_w = self.w1[w_index]\n",
+ "        return v_w\n",
+ "\n",
+ "\n",
+ "    # input a vector, returns nearest word(s)\n",
+ "    def vec_sim(self, vec, top_n):\n",
+ "\n",
+ "        # CYCLE THROUGH VOCAB\n",
+ "        word_sim = {}\n",
+ "        for i in range(self.v_count):\n",
+ "            v_w2 = self.w1[i]\n",
+ "            theta_num = np.dot(vec, v_w2)\n",
+ "            theta_den = np.linalg.norm(vec) * np.linalg.norm(v_w2)\n",
+ "            theta = theta_num / theta_den\n",
+ "\n",
+ "            word = self.index_word[i]\n",
+ "            word_sim[word] = theta\n",
+ "\n",
+ "        # sort by cosine similarity (a two-argument lambda is not a valid sort key in Python 3)\n",
+ "        words_sorted = sorted(word_sim.items(), key=lambda kv: kv[1], reverse=True)\n",
+ "\n",
+ "        for word, sim in words_sorted[:top_n]:\n",
+ "            print(word, sim)\n",
+ "\n",
+ "        pass\n",
+ "\n",
+ "    # input word, returns top [n] most similar words\n",
+ "    def word_sim(self, word, top_n):\n",
+ "\n",
+ "        w1_index = self.word_index[word]\n",
+ "        v_w1 = self.w1[w1_index]\n",
+ "\n",
+ "        # CYCLE THROUGH VOCAB\n",
+ "        word_sim = {}\n",
+ "        for i in range(self.v_count):\n",
+ "            v_w2 = self.w1[i]\n",
+ "            theta_num = np.dot(v_w1, v_w2)\n",
+ "            theta_den = np.linalg.norm(v_w1) * np.linalg.norm(v_w2)\n",
+ "            theta = theta_num / theta_den\n",
+ "\n",
+ "            word = self.index_word[i]\n",
+ "            word_sim[word] = theta\n",
+ "\n",
+ "        # sort by cosine similarity (a two-argument lambda is not a valid sort key in Python 3)\n",
+ "        words_sorted = sorted(word_sim.items(), key=lambda kv: kv[1], reverse=True)\n",
+ "\n",
+ "        for word, sim in words_sorted[:top_n]:\n",
+ "            print(word, sim)\n",
+ "\n",
+ "        pass\n",
+ "\n",
+ "#--- EXAMPLE RUN --------------------------------------------------------------+\n",
+ "\n",
+ "settings = {}\n",
+ "settings['n'] = 5                   # dimension of word embeddings\n",
+ "settings['window_size'] = 2         # context window +/- center word\n",
+ "settings['min_count'] = 0           # minimum word count\n",
+ "settings['epochs'] = 5000           # number of training epochs\n",
+ "settings['neg_samp'] = 10           # number of negative words to use during training\n",
+ "settings['learning_rate'] = 0.01    # learning rate\n",
+ "np.random.seed(0)                   # set the seed for reproducibility\n",
+ "\n",
+ "corpus = [['the','quick','brown','fox','jumped','over','the','lazy','dog']]\n",
+ "\n",
+ "# INITIALIZE W2V MODEL\n",
+ "w2v = word2vec()\n",
+ "\n",
+ "# generate training data\n",
+ "training_data = w2v.generate_training_data(settings, corpus)\n",
+ "\n",
+ "# train word2vec model\n",
+ "w2v.train(training_data)\n",
+ "\n",
+ "#--- END ----------------------------------------------------------------------+"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": { + 
"kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/word2vec-checkpoint.ipynb b/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/word2vec-checkpoint.ipynb new file mode 100644 index 0000000..0987c69 --- /dev/null +++ b/NLP/WordEmbedding/word2vec/.ipynb_checkpoints/word2vec-checkpoint.ipynb @@ -0,0 +1,1103 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Preparing the text data\n", + "\n", + "The previously mentioned TensorFlow tutorial has a few functions that take a text database and transform it so that we can extract input words and their associated grams in mini-batches for training the Word2Vec system / embeddings (if you’re not sure what “mini-batch” means, check out this tutorial). I’ll briefly talk about each of these functions in turn:" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import urllib.request\n", + "import zipfile\n", + "import tensorflow as tf\n", + "import collections\n", + "import numpy as np\n", + "\n", + "def maybe_download(filename, url, expected_bytes):\n", + " \"\"\"Download a file if not present, and make sure it's the right size.\"\"\"\n", + " if not os.path.exists(filename):\n", + " filename, _ = urllib.request.urlretrieve(url + filename, filename)\n", + " statinfo = os.stat(filename)\n", + " if statinfo.st_size == expected_bytes:\n", + " print('Found and verified', filename)\n", + " else:\n", + " print(statinfo.st_size)\n", + " raise Exception(\n", + " 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n", + " return filename" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function checks to see if the filename already has been downloaded from the supplied url. If not, it uses the urllib.request Python module which retrieves a file from the given url argument, and downloads the file into the local code directory. If the file already exists (i.e. os.path.exists(filename) returns true), then the function does not try to download the file again. Next, the function checks the size of the file and makes sure it lines up with the expected file size, expected_bytes. If all is well, it returns the filename object which can be used to extract the data from. To call the function with the data-set we are using in this example, we execute the following code:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found and verified text8.zip\n" + ] + } + ], + "source": [ + "url = 'http://mattmahoney.net/dc/'\n", + "filename = maybe_download('text8.zip', url, 31344016)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next thing we have to do is take the filename object, which points to the downloaded file, and extract the data using the Python zipfile module." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Read the data into a list of strings.\n", + "def read_data(filename):\n", + " \"\"\"Extract the first file enclosed in a zip file as a list of words.\"\"\"\n", + " with zipfile.ZipFile(filename) as f:\n", + " data = tf.compat.as_str(f.read(f.namelist()[0])).split()\n", + " return data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Using zipfile.ZipFile() to extract the zipped file, we can then use the reader functionality found in this zipfile module. First, the namelist() function retrieves all the members of the archive – in this case there is only one member, so we access this using the zero index. Then we use the read() function which reads all the text in the file and pass this through the TensorFlow function as_str which ensures that the text is created as a string data-type. Finally, we use split() function to create a list with all the words in the text file, separated by white-space characters. We can see some of the output here:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse']\n" + ] + } + ], + "source": [ + "vocabulary = read_data(filename)\n", + "print(vocabulary[:7])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As you can observe, the returned vocabulary data contains a list of plain English words, ordered as they are in the sentences of the original extracted text file. Now that we have all the words extracted in a list, we have to do some further processing to enable us to create our skip-gram batch data. These further steps are:\n", + "\n", + "- Extract the top 10,000 most common words to include in our embedding vector\n", + "- Gather together all the unique words and index them with a unique integer value – this is what is required to create an equivalent one-hot type input for the word. We’ll use a dictionary to do this\n", + "- Loop through every word in the dataset (vocabulary variable) and assign it to the unique integer word identified, created in Step 2 above. This will allow easy lookup / processing of the word data stream\n", + "\n", + "The function which performs all this magic is shown below:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def build_dataset(words, n_words):\n", + " \"\"\"Process raw inputs into a dataset.\"\"\"\n", + " count = [['UNK', -1]]\n", + " count.extend(collections.Counter(words).most_common(n_words - 1))\n", + " dictionary = dict()\n", + " for word, _ in count:\n", + " dictionary[word] = len(dictionary)\n", + " data = list()\n", + " unk_count = 0\n", + " for word in words:\n", + " if word in dictionary:\n", + " index = dictionary[word]\n", + " else:\n", + " index = 0 # dictionary['UNK']\n", + " unk_count += 1\n", + " data.append(index)\n", + " count[0][1] = unk_count\n", + " reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n", + " return data, count, dictionary, reversed_dictionary" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first step is setting up a “counter” list, which will store the number of times a word is found within the data-set. 
Because we are restricting our vocabulary to only 10,000 words, any words not within the top 10,000 most common words will be marked with an “UNK” designation, standing for “unknown”. The initialized count list is then extended using the Counter() class from the Python collections module and its associated most_common() function: Counter() tallies the number of occurrences of each word in the given argument (words), and most_common() returns the n_words - 1 most common words as a list of (word, count) pairs, leaving room for the ‘UNK’ entry.\n",
+ "\n",
+ "The next part of this function creates a dictionary, called dictionary, which is populated with keys corresponding to each unique word. The value assigned to each unique word key is simply the current size of the dictionary, so it increases by one with every word added. So, for instance, the most common word will receive the value 1, the second most common the value 2, the third most common word the value 3, and so on (the integer 0 is assigned to the ‘UNK’ words). This step creates a unique integer value for each word within the vocabulary – accomplishing the second step of the process which was defined above.\n",
+ "\n",
+ "Next, the function loops through each word in our full words data set – the data set which was output from the read_data() function. A list called data is created, which will be the same length as words but, instead of being a list of individual words, it will be a list of integers – with each word now represented by the unique integer that was assigned to it in dictionary. So the first few words of our data-set, [‘anarchism’, ‘originated’, ‘as’, ‘a’, ‘term’, ‘of’, ‘abuse’], now look like this in the data variable: [5242, 3083, 12, 6, 195, 2, 3136]. This part of the function addresses step 3 in the list above.\n",
+ "\n",
+ "Finally, the function creates a dictionary called reversed_dictionary that allows us to look up a word based on its unique integer identifier, rather than looking up the identifier based on the word (i.e. the reverse of the original dictionary). \n",
+ "\n",
+ "The final aspect of setting up our data is now to create a data set comprising our input words and their associated context words (grams), which can be used to train our Word2Vec embedding system. 
The code to do this is:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "data_index = 0\n", + "# generate batch data\n", + "def generate_batch(data, batch_size, num_skips, skip_window):\n", + " global data_index\n", + " assert batch_size % num_skips == 0\n", + " assert num_skips <= 2 * skip_window\n", + " batch = np.ndarray(shape=(batch_size), dtype=np.int32)\n", + " context = np.ndarray(shape=(batch_size, 1), dtype=np.int32)\n", + " span = 2 * skip_window + 1 # [ skip_window input_word skip_window ]\n", + " buffer = collections.deque(maxlen=span)\n", + " for _ in range(span):\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " for i in range(batch_size // num_skips):\n", + " target = skip_window # input word at the center of the buffer\n", + " targets_to_avoid = [skip_window]\n", + " for j in range(num_skips):\n", + " while target in targets_to_avoid:\n", + " target = random.randint(0, span - 1)\n", + " targets_to_avoid.append(target)\n", + " batch[i * num_skips + j] = buffer[skip_window] # this is the input word\n", + " context[i * num_skips + j, 0] = buffer[target] # these are the context words\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " # Backtrack a little bit to avoid skipping words in the end of a batch\n", + " data_index = (data_index + len(data) - span) % len(data)\n", + " return batch, context" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function will generate mini-batches to use during our training (again, see here for information on mini-batch training). These batches will consist of input words (stored in batch) and random associated context words within the gram as the labels to predict (stored in context). For instance, in the 5-gram “the cat sat on the”, the input word will be center word i.e. “sat” and the context words that will be predicted will be drawn randomly from the remaining words of the gram: [‘the’, ‘cat’, ‘on’, ‘the’]. In this function, the number of words drawn randomly from the surrounding context is defined by the argument num_skips. The size of the window of context words to draw from around the input word is defined in the argument skip_window – in the example above (“the cat sat on the”), we have a skip window width of 2 around the input word “sat”.\n", + "\n", + "In the function above, first the batch and label outputs are defined as variables of size batch_size. Then the span size is defined, which is basically the size of the word list that the input word and context samples will be drawn from. In the example sub-sentence above “the cat sat on the”, the span is 5 = 2 x skip window + 1. 
After this a buffer is created:" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'buffer = collections.deque(maxlen=span)\\nfor _ in range(span):\\n buffer.append(data[data_index])\\n data_index = (data_index + 1) % len(data)'" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''buffer = collections.deque(maxlen=span)\n", + "for _ in range(span):\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)'''" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This buffer will hold a maximum of span elements and will be a kind of moving window of words that samples are drawn from. Whenever a new word index is added to the buffer, the left most element will drop out of the buffer to allow room for the new word index being added. The position of the buffer in the input text stream is stored in a global variable data_index which is incremented each time a new word is added to the buffer. If it gets to the end of the text stream, the “% len(data)” component of the index update will basically reset the count back to zero.\n", + "\n", + "The code below fills out the batch and context variables: " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'for i in range(batch_size // num_skips):\\n target = skip_window # input word at the center of the buffer\\n targets_to_avoid = [skip_window]\\n for j in range(num_skips):\\n while target in targets_to_avoid:\\n target = random.randint(0, span - 1)\\n targets_to_avoid.append(target)\\n batch[i * num_skips + j] = buffer[skip_window] # this is the input word\\n context[i * num_skips + j, 0] = buffer[target] # these are the context words\\n buffer.append(data[data_index])\\n data_index = (data_index + 1) % len(data)'" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''for i in range(batch_size // num_skips):\n", + " target = skip_window # input word at the center of the buffer\n", + " targets_to_avoid = [skip_window]\n", + " for j in range(num_skips):\n", + " while target in targets_to_avoid:\n", + " target = random.randint(0, span - 1)\n", + " targets_to_avoid.append(target)\n", + " batch[i * num_skips + j] = buffer[skip_window] # this is the input word\n", + " context[i * num_skips + j, 0] = buffer[target] # these are the context words\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)'''" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first “target” word selected is the word at the center of the span of words and is therefore the input word. Then other words are randomly selected from the span of words, making sure that the input word is not selected as part of the context, and each context word is unique. The batch variable will feature repeated input words (buffer[skip_window]) which are matched with each context word in context.\n", + "\n", + "The batch and context variables are then returned – and now we have a means of drawing batches of data from the data set. We are now in a position to create our Word2Vec training code in TensorFlow. However, before we get to that, we’ll first create a validation data-set that we can use to test how our model is doing. 
We do that by measuring the vectors closest together in vector-space, and make sure these words indeed are similar using our knowledge of English. This will be discussed more in the next section. However, for now, the code below shows how to grab some random validation words from the most common words in our vocabulary:" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "# We pick a random validation set to sample nearest neighbors. Here we limit the\n", + "# validation samples to the words that have a low numeric ID, which by\n", + "# construction are also the most frequent.\n", + "valid_size = 16 # Random set of words to evaluate similarity on.\n", + "valid_window = 100 # Only pick dev samples in the head of the distribution.\n", + "valid_examples = np.random.choice(valid_window, valid_size, replace=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code above randomly chooses 16 integers from 0-100 – this corresponds to the integer indexes of the most common 100 words in our text data. These will be the words we examine to assess how our learning is progressing in associating related words together in the vector-space. Now, onto creating the TensorFlow model." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating the TensorFlow model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For a refresher on TensorFlow, check out this tutorial. Below I will step through the process of creating our Word2Vec word embeddings in TensorFlow. What does this involve? Simply, we need to setup the neural network which I previously presented, with a word embedding matrix acting as the hidden layer and an output softmax layer in TensorFlow. 
By training this model, we’ll be learning the best word embedding matrix and therefore we’ll be learning a reduced, context maintaining, mapping of words to vectors.\n", + "\n", + "The first thing to do is set-up some variables which we’ll use later on in the code – the purposes of these variables will become clear as we progress:" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'collect_data' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mvocabulary_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m10000\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcount\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdictionary\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mreverse_dictionary\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcollect_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m128\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0membedding_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m128\u001b[0m \u001b[0;31m# Dimension of the embedding vector.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mskip_window\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1\u001b[0m \u001b[0;31m# How many words to consider left and right.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'collect_data' is not defined" + ] + } + ], + "source": [ + "vocabulary_size = 10000\n", + "data, count, dictionary, reverse_dictionary = collect_data(vocabulary_size=vocabulary_size)\n", + "batch_size = 128\n", + "embedding_size = 128 # Dimension of the embedding vector.\n", + "skip_window = 1 # How many words to consider left and right.\n", + "num_skips = 2 # How many times to reuse an input to generate a context." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found and verified text8.zip\n", + "['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse']\n", + "Initialized\n", + "Average loss at step 0 : 9.24670124054\n", + "Nearest to seven: garcia, baba, numeral, distinguished, campaigns, pride, notable, frisian,\n", + "Nearest to are: sea, associate, tons, absorbed, derived, val, mccarthy, mixture,\n", + "Nearest to up: coronation, equilibrium, fm, championships, latvian, routine, bring, lowest,\n", + "Nearest to one: ancestry, torah, mc, happen, re, especially, mercy, album,\n", + "Nearest to a: joke, principal, desert, dramatic, gradual, dimensional, particularly, gaza,\n", + "Nearest to will: prosecution, eye, kentucky, dangers, ontario, graham, immediate, feet,\n", + "Nearest to two: economics, coast, sing, occupation, adding, harris, context, centuries,\n", + "Nearest to often: emerge, puerto, continue, angeles, proclamation, aa, discipline, nerve,\n", + "Nearest to four: convert, web, khmer, hispanic, constraints, items, definite, napoleonic,\n", + "Nearest to five: ward, proponents, hypoglycemia, life, cavity, worker, developments, oak,\n", + "Nearest to it: pull, paint, tie, fu, piercing, gin, harvard, studies,\n", + "Nearest to have: edwin, avoid, involuntary, alpha, absolute, motors, venus, trail,\n", + "Nearest to at: veteran, possible, paths, korea, khazar, thirty, homosexual, duties,\n", + "Nearest to also: sympathetic, outbreak, shock, factories, shortened, metro, casino, independence,\n", + "Nearest to than: adrian, caucasus, contested, borne, flora, runs, crossing, countryside,\n", + "Nearest to however: complaints, substantial, ties, race, ask, good, notorious, offices,\n", + "Softmax method took 53.639996 minutes to run 100 iterations\n", + "Initialized\n", + "Average loss at step 0 : 9.35233211517\n", + "Nearest to seven: epic, macau, formations, theatre, jason, thereby, mason, goal,\n", + "Nearest to are: principles, washington, cast, walking, holy, strange, wrote, sunday,\n", + "Nearest to up: manuel, differ, we, longest, perl, morris, effective, roger,\n", + "Nearest to one: openly, wayne, spoke, isaiah, http, importantly, regulations, remote,\n", + "Nearest to a: friends, what, broadcasting, less, bigfoot, city, conquered, poem,\n", + "Nearest to will: great, organizations, remote, binomial, released, possesses, christmas, UNK,\n", + "Nearest to two: involuntary, nashville, insane, energy, concludes, reference, enterprises, remote,\n", + "Nearest to often: sensory, juan, amended, electronics, resembles, mandatory, edge, lit,\n", + "Nearest to four: surprising, singapore, constellations, shoulder, bounded, specially, rogers, eggs,\n", + "Nearest to five: springer, creed, salvation, header, critics, tr, case, presence,\n", + "Nearest to it: essence, cannot, jets, seemed, baptists, cryonics, hunt, graves,\n", + "Nearest to have: pre, theoretical, xiv, popularized, cook, seal, phrases, sur,\n", + "Nearest to at: contribution, leslie, probable, derived, pradesh, neither, philosopher, different,\n", + "Nearest to also: intentionally, render, servants, correctly, muslim, modules, connecticut, invoked,\n", + "Nearest to than: suited, coastal, color, agave, coaches, hezbollah, indirectly, heroin,\n", + "Nearest to however: estate, richard, footage, oak, material, districts, nicknamed, solve,\n", + "Average loss at step 2000 : 9.33152878189\n", + "Average loss at step 
4000 : 9.32041128635\n", + "Average loss at step 6000 : 9.31568084288\n", + "Average loss at step 8000 : 9.3075892849\n", + "Average loss at step 10000 : 9.30183052254\n", + "Nearest to seven: zero, vs, epic, theatre, in, thereby, mason, risk,\n", + "Nearest to are: gb, vs, and, is, principles, washington, holy, quarters,\n", + "Nearest to up: differ, vs, we, longest, manuel, effective, if, sigma,\n", + "Nearest to one: zero, UNK, vs, and, the, nine, phi, in,\n", + "Nearest to a: the, UNK, vs, and, one, fao, zero, phi,\n", + "Nearest to will: great, remote, he, binomial, in, organizations, trial, changing,\n", + "Nearest to two: vs, UNK, one, nine, and, the, in, zero,\n", + "Nearest to often: sensory, juan, vowels, amended, resembles, edge, close, arizona,\n", + "Nearest to four: zero, one, surprising, constellations, UNK, psi, agave, volume,\n", + "Nearest to five: UNK, zero, vs, critics, and, one, in, jpg,\n", + "Nearest to it: vs, cannot, UNK, a, essence, jets, seemed, he,\n", + "Nearest to have: pre, xiv, theoretical, seal, cook, teams, ibid, sur,\n", + "Nearest to at: in, probable, contribution, leslie, and, derived, one, philosopher,\n", + "Nearest to also: intentionally, launch, muslim, vs, UNK, deceased, correctly, usually,\n", + "Nearest to than: agave, and, one, coaches, coastal, color, suited, indirectly,\n", + "Nearest to however: UNK, estate, richard, hugh, districts, nicknamed, brother, footage,\n" + ] + } + ], + "source": [ + "import urllib.request\n", + "import collections\n", + "import math\n", + "import os\n", + "import random\n", + "import zipfile\n", + "import datetime as dt\n", + "\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "\n", + "def maybe_download(filename, url, expected_bytes):\n", + " \"\"\"Download a file if not present, and make sure it's the right size.\"\"\"\n", + " if not os.path.exists(filename):\n", + " filename, _ = urllib.request.urlretrieve(url + filename, filename)\n", + " statinfo = os.stat(filename)\n", + " if statinfo.st_size == expected_bytes:\n", + " print('Found and verified', filename)\n", + " else:\n", + " print(statinfo.st_size)\n", + " raise Exception(\n", + " 'Failed to verify ' + filename + '. 
Can you get to it with a browser?')\n", + " return filename\n", + "\n", + "\n", + "# Read the data into a list of strings.\n", + "def read_data(filename):\n", + " \"\"\"Extract the first file enclosed in a zip file as a list of words.\"\"\"\n", + " with zipfile.ZipFile(filename) as f:\n", + " data = tf.compat.as_str(f.read(f.namelist()[0])).split()\n", + " return data\n", + "\n", + "def build_dataset(words, n_words):\n", + " \"\"\"Process raw inputs into a dataset.\"\"\"\n", + " count = [['UNK', -1]]\n", + " count.extend(collections.Counter(words).most_common(n_words - 1))\n", + " dictionary = dict()\n", + " for word, _ in count:\n", + " dictionary[word] = len(dictionary)\n", + " data = list()\n", + " unk_count = 0\n", + " for word in words:\n", + " if word in dictionary:\n", + " index = dictionary[word]\n", + " else:\n", + " index = 0 # dictionary['UNK']\n", + " unk_count += 1\n", + " data.append(index)\n", + " count[0][1] = unk_count\n", + " reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n", + " return data, count, dictionary, reversed_dictionary\n", + "\n", + "\n", + "def collect_data(vocabulary_size=10000):\n", + " url = 'http://mattmahoney.net/dc/'\n", + " filename = maybe_download('text8.zip', url, 31344016)\n", + " vocabulary = read_data(filename)\n", + " print(vocabulary[:7])\n", + " data, count, dictionary, reverse_dictionary = build_dataset(vocabulary,\n", + " vocabulary_size)\n", + " del vocabulary # Hint to reduce memory.\n", + " return data, count, dictionary, reverse_dictionary\n", + "\n", + "data_index = 0\n", + "# generate batch data\n", + "def generate_batch(data, batch_size, num_skips, skip_window):\n", + " global data_index\n", + " assert batch_size % num_skips == 0\n", + " assert num_skips <= 2 * skip_window\n", + " batch = np.ndarray(shape=(batch_size), dtype=np.int32)\n", + " context = np.ndarray(shape=(batch_size, 1), dtype=np.int32)\n", + " span = 2 * skip_window + 1 # [ skip_window input_word skip_window ]\n", + " buffer = collections.deque(maxlen=span)\n", + " for _ in range(span):\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " for i in range(batch_size // num_skips):\n", + " target = skip_window # input word at the center of the buffer\n", + " targets_to_avoid = [skip_window]\n", + " for j in range(num_skips):\n", + " while target in targets_to_avoid:\n", + " target = random.randint(0, span - 1)\n", + " targets_to_avoid.append(target)\n", + " batch[i * num_skips + j] = buffer[skip_window] # this is the input word\n", + " context[i * num_skips + j, 0] = buffer[target] # these are the context words\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " # Backtrack a little bit to avoid skipping words in the end of a batch\n", + " data_index = (data_index + len(data) - span) % len(data)\n", + " return batch, context\n", + "\n", + "vocabulary_size = 10000\n", + "data, count, dictionary, reverse_dictionary = collect_data(vocabulary_size=vocabulary_size)\n", + "\n", + "batch_size = 128\n", + "embedding_size = 300 # Dimension of the embedding vector.\n", + "skip_window = 2 # How many words to consider left and right.\n", + "num_skips = 2 # How many times to reuse an input to generate a label.\n", + "\n", + "# We pick a random validation set to sample nearest neighbors. 
Here we limit the\n", + "# validation samples to the words that have a low numeric ID, which by\n", + "# construction are also the most frequent.\n", + "valid_size = 16 # Random set of words to evaluate similarity on.\n", + "valid_window = 100 # Only pick dev samples in the head of the distribution.\n", + "valid_examples = np.random.choice(valid_window, valid_size, replace=False)\n", + "num_sampled = 64 # Number of negative examples to sample.\n", + "\n", + "graph = tf.Graph()\n", + "\n", + "with graph.as_default():\n", + "\n", + " # Input data.\n", + " train_inputs = tf.placeholder(tf.int32, shape=[batch_size])\n", + " train_context = tf.placeholder(tf.int32, shape=[batch_size, 1])\n", + " valid_dataset = tf.constant(valid_examples, dtype=tf.int32)\n", + "\n", + " # Look up embeddings for inputs.\n", + " embeddings = tf.Variable(\n", + " tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n", + " embed = tf.nn.embedding_lookup(embeddings, train_inputs)\n", + "\n", + " # Construct the variables for the softmax\n", + " weights = tf.Variable(\n", + " tf.truncated_normal([embedding_size, vocabulary_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + " biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + " hidden_out = tf.transpose(tf.matmul(tf.transpose(weights), tf.transpose(embed))) + biases\n", + "\n", + " # convert train_context to a one-hot format\n", + " train_one_hot = tf.one_hot(train_context, vocabulary_size)\n", + "\n", + " cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, labels=train_one_hot))\n", + "\n", + " # Construct the SGD optimizer using a learning rate of 1.0.\n", + " optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(cross_entropy)\n", + "\n", + " # Compute the cosine similarity between minibatch examples and all embeddings.\n", + " norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))\n", + " normalized_embeddings = embeddings / norm\n", + " valid_embeddings = tf.nn.embedding_lookup(\n", + " normalized_embeddings, valid_dataset)\n", + " similarity = tf.matmul(\n", + " valid_embeddings, normalized_embeddings, transpose_b=True)\n", + "\n", + " # Add variable initializer.\n", + " init = tf.global_variables_initializer()\n", + "\n", + "\n", + "def run(graph, num_steps):\n", + " with tf.Session(graph=graph) as session:\n", + " # We must initialize all variables before we use them.\n", + " init.run()\n", + " print('Initialized')\n", + "\n", + " average_loss = 0\n", + " for step in range(num_steps):\n", + " batch_inputs, batch_context = generate_batch(data,\n", + " batch_size, num_skips, skip_window)\n", + " feed_dict = {train_inputs: batch_inputs, train_context: batch_context}\n", + "\n", + " # We perform one update step by evaluating the optimizer op (including it\n", + " # in the list of returned values for session.run()\n", + " _, loss_val = session.run([optimizer, cross_entropy], feed_dict=feed_dict)\n", + " average_loss += loss_val\n", + "\n", + " if step % 2000 == 0:\n", + " if step > 0:\n", + " average_loss /= 2000\n", + " # The average loss is an estimate of the loss over the last 2000 batches.\n", + " print('Average loss at step ', step, ': ', average_loss)\n", + " average_loss = 0\n", + "\n", + " # Note that this is expensive (~20% slowdown if computed every 500 steps)\n", + " if step % 10000 == 0:\n", + " sim = similarity.eval()\n", + " for i in range(valid_size):\n", + " valid_word = reverse_dictionary[valid_examples[i]]\n", + " top_k = 8 # number of nearest neighbors\n", + 
" nearest = (-sim[i, :]).argsort()[1:top_k + 1]\n", + " log_str = 'Nearest to %s:' % valid_word\n", + " for k in range(top_k):\n", + " close_word = reverse_dictionary[nearest[k]]\n", + " log_str = '%s %s,' % (log_str, close_word)\n", + " print(log_str)\n", + " final_embeddings = normalized_embeddings.eval()\n", + "\n", + "num_steps = 100\n", + "softmax_start_time = dt.datetime.now()\n", + "run(graph, num_steps=num_steps)\n", + "softmax_end_time = dt.datetime.now()\n", + "print(\"Softmax method took {} minutes to run 100 iterations\".format((softmax_end_time-softmax_start_time).total_seconds()))\n", + "\n", + "with graph.as_default():\n", + "\n", + " # Construct the variables for the NCE loss\n", + " nce_weights = tf.Variable(\n", + " tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + " nce_biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "\n", + " nce_loss = tf.reduce_mean(\n", + " tf.nn.nce_loss(weights=nce_weights,\n", + " biases=nce_biases,\n", + " labels=train_context,\n", + " inputs=embed,\n", + " num_sampled=num_sampled,\n", + " num_classes=vocabulary_size))\n", + "\n", + " optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(nce_loss)\n", + "\n", + " # Add variable initializer.\n", + " init = tf.global_variables_initializer()\n", + "\n", + "num_steps = 50000\n", + "nce_start_time = dt.datetime.now()\n", + "run(graph, num_steps)\n", + "nce_end_time = dt.datetime.now()\n", + "print(\"NCE method took {} minutes to run 100 iterations\".format((nce_end_time-nce_start_time).total_seconds()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next we setup some TensorFlow placeholders that will hold our input words (their integer indexes) and context words which we are trying to predict. 
We also need to create a constant to hold our validation set indexes in TensorFlow:" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "train_inputs = tf.placeholder(tf.int32, shape=[batch_size])\n", + "train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])\n", + "valid_dataset = tf.constant(valid_examples, dtype=tf.int32)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to setup the embedding matrix variable / tensor – this is straight-forward using the TensorFlow embedding_lookup() function, which I’ll explain shortly:" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Look up embeddings for inputs.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m embeddings = tf.Variable(\n\u001b[0;32m----> 3\u001b[0;31m tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0membed\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0membedding_lookup\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membeddings\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Look up embeddings for inputs.\n", + "embeddings = tf.Variable(\n", + " tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n", + "embed = tf.nn.embedding_lookup(embeddings, train_inputs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first step in the code above is to create the embeddings variable, which is effectively the weights of the connections to the linear hidden layer. We initialize the variable with a random uniform distribution between -1.0 to 1.0. The size of this variable is (vocabulary_size, embedding_size) – the vocabulary_size is the 10,000 words that we have used to setup our data in the previous section. This is basically our one-hot vector input, where the only element with a value of “1” is the current input word, all the other values are set to “0”. The second dimension, embedding_size, is our hidden layer size, and is the length of our new, smaller, representation of our words. We can also think of this tensor as a big lookup table – the rows are each word in our vocabulary, and the columns are our new vector representation of each of these words. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As can be observed, “anarchism” (which would actually be represented by a unique integer or one-hot vector) is now expressed as [0.5, 0.1, -0.1]. We can “look up” anarchism by finding its integer index and searching the rows of embeddings to find the embedding vector: [0.5, 0.1, -0.1].\n", + "\n", + "The next line in the code involves the tf.nn.embedding_lookup() function, which is a useful helper function in TensorFlow for this type of task. 
Here’s how it works – it takes an input vector of integer indexes – in this case our train_input tensor of training input words, and “looks up” these indexes in the supplied embeddings tensor. Therefore, this command will return the current embedding vector for each of the supplied input words in the training batch. The full embedding tensor will be optimized during the training process.\n", + "\n", + "Next we have to create some weights and bias values to connect the output softmax layer, and perform the appropriate multiplication and addition. This looks like:" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Construct the variables for the softmax\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],\n\u001b[0m\u001b[1;32m 3\u001b[0m stddev=1.0 / math.sqrt(embedding_size)))\n\u001b[1;32m 4\u001b[0m \u001b[0mbiases\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mhidden_out\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmatmul\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membed\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtranspose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mweights\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mbiases\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Construct the variables for the softmax\n", + "weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + "biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "hidden_out = tf.matmul(embed, tf.transpose(weights)) + biases" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The weight variable, as it is connecting the hidden layer and the output layer, is of size (out_layer_size, hidden_layer_size) = (vocabulary_size, embedding_size). The biases, as usual, will only be single dimensional and the size of the output layer. We then multiply the embedded variable (embed) by the weights and add the bias. Now we are ready to create a softmax operation and we will use cross entropy loss to optimize the weights, biases and embeddings of the model. To do this easily, we will use the TensorFlow function softmax_cross_entropy_with_logits(). However, to use this function we first have to convert the context words / integer indices into one-hot vectors. 
The code below performs both of these steps, and also adds a gradient descent optimization operation:" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'train_context' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# convert train_context to a one-hot format\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mtrain_one_hot\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_hot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_context\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvocabulary_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, \n\u001b[1;32m 4\u001b[0m labels=train_one_hot))\n\u001b[1;32m 5\u001b[0m \u001b[0;31m# Construct the SGD optimizer using a learning rate of 1.0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'train_context' is not defined" + ] + } + ], + "source": [ + "# convert train_context to a one-hot format\n", + "train_one_hot = tf.one_hot(train_context, vocabulary_size)\n", + "cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, \n", + " labels=train_one_hot))\n", + "# Construct the SGD optimizer using a learning rate of 1.0.\n", + "optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(cross_entropy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to perform our similarity assessments to check on how the model is performing as it trains. To determine which words are similar to each other, we need to perform some sort of operation that measures the “distances” between the various word embedding vectors for the different words. In this case, we will use the cosine similarity measure of distance between vectors." 
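Concretely, the cosine similarity of two vectors a and b is their dot product divided by the product of their L2 norms, cos(theta) = a.b / (||a|| ||b||). A quick NumPy sketch with made-up vectors:

```python
import numpy as np

a = np.array([0.5, 0.1, -0.1])   # made-up embedding vectors for two words
b = np.array([0.4, 0.2,  0.0])

cos_sim = np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))
print(cos_sim)   # near 1 -> similar direction, near 0 -> unrelated, near -1 -> opposite
```
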
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, we calculate the L2 norm of each vector using the tf.square(), tf.reduce_sum() and tf.sqrt() functions to calculate the square, summation and square root of the norm, respectively:" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Compute the cosine similarity between minibatch examples and all embeddings.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mnorm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msqrt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreduce_sum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msquare\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membeddings\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkeep_dims\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mnormalized_embeddings\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0membeddings\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mnorm\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'embeddings' is not defined" + ] + } + ], + "source": [ + "# Compute the cosine similarity between minibatch examples and all embeddings.\n", + "norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))\n", + "normalized_embeddings = embeddings / norm" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we can look up our validation words / vectors using the tf.nn.embedding_lookup() that we discussed earlier\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'normalized_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m valid_embeddings = tf.nn.embedding_lookup(\n\u001b[0;32m----> 2\u001b[0;31m normalized_embeddings, valid_dataset)\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'normalized_embeddings' is not defined" + ] + } + ], + "source": [ + "valid_embeddings = tf.nn.embedding_lookup(\n", + " normalized_embeddings, valid_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As before, we are supplying a list of integers (that correspond to our validation vocabulary words) to the embedding_lookup() function, which looks up these rows in the normalized_embeddings tensor, and returns the subset of validation normalized embeddings. 
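In NumPy terms this gather is just row indexing. A small sketch, assuming a made-up 4-word vocabulary and validation ids [0, 2] (the variable names mirror the TensorFlow code above):

```python
import numpy as np

normalized = np.random.rand(4, 3)     # stand-in for normalized_embeddings: 4 words, 3 dims
normalized /= np.linalg.norm(normalized, axis=1, keepdims=True)   # make every row unit length

valid_examples = np.array([0, 2])               # integer ids of the validation words
valid_embeddings = normalized[valid_examples]   # gather those rows
print(valid_embeddings.shape)                   # (2, 3)
```
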
Now that we have the normalized validation tensor, valid_embeddings, we can multiply this by the full normalized vocabulary (normalized_embedding) to finalize our similarity calculation:" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'valid_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m similarity = tf.matmul(\n\u001b[0;32m----> 2\u001b[0;31m valid_embeddings, normalized_embeddings, transpose_b=True)\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'valid_embeddings' is not defined" + ] + } + ], + "source": [ + "similarity = tf.matmul(\n", + " valid_embeddings, normalized_embeddings, transpose_b=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This operation will return a (validation_size, vocabulary_size) sized tensor, where each row refers to one of our validation words and the columns refer to the similarity between the validation word and all the other words in the vocabulary." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Running the TensorFlow model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code below initializes the variables and feeds in each data batch to the training loop, printing the average loss every 2000 iterations. If this code doesn’t make sense to you, check out my TensorFlow tutorial." + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'graph' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mwith\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSession\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0;31m# We must initialize all variables before we use them.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0minit\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Initialized'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'graph' is not defined" + ] + } + ], + "source": [ + "with tf.Session(graph=graph) as session:\n", + " # We must initialize all variables before we use them.\n", + " init.run()\n", + " print('Initialized')\n", + "\n", + " average_loss = 0\n", + " for step in range(num_steps):\n", + " batch_inputs, batch_context = generate_batch(data,\n", + " batch_size, num_skips, skip_window)\n", + " feed_dict = {train_inputs: batch_inputs, train_context: batch_context}\n", + "\n", + " 
# We perform one update step by evaluating the optimizer op (including it\n", + " # in the list of returned values for session.run()\n", + " _, loss_val = session.run([optimizer, cross_entropy], feed_dict=feed_dict)\n", + " average_loss += loss_val\n", + "\n", + " if step % 2000 == 0:\n", + " if step > 0:\n", + " average_loss /= 2000\n", + " # The average loss is an estimate of the loss over the last 2000 batches.\n", + " print('Average loss at step ', step, ': ', average_loss)\n", + " average_loss = 0" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we want to print out the words which are most similar to our validation words – we do this by calling the similarity operation we defined above and sorting the results (note, this is only performed every 10,000 iterations as it is computationally expensive):" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'step' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Note that this is expensive (~20% slowdown if computed every 500 steps)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mstep\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0;36m10000\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0msim\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msimilarity\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0meval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalid_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mvalid_word\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreverse_dictionary\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvalid_examples\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'step' is not defined" + ] + } + ], + "source": [ + "# Note that this is expensive (~20% slowdown if computed every 500 steps)\n", + "if step % 10000 == 0:\n", + " sim = similarity.eval()\n", + " for i in range(valid_size):\n", + " valid_word = reverse_dictionary[valid_examples[i]]\n", + " top_k = 8 # number of nearest neighbors\n", + " nearest = (-sim[i, :]).argsort()[1:top_k + 1]\n", + " log_str = 'Nearest to %s:' % valid_word\n", + " for k in range(top_k):\n", + " close_word = reverse_dictionary[nearest[k]]\n", + " log_str = '%s %s,' % (log_str, close_word)\n", + " print(log_str)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function first evaluates the similarity operation, which returns an array of cosine similarity values for each of the validation words. Then we iterate through each of the validation words, taking the top 8 closest words by using argsort() on the negative of the similarity to arrange the values in descending order. 
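The argsort() trick deserves a second look: argsort() sorts ascending, so we negate the similarities to get a descending ordering, and we slice from index 1 because position 0 is the validation word itself (its similarity with itself is 1). A tiny sketch with made-up similarity values:

```python
import numpy as np

sim_row = np.array([1.0, 0.2, 0.9, 0.1, 0.6])   # made-up similarities for one validation word
top_k = 2

nearest = (-sim_row).argsort()[1:top_k + 1]     # skip index 0 (the word itself)
print(nearest)                                  # [2 4] -> indexes of the two closest words
```
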
The code then prints out these 8 closest words so we can monitor how the embedding process is performing.\n", + "\n", + "Finally, after all the training iterations are finished, we can assign the final embeddings to a separate tensor for use later (most likely in some sort of other deep learning or machine learning process):" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'normalized_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mfinal_embeddings\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnormalized_embeddings\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0meval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'normalized_embeddings' is not defined" + ] + } + ], + "source": [ + "final_embeddings = normalized_embeddings.eval()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So now we’re done – or are we? The code for this softmax method of Word2Vec is on this site’s Github repository – you could try running it, but I wouldn’t recommend it. Why? Because it is seriously slow." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Speeding things up – the “true” Word2Vec method" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The fact is, performing softmax evaluations and updating the weights over a 10,000 word output/vocabulary is really slow. Why’s that?" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the context of what we are working on, the softmax function will predict what words have the highest probability of being in the context of the input word. To determine that probability however, the denominator of the softmax function has to evaluate all the possible context words in the vocabulary. Therefore, we need 300 x 10,000 = 3M weights, all of which need to be trained for the softmax output. This slows things down.\n", + "\n", + "There is an alternative, faster scheme called Noise Contrastive Estimation (NCE). Instead of taking the probability of the context word compared to all of the possible context words in the vocabulary, this method randomly samples 2-20 possible context words and evaluates the probability only from these. I won’t go into the nitty gritty details here, but suffice to say that this method has been shown to perform well and drastically speeds up the training process.\n", + "\n", + "TensorFlow has helped us out here, and has supplied an NCE loss function that we can use called tf.nn.nce_loss() which we can supply weight and bias variables to. Using this function, the time to perform 100 training iterations reduced from 25 seconds with the softmax method to less than 1 second using the NCE method. An awesome improvement! 
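For reference, the probability the full softmax assigns to a context word $w_O$ given an input word $w_I$ can be written (using $u_j$ for the output-layer score of vocabulary word $j$):

$$P(w_O \mid w_I) = \frac{\exp(u_{w_O})}{\sum_{j=1}^{V} \exp(u_j)}$$

The denominator sums over all $V = 10{,}000$ vocabulary words for every training sample, and that is exactly the cost NCE sidesteps by scoring only the true context word plus the handful of sampled noise words.
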
We replace the softmax lines with the following in our code:" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Construct the variables for the NCE loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m nce_weights = tf.Variable(\n\u001b[0;32m----> 3\u001b[0;31m tf.truncated_normal([vocabulary_size, embedding_size],\n\u001b[0m\u001b[1;32m 4\u001b[0m stddev=1.0 / math.sqrt(embedding_size)))\n\u001b[1;32m 5\u001b[0m \u001b[0mnce_biases\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Construct the variables for the NCE loss\n", + "nce_weights = tf.Variable(\n", + " tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + "nce_biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "\n", + "nce_loss = tf.reduce_mean(\n", + " tf.nn.nce_loss(weights=nce_weights,\n", + " biases=nce_biases,\n", + " labels=train_context,\n", + " inputs=embed,\n", + " num_sampled=num_sampled,\n", + " num_classes=vocabulary_size))\n", + "\n", + "optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(nce_loss)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we are good to run the code. You can get the full code here. As discussed, every 10,000 iterations the code outputs the validation words and the words that the Word2Vec system deems are similar. 
Below, you can see the improvement for some selected validation words between the random initialization and at the 50,000 iteration mark:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/NLP/WordEmbedding/word2vec/simple_wrd2vec.ipynb b/NLP/WordEmbedding/word2vec/simple_wrd2vec.ipynb new file mode 100644 index 0000000..2d0a9c7 --- /dev/null +++ b/NLP/WordEmbedding/word2vec/simple_wrd2vec.ipynb @@ -0,0 +1,5353 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import numpy as np" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Code" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import re\n", + "from collections import defaultdict\n", + "\n", + "\n", + "class word2vec():\n", + " def __init__ (self):\n", + " self.n = settings['n']\n", + " self.eta = settings['learning_rate']\n", + " self.epochs = settings['epochs']\n", + " self.window = settings['window_size']\n", + " pass\n", + " \n", + " \n", + " # GENERATE TRAINING DATA\n", + " def generate_training_data(self, settings, corpus):\n", + "\n", + " # GENERATE WORD COUNTS\n", + " word_counts = defaultdict(int)\n", + " for row in corpus:\n", + " for word in row:\n", + " word_counts[word] += 1\n", + "\n", + " self.v_count = len(word_counts.keys())\n", + "\n", + " # GENERATE LOOKUP DICTIONARIES\n", + " self.words_list = sorted(list(word_counts.keys()),reverse=False)\n", + " self.word_index = dict((word, i) for i, word in enumerate(self.words_list))\n", + " self.index_word = dict((i, word) for i, word in enumerate(self.words_list))\n", + "\n", + " training_data = []\n", + " # CYCLE THROUGH EACH SENTENCE IN CORPUS\n", + " for sentence in corpus:\n", + " sent_len = len(sentence)\n", + "\n", + " # CYCLE THROUGH EACH WORD IN SENTENCE\n", + " for i, word in enumerate(sentence):\n", + " \n", + " #w_target = sentence[i]\n", + " w_target = self.word2onehot(sentence[i])\n", + "\n", + " # CYCLE THROUGH CONTEXT WINDOW\n", + " w_context = []\n", + " for j in range(i-self.window, i+self.window+1):\n", + " if j!=i and j<=sent_len-1 and j>=0:\n", + " w_context.append(self.word2onehot(sentence[j]))\n", + " training_data.append([w_target, w_context])\n", + " return np.array(training_data)\n", + "\n", + "\n", + " # SOFTMAX ACTIVATION FUNCTION\n", + " def softmax(self, x):\n", + " e_x = np.exp(x - np.max(x))\n", + " return e_x / e_x.sum(axis=0)\n", + "\n", + "\n", + " # CONVERT WORD TO ONE HOT ENCODING\n", + " def word2onehot(self, word):\n", + " word_vec = [0 for i in range(0, self.v_count)]\n", + " word_index = self.word_index[word]\n", + " word_vec[word_index] = 1\n", + " return word_vec\n", + "\n", + "\n", + " # FORWARD PASS\n", + " def forward_pass(self, x):\n", + " h = np.dot(self.w1.T, x)\n", + " u = np.dot(self.w2.T, h)\n", + " y_c = self.softmax(u)\n", + " return y_c, h, u\n", + " \n", + "\n", + " # BACKPROPAGATION\n", + " def 
backprop(self, e, h, x):\n", + " dl_dw2 = np.outer(h, e) \n", + " dl_dw1 = np.outer(x, np.dot(self.w2, e.T))\n", + "\n", + " # UPDATE WEIGHTS\n", + " self.w1 = self.w1 - (self.eta * dl_dw1)\n", + " self.w2 = self.w2 - (self.eta * dl_dw2)\n", + " pass\n", + "\n", + "\n", + " # TRAIN W2V model\n", + " def train(self, training_data):\n", + " # INITIALIZE WEIGHT MATRICES\n", + " self.w1 = np.random.uniform(-0.8, 0.8, (self.v_count, self.n)) # embedding matrix (v_count x n)\n", + " self.w2 = np.random.uniform(-0.8, 0.8, (self.n, self.v_count)) # context (output) matrix (n x v_count)\n", + " \n", + " # CYCLE THROUGH EACH EPOCH\n", + " for i in range(0, self.epochs):\n", + "\n", + " self.loss = 0\n", + "\n", + " # CYCLE THROUGH EACH TRAINING SAMPLE\n", + " for w_t, w_c in training_data:\n", + "\n", + " # FORWARD PASS\n", + " y_pred, h, u = self.forward_pass(w_t)\n", + " \n", + " # CALCULATE ERROR\n", + " EI = np.sum([np.subtract(y_pred, word) for word in w_c], axis=0)\n", + "\n", + " # BACKPROPAGATION\n", + " self.backprop(EI, h, w_t)\n", + "\n", + " # CALCULATE LOSS\n", + " self.loss += -np.sum([u[word.index(1)] for word in w_c]) + len(w_c) * np.log(np.sum(np.exp(u)))\n", + " self.loss += -2*np.log(len(w_c)) -np.sum([u[word.index(1)] for word in w_c]) + (len(w_c) * np.log(np.sum(np.exp(u))))\n", + " \n", + " print('EPOCH:',i, 'LOSS:', self.loss)\n", + " pass\n", + "\n", + "\n", + " # input a word, returns a vector (if available)\n", + " def word_vec(self, word):\n", + " w_index = self.word_index[word]\n", + " v_w = self.w1[w_index]\n", + " return v_w\n", + "\n", + "\n", + " # input a vector, returns nearest word(s)\n", + " def vec_sim(self, vec, top_n):\n", + "\n", + " # CYCLE THROUGH VOCAB\n", + " word_sim = {}\n", + " for i in range(self.v_count):\n", + " v_w2 = self.w1[i]\n", + " theta_num = np.dot(vec, v_w2)\n", + " theta_den = np.linalg.norm(vec) * np.linalg.norm(v_w2)\n", + " theta = theta_num / theta_den\n", + "\n", + " word = self.index_word[i]\n", + " word_sim[word] = theta\n", + "\n", + " words_sorted = sorted(word_sim.items(), key=lambda item: item[1], reverse=True)\n", + "\n", + " for word, sim in words_sorted[:top_n]:\n", + " print(word, sim)\n", + " \n", + " pass\n", + "\n", + " # input word, returns top [n] most similar words\n", + " def word_sim(self, word, top_n):\n", + " \n", + " w1_index = self.word_index[word]\n", + " v_w1 = self.w1[w1_index]\n", + "\n", + " # CYCLE THROUGH VOCAB\n", + " word_sim = {}\n", + " for i in range(self.v_count):\n", + " v_w2 = self.w1[i]\n", + " theta_num = np.dot(v_w1, v_w2)\n", + " theta_den = np.linalg.norm(v_w1) * np.linalg.norm(v_w2)\n", + " theta = theta_num / theta_den\n", + "\n", + " word = self.index_word[i]\n", + " word_sim[word] = theta\n", + "\n", + " words_sorted = sorted(word_sim.items(), key=lambda item: item[1], reverse=True)\n", + "\n", + " for word, sim in words_sorted[:top_n]:\n", + " print(word, sim)\n", + " \n", + " pass\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 0 LOSS: 115.711946046\n", + "EPOCH: 1 LOSS: 114.736293974\n", + "EPOCH: 2 LOSS: 113.832568632\n", + "EPOCH: 3 LOSS: 112.992428501\n", + "EPOCH: 4 LOSS: 112.208726729\n", + "EPOCH: 5 LOSS: 111.47530631\n", + "EPOCH: 6 LOSS: 110.786835298\n", + "EPOCH: 7 LOSS: 110.138673405\n", + "EPOCH: 8 LOSS: 109.526763334\n", + "EPOCH: 9 LOSS: 108.947541748\n", + "EPOCH: 10 LOSS: 108.397865917\n", + "EPOCH: 11 LOSS: 107.874952963\n", + "EPOCH: 12 LOSS: 107.376329274\n", + "EPOCH: 13 LOSS: 
106.899788188\n", + "EPOCH: 14 LOSS: 106.443354426\n", + "EPOCH: 15 LOSS: 106.005254059\n", + "EPOCH: 16 LOSS: 105.583889059\n", + "EPOCH: 17 LOSS: 105.177815627\n", + "EPOCH: 18 LOSS: 104.785725681\n", + "EPOCH: 19 LOSS: 104.406430981\n", + "EPOCH: 20 LOSS: 104.038849466\n", + "EPOCH: 21 LOSS: 103.681993468\n", + "EPOCH: 22 LOSS: 103.334959495\n", + "EPOCH: 23 LOSS: 102.99691937\n", + "EPOCH: 24 LOSS: 102.667112505\n", + "EPOCH: 25 LOSS: 102.344839165\n", + "EPOCH: 26 LOSS: 102.029454568\n", + "EPOCH: 27 LOSS: 101.720363708\n", + "EPOCH: 28 LOSS: 101.417016813\n", + "EPOCH: 29 LOSS: 101.118905338\n", + "EPOCH: 30 LOSS: 100.825558431\n", + "EPOCH: 31 LOSS: 100.536539809\n", + "EPOCH: 32 LOSS: 100.251444996\n", + "EPOCH: 33 LOSS: 99.9698988659\n", + "EPOCH: 34 LOSS: 99.6915534695\n", + "EPOCH: 35 LOSS: 99.4160860939\n", + "EPOCH: 36 LOSS: 99.1431975376\n", + "EPOCH: 37 LOSS: 98.8726105697\n", + "EPOCH: 38 LOSS: 98.6040685537\n", + "EPOCH: 39 LOSS: 98.3373342132\n", + "EPOCH: 40 LOSS: 98.0721885256\n", + "EPOCH: 41 LOSS: 97.8084297249\n", + "EPOCH: 42 LOSS: 97.5458724031\n", + "EPOCH: 43 LOSS: 97.2843466953\n", + "EPOCH: 44 LOSS: 97.023697539\n", + "EPOCH: 45 LOSS: 96.7637839976\n", + "EPOCH: 46 LOSS: 96.5044786387\n", + "EPOCH: 47 LOSS: 96.2456669596\n", + "EPOCH: 48 LOSS: 95.9872468527\n", + "EPOCH: 49 LOSS: 95.7291281038\n", + "EPOCH: 50 LOSS: 95.4712319185\n", + "EPOCH: 51 LOSS: 95.2134904698\n", + "EPOCH: 52 LOSS: 94.9558464635\n", + "EPOCH: 53 LOSS: 94.6982527157\n", + "EPOCH: 54 LOSS: 94.4406717406\n", + "EPOCH: 55 LOSS: 94.1830753422\n", + "EPOCH: 56 LOSS: 93.9254442103\n", + "EPOCH: 57 LOSS: 93.6677675148\n", + "EPOCH: 58 LOSS: 93.4100424998\n", + "EPOCH: 59 LOSS: 93.1522740719\n", + "EPOCH: 60 LOSS: 92.8944743846\n", + "EPOCH: 61 LOSS: 92.6366624158\n", + "EPOCH: 62 LOSS: 92.3788635387\n", + "EPOCH: 63 LOSS: 92.121109085\n", + "EPOCH: 64 LOSS: 91.8634359016\n", + "EPOCH: 65 LOSS: 91.6058858998\n", + "EPOCH: 66 LOSS: 91.3485055991\n", + "EPOCH: 67 LOSS: 91.0913456655\n", + "EPOCH: 68 LOSS: 90.8344604456\n", + "EPOCH: 69 LOSS: 90.5779074992\n", + "EPOCH: 70 LOSS: 90.3217471301\n", + "EPOCH: 71 LOSS: 90.0660419185\n", + "EPOCH: 72 LOSS: 89.8108562565\n", + "EPOCH: 73 LOSS: 89.5562558884\n", + "EPOCH: 74 LOSS: 89.302307459\n", + "EPOCH: 75 LOSS: 89.0490780705\n", + "EPOCH: 76 LOSS: 88.7966348519\n", + "EPOCH: 77 LOSS: 88.5450445417\n", + "EPOCH: 78 LOSS: 88.2943730876\n", + "EPOCH: 79 LOSS: 88.0446852628\n", + "EPOCH: 80 LOSS: 87.7960443033\n", + "EPOCH: 81 LOSS: 87.548511567\n", + "EPOCH: 82 LOSS: 87.3021462152\n", + "EPOCH: 83 LOSS: 87.0570049197\n", + "EPOCH: 84 LOSS: 86.8131415956\n", + "EPOCH: 85 LOSS: 86.5706071611\n", + "EPOCH: 86 LOSS: 86.3294493252\n", + "EPOCH: 87 LOSS: 86.089712404\n", + "EPOCH: 88 LOSS: 85.8514371661\n", + "EPOCH: 89 LOSS: 85.6146607069\n", + "EPOCH: 90 LOSS: 85.3794163524\n", + "EPOCH: 91 LOSS: 85.1457335922\n", + "EPOCH: 92 LOSS: 84.9136380411\n", + "EPOCH: 93 LOSS: 84.6831514288\n", + "EPOCH: 94 LOSS: 84.4542916182\n", + "EPOCH: 95 LOSS: 84.2270726492\n", + "EPOCH: 96 LOSS: 84.0015048091\n", + "EPOCH: 97 LOSS: 83.7775947279\n", + "EPOCH: 98 LOSS: 83.5553454962\n", + "EPOCH: 99 LOSS: 83.3347568059\n", + "EPOCH: 100 LOSS: 83.115825111\n", + "EPOCH: 101 LOSS: 82.8985438075\n", + "EPOCH: 102 LOSS: 82.6829034306\n", + "EPOCH: 103 LOSS: 82.4688918669\n", + "EPOCH: 104 LOSS: 82.2564945797\n", + "EPOCH: 105 LOSS: 82.0456948472\n", + "EPOCH: 106 LOSS: 81.8364740077\n", + "EPOCH: 107 LOSS: 81.6288117148\n", + "EPOCH: 108 LOSS: 81.422686195\n", + "EPOCH: 109 LOSS: 
81.2180745108\n", + "EPOCH: 110 LOSS: 81.0149528225\n", + "EPOCH: 111 LOSS: 80.8132966505\n", + "EPOCH: 112 LOSS: 80.6130811329\n", + "EPOCH: 113 LOSS: 80.4142812789\n", + "EPOCH: 114 LOSS: 80.2168722136\n", + "EPOCH: 115 LOSS: 80.020829415\n", + "EPOCH: 116 LOSS: 79.8261289392\n", + "EPOCH: 117 LOSS: 79.6327476335\n", + "EPOCH: 118 LOSS: 79.4406633353\n", + "EPOCH: 119 LOSS: 79.2498550559\n", + "EPOCH: 120 LOSS: 79.0603031483\n", + "EPOCH: 121 LOSS: 78.8719894567\n", + "EPOCH: 122 LOSS: 78.6848974485\n", + "EPOCH: 123 LOSS: 78.4990123276\n", + "EPOCH: 124 LOSS: 78.3143211282\n", + "EPOCH: 125 LOSS: 78.1308127893\n", + "EPOCH: 126 LOSS: 77.9484782097\n", + "EPOCH: 127 LOSS: 77.7673102838\n", + "EPOCH: 128 LOSS: 77.5873039184\n", + "EPOCH: 129 LOSS: 77.4084560306\n", + "EPOCH: 130 LOSS: 77.230765528\n", + "EPOCH: 131 LOSS: 77.054233272\n", + "EPOCH: 132 LOSS: 76.8788620246\n", + "EPOCH: 133 LOSS: 76.7046563795\n", + "EPOCH: 134 LOSS: 76.5316226801\n", + "EPOCH: 135 LOSS: 76.3597689237\n", + "EPOCH: 136 LOSS: 76.189104655\n", + "EPOCH: 137 LOSS: 76.0196408483\n", + "EPOCH: 138 LOSS: 75.8513897822\n", + "EPOCH: 139 LOSS: 75.684364906\n", + "EPOCH: 140 LOSS: 75.5185807005\n", + "EPOCH: 141 LOSS: 75.3540525345\n", + "EPOCH: 142 LOSS: 75.1907965186\n", + "EPOCH: 143 LOSS: 75.0288293566\n", + "EPOCH: 144 LOSS: 74.8681681975\n", + "EPOCH: 145 LOSS: 74.7088304885\n", + "EPOCH: 146 LOSS: 74.5508338305\n", + "EPOCH: 147 LOSS: 74.3941958364\n", + "EPOCH: 148 LOSS: 74.2389339954\n", + "EPOCH: 149 LOSS: 74.0850655418\n", + "EPOCH: 150 LOSS: 73.932607331\n", + "EPOCH: 151 LOSS: 73.7815757229\n", + "EPOCH: 152 LOSS: 73.6319864728\n", + "EPOCH: 153 LOSS: 73.483854631\n", + "EPOCH: 154 LOSS: 73.3371944525\n", + "EPOCH: 155 LOSS: 73.1920193142\n", + "EPOCH: 156 LOSS: 73.0483416432\n", + "EPOCH: 157 LOSS: 72.9061728542\n", + "EPOCH: 158 LOSS: 72.7655232968\n", + "EPOCH: 159 LOSS: 72.6264022119\n", + "EPOCH: 160 LOSS: 72.4888176983\n", + "EPOCH: 161 LOSS: 72.352776688\n", + "EPOCH: 162 LOSS: 72.2182849303\n", + "EPOCH: 163 LOSS: 72.0853469845\n", + "EPOCH: 164 LOSS: 71.9539662208\n", + "EPOCH: 165 LOSS: 71.8241448279\n", + "EPOCH: 166 LOSS: 71.6958838283\n", + "EPOCH: 167 LOSS: 71.5691830993\n", + "EPOCH: 168 LOSS: 71.4440414003\n", + "EPOCH: 169 LOSS: 71.3204564045\n", + "EPOCH: 170 LOSS: 71.1984247359\n", + "EPOCH: 171 LOSS: 71.0779420091\n", + "EPOCH: 172 LOSS: 70.9590028735\n", + "EPOCH: 173 LOSS: 70.8416010592\n", + "EPOCH: 174 LOSS: 70.7257294256\n", + "EPOCH: 175 LOSS: 70.6113800114\n", + "EPOCH: 176 LOSS: 70.4985440849\n", + "EPOCH: 177 LOSS: 70.3872121964\n", + "EPOCH: 178 LOSS: 70.2773742289\n", + "EPOCH: 179 LOSS: 70.1690194499\n", + "EPOCH: 180 LOSS: 70.0621365616\n", + "EPOCH: 181 LOSS: 69.9567137504\n", + "EPOCH: 182 LOSS: 69.8527387356\n", + "EPOCH: 183 LOSS: 69.7501988153\n", + "EPOCH: 184 LOSS: 69.6490809116\n", + "EPOCH: 185 LOSS: 69.5493716136\n", + "EPOCH: 186 LOSS: 69.451057218\n", + "EPOCH: 187 LOSS: 69.3541237677\n", + "EPOCH: 188 LOSS: 69.2585570881\n", + "EPOCH: 189 LOSS: 69.1643428208\n", + "EPOCH: 190 LOSS: 69.0714664555\n", + "EPOCH: 191 LOSS: 68.9799133592\n", + "EPOCH: 192 LOSS: 68.8896688025\n", + "EPOCH: 193 LOSS: 68.800717985\n", + "EPOCH: 194 LOSS: 68.7130460566\n", + "EPOCH: 195 LOSS: 68.6266381388\n", + "EPOCH: 196 LOSS: 68.5414793417\n", + "EPOCH: 197 LOSS: 68.4575547809\n", + "EPOCH: 198 LOSS: 68.3748495912\n", + "EPOCH: 199 LOSS: 68.2933489394\n", + "EPOCH: 200 LOSS: 68.2130380347\n", + "EPOCH: 201 LOSS: 68.1339021387\n", + "EPOCH: 202 LOSS: 68.0559265726\n", + "EPOCH: 
203 LOSS: 67.9790967242\n", + "EPOCH: 204 LOSS: 67.903398053\n", + "EPOCH: 205 LOSS: 67.8288160954\n", + "EPOCH: 206 LOSS: 67.7553364672\n", + "EPOCH: 207 LOSS: 67.682944867\n", + "EPOCH: 208 LOSS: 67.611627078\n", + "EPOCH: 209 LOSS: 67.5413689692\n", + "EPOCH: 210 LOSS: 67.4721564968\n", + "EPOCH: 211 LOSS: 67.4039757038\n", + "EPOCH: 212 LOSS: 67.3368127213\n", + "EPOCH: 213 LOSS: 67.2706537673\n", + "EPOCH: 214 LOSS: 67.2054851472\n", + "EPOCH: 215 LOSS: 67.141293253\n", + "EPOCH: 216 LOSS: 67.0780645632\n", + "EPOCH: 217 LOSS: 67.0157856418\n", + "EPOCH: 218 LOSS: 66.9544431387\n", + "EPOCH: 219 LOSS: 66.8940237886\n", + "EPOCH: 220 LOSS: 66.8345144113\n", + "EPOCH: 221 LOSS: 66.7759019115\n", + "EPOCH: 222 LOSS: 66.7181732787\n", + "EPOCH: 223 LOSS: 66.661315588\n", + "EPOCH: 224 LOSS: 66.6053159999\n", + "EPOCH: 225 LOSS: 66.5501617615\n", + "EPOCH: 226 LOSS: 66.4958402072\n", + "EPOCH: 227 LOSS: 66.4423387598\n", + "EPOCH: 228 LOSS: 66.3896449316\n", + "EPOCH: 229 LOSS: 66.3377463262\n", + "EPOCH: 230 LOSS: 66.2866306397\n", + "EPOCH: 231 LOSS: 66.2362856631\n", + "EPOCH: 232 LOSS: 66.1866992837\n", + "EPOCH: 233 LOSS: 66.1378594872\n", + "EPOCH: 234 LOSS: 66.0897543606\n", + "EPOCH: 235 LOSS: 66.0423720938\n", + "EPOCH: 236 LOSS: 65.995700982\n", + "EPOCH: 237 LOSS: 65.9497294288\n", + "EPOCH: 238 LOSS: 65.9044459481\n", + "EPOCH: 239 LOSS: 65.8598391668\n", + "EPOCH: 240 LOSS: 65.8158978275\n", + "EPOCH: 241 LOSS: 65.7726107905\n", + "EPOCH: 242 LOSS: 65.7299670372\n", + "EPOCH: 243 LOSS: 65.6879556714\n", + "EPOCH: 244 LOSS: 65.6465659227\n", + "EPOCH: 245 LOSS: 65.6057871481\n", + "EPOCH: 246 LOSS: 65.5656088345\n", + "EPOCH: 247 LOSS: 65.5260206006\n", + "EPOCH: 248 LOSS: 65.4870121992\n", + "EPOCH: 249 LOSS: 65.4485735184\n", + "EPOCH: 250 LOSS: 65.4106945842\n", + "EPOCH: 251 LOSS: 65.373365561\n", + "EPOCH: 252 LOSS: 65.336576754\n", + "EPOCH: 253 LOSS: 65.3003186097\n", + "EPOCH: 254 LOSS: 65.2645817173\n", + "EPOCH: 255 LOSS: 65.2293568096\n", + "EPOCH: 256 LOSS: 65.1946347636\n", + "EPOCH: 257 LOSS: 65.1604066012\n", + "EPOCH: 258 LOSS: 65.1266634896\n", + "EPOCH: 259 LOSS: 65.0933967414\n", + "EPOCH: 260 LOSS: 65.0605978151\n", + "EPOCH: 261 LOSS: 65.0282583143\n", + "EPOCH: 262 LOSS: 64.9963699881\n", + "EPOCH: 263 LOSS: 64.9649247305\n", + "EPOCH: 264 LOSS: 64.9339145797\n", + "EPOCH: 265 LOSS: 64.9033317178\n", + "EPOCH: 266 LOSS: 64.8731684696\n", + "EPOCH: 267 LOSS: 64.8434173019\n", + "EPOCH: 268 LOSS: 64.8140708222\n", + "EPOCH: 269 LOSS: 64.7851217779\n", + "EPOCH: 270 LOSS: 64.7565630548\n", + "EPOCH: 271 LOSS: 64.7283876755\n", + "EPOCH: 272 LOSS: 64.7005887983\n", + "EPOCH: 273 LOSS: 64.6731597155\n", + "EPOCH: 274 LOSS: 64.6460938516\n", + "EPOCH: 275 LOSS: 64.6193847617\n", + "EPOCH: 276 LOSS: 64.5930261299\n", + "EPOCH: 277 LOSS: 64.5670117669\n", + "EPOCH: 278 LOSS: 64.5413356089\n", + "EPOCH: 279 LOSS: 64.515991715\n", + "EPOCH: 280 LOSS: 64.4909742653\n", + "EPOCH: 281 LOSS: 64.4662775593\n", + "EPOCH: 282 LOSS: 64.4418960135\n", + "EPOCH: 283 LOSS: 64.4178241593\n", + "EPOCH: 284 LOSS: 64.3940566411\n", + "EPOCH: 285 LOSS: 64.3705882141\n", + "EPOCH: 286 LOSS: 64.3474137422\n", + "EPOCH: 287 LOSS: 64.3245281959\n", + "EPOCH: 288 LOSS: 64.3019266503\n", + "EPOCH: 289 LOSS: 64.2796042828\n", + "EPOCH: 290 LOSS: 64.257556371\n", + "EPOCH: 291 LOSS: 64.2357782908\n", + "EPOCH: 292 LOSS: 64.2142655143\n", + "EPOCH: 293 LOSS: 64.1930136076\n", + "EPOCH: 294 LOSS: 64.1720182287\n", + "EPOCH: 295 LOSS: 64.1512751258\n", + "EPOCH: 296 LOSS: 64.1307801351\n", + 
"EPOCH: 297 LOSS: 64.1105291787\n", + "EPOCH: 298 LOSS: 64.0905182631\n", + "EPOCH: 299 LOSS: 64.0707434767\n", + "EPOCH: 300 LOSS: 64.0512009885\n", + "EPOCH: 301 LOSS: 64.0318870458\n", + "EPOCH: 302 LOSS: 64.0127979727\n", + "EPOCH: 303 LOSS: 63.993930168\n", + "EPOCH: 304 LOSS: 63.9752801039\n", + "EPOCH: 305 LOSS: 63.9568443236\n", + "EPOCH: 306 LOSS: 63.9386194406\n", + "EPOCH: 307 LOSS: 63.9206021359\n", + "EPOCH: 308 LOSS: 63.9027891574\n", + "EPOCH: 309 LOSS: 63.8851773178\n", + "EPOCH: 310 LOSS: 63.8677634929\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 311 LOSS: 63.8505446207\n", + "EPOCH: 312 LOSS: 63.8335176993\n", + "EPOCH: 313 LOSS: 63.8166797859\n", + "EPOCH: 314 LOSS: 63.8000279953\n", + "EPOCH: 315 LOSS: 63.7835594981\n", + "EPOCH: 316 LOSS: 63.7672715202\n", + "EPOCH: 317 LOSS: 63.7511613406\n", + "EPOCH: 318 LOSS: 63.7352262908\n", + "EPOCH: 319 LOSS: 63.7194637533\n", + "EPOCH: 320 LOSS: 63.7038711602\n", + "EPOCH: 321 LOSS: 63.6884459925\n", + "EPOCH: 322 LOSS: 63.6731857784\n", + "EPOCH: 323 LOSS: 63.6580880927\n", + "EPOCH: 324 LOSS: 63.6431505554\n", + "EPOCH: 325 LOSS: 63.6283708305\n", + "EPOCH: 326 LOSS: 63.6137466255\n", + "EPOCH: 327 LOSS: 63.5992756899\n", + "EPOCH: 328 LOSS: 63.5849558143\n", + "EPOCH: 329 LOSS: 63.5707848299\n", + "EPOCH: 330 LOSS: 63.5567606069\n", + "EPOCH: 331 LOSS: 63.5428810539\n", + "EPOCH: 332 LOSS: 63.5291441172\n", + "EPOCH: 333 LOSS: 63.5155477798\n", + "EPOCH: 334 LOSS: 63.5020900604\n", + "EPOCH: 335 LOSS: 63.4887690129\n", + "EPOCH: 336 LOSS: 63.4755827253\n", + "EPOCH: 337 LOSS: 63.4625293191\n", + "EPOCH: 338 LOSS: 63.4496069488\n", + "EPOCH: 339 LOSS: 63.4368138007\n", + "EPOCH: 340 LOSS: 63.4241480923\n", + "EPOCH: 341 LOSS: 63.4116080719\n", + "EPOCH: 342 LOSS: 63.3991920178\n", + "EPOCH: 343 LOSS: 63.3868982374\n", + "EPOCH: 344 LOSS: 63.3747250669\n", + "EPOCH: 345 LOSS: 63.3626708706\n", + "EPOCH: 346 LOSS: 63.3507340401\n", + "EPOCH: 347 LOSS: 63.338912994\n", + "EPOCH: 348 LOSS: 63.327206177\n", + "EPOCH: 349 LOSS: 63.3156120597\n", + "EPOCH: 350 LOSS: 63.3041291378\n", + "EPOCH: 351 LOSS: 63.2927559318\n", + "EPOCH: 352 LOSS: 63.2814909863\n", + "EPOCH: 353 LOSS: 63.2703328693\n", + "EPOCH: 354 LOSS: 63.2592801723\n", + "EPOCH: 355 LOSS: 63.2483315094\n", + "EPOCH: 356 LOSS: 63.2374855168\n", + "EPOCH: 357 LOSS: 63.2267408527\n", + "EPOCH: 358 LOSS: 63.2160961963\n", + "EPOCH: 359 LOSS: 63.2055502481\n", + "EPOCH: 360 LOSS: 63.1951017287\n", + "EPOCH: 361 LOSS: 63.1847493792\n", + "EPOCH: 362 LOSS: 63.1744919599\n", + "EPOCH: 363 LOSS: 63.1643282509\n", + "EPOCH: 364 LOSS: 63.1542570509\n", + "EPOCH: 365 LOSS: 63.1442771771\n", + "EPOCH: 366 LOSS: 63.1343874652\n", + "EPOCH: 367 LOSS: 63.1245867685\n", + "EPOCH: 368 LOSS: 63.1148739579\n", + "EPOCH: 369 LOSS: 63.1052479213\n", + "EPOCH: 370 LOSS: 63.0957075637\n", + "EPOCH: 371 LOSS: 63.0862518065\n", + "EPOCH: 372 LOSS: 63.0768795872\n", + "EPOCH: 373 LOSS: 63.0675898594\n", + "EPOCH: 374 LOSS: 63.0583815922\n", + "EPOCH: 375 LOSS: 63.04925377\n", + "EPOCH: 376 LOSS: 63.0402053923\n", + "EPOCH: 377 LOSS: 63.0312354735\n", + "EPOCH: 378 LOSS: 63.0223430422\n", + "EPOCH: 379 LOSS: 63.0135271414\n", + "EPOCH: 380 LOSS: 63.0047868281\n", + "EPOCH: 381 LOSS: 62.996121173\n", + "EPOCH: 382 LOSS: 62.9875292603\n", + "EPOCH: 383 LOSS: 62.9790101874\n", + "EPOCH: 384 LOSS: 62.9705630647\n", + "EPOCH: 385 LOSS: 62.9621870155\n", + "EPOCH: 386 LOSS: 62.9538811754\n", + "EPOCH: 387 LOSS: 62.9456446927\n", + "EPOCH: 388 LOSS: 
62.9374767274\n", + "EPOCH: 389 LOSS: 62.9293764519\n", + "EPOCH: 390 LOSS: 62.9213430498\n", + "EPOCH: 391 LOSS: 62.9133757166\n", + "EPOCH: 392 LOSS: 62.9054736589\n", + "EPOCH: 393 LOSS: 62.8976360945\n", + "EPOCH: 394 LOSS: 62.8898622522\n", + "EPOCH: 395 LOSS: 62.8821513714\n", + "EPOCH: 396 LOSS: 62.8745027022\n", + "EPOCH: 397 LOSS: 62.866915505\n", + "EPOCH: 398 LOSS: 62.8593890505\n", + "EPOCH: 399 LOSS: 62.8519226194\n", + "EPOCH: 400 LOSS: 62.8445155024\n", + "EPOCH: 401 LOSS: 62.8371669997\n", + "EPOCH: 402 LOSS: 62.8298764214\n", + "EPOCH: 403 LOSS: 62.8226430867\n", + "EPOCH: 404 LOSS: 62.8154663243\n", + "EPOCH: 405 LOSS: 62.8083454718\n", + "EPOCH: 406 LOSS: 62.8012798761\n", + "EPOCH: 407 LOSS: 62.7942688925\n", + "EPOCH: 408 LOSS: 62.7873118854\n", + "EPOCH: 409 LOSS: 62.7804082275\n", + "EPOCH: 410 LOSS: 62.7735573\n", + "EPOCH: 411 LOSS: 62.7667584923\n", + "EPOCH: 412 LOSS: 62.7600112022\n", + "EPOCH: 413 LOSS: 62.7533148352\n", + "EPOCH: 414 LOSS: 62.7466688051\n", + "EPOCH: 415 LOSS: 62.740072533\n", + "EPOCH: 416 LOSS: 62.7335254482\n", + "EPOCH: 417 LOSS: 62.7270269872\n", + "EPOCH: 418 LOSS: 62.720576594\n", + "EPOCH: 419 LOSS: 62.71417372\n", + "EPOCH: 420 LOSS: 62.7078178238\n", + "EPOCH: 421 LOSS: 62.701508371\n", + "EPOCH: 422 LOSS: 62.6952448344\n", + "EPOCH: 423 LOSS: 62.6890266936\n", + "EPOCH: 424 LOSS: 62.6828534349\n", + "EPOCH: 425 LOSS: 62.6767245514\n", + "EPOCH: 426 LOSS: 62.6706395428\n", + "EPOCH: 427 LOSS: 62.6645979153\n", + "EPOCH: 428 LOSS: 62.6585991814\n", + "EPOCH: 429 LOSS: 62.6526428602\n", + "EPOCH: 430 LOSS: 62.6467284767\n", + "EPOCH: 431 LOSS: 62.6408555621\n", + "EPOCH: 432 LOSS: 62.6350236538\n", + "EPOCH: 433 LOSS: 62.629232295\n", + "EPOCH: 434 LOSS: 62.623481035\n", + "EPOCH: 435 LOSS: 62.6177694285\n", + "EPOCH: 436 LOSS: 62.6120970363\n", + "EPOCH: 437 LOSS: 62.6064634246\n", + "EPOCH: 438 LOSS: 62.6008681652\n", + "EPOCH: 439 LOSS: 62.5953108354\n", + "EPOCH: 440 LOSS: 62.589791018\n", + "EPOCH: 441 LOSS: 62.5843083008\n", + "EPOCH: 442 LOSS: 62.5788622771\n", + "EPOCH: 443 LOSS: 62.5734525454\n", + "EPOCH: 444 LOSS: 62.5680787092\n", + "EPOCH: 445 LOSS: 62.5627403769\n", + "EPOCH: 446 LOSS: 62.5574371622\n", + "EPOCH: 447 LOSS: 62.5521686835\n", + "EPOCH: 448 LOSS: 62.5469345639\n", + "EPOCH: 449 LOSS: 62.5417344314\n", + "EPOCH: 450 LOSS: 62.5365679189\n", + "EPOCH: 451 LOSS: 62.5314346635\n", + "EPOCH: 452 LOSS: 62.5263343072\n", + "EPOCH: 453 LOSS: 62.5212664965\n", + "EPOCH: 454 LOSS: 62.5162308821\n", + "EPOCH: 455 LOSS: 62.5112271194\n", + "EPOCH: 456 LOSS: 62.5062548679\n", + "EPOCH: 457 LOSS: 62.5013137914\n", + "EPOCH: 458 LOSS: 62.4964035581\n", + "EPOCH: 459 LOSS: 62.4915238403\n", + "EPOCH: 460 LOSS: 62.4866743142\n", + "EPOCH: 461 LOSS: 62.4818546603\n", + "EPOCH: 462 LOSS: 62.4770645629\n", + "EPOCH: 463 LOSS: 62.4723037105\n", + "EPOCH: 464 LOSS: 62.4675717953\n", + "EPOCH: 465 LOSS: 62.4628685133\n", + "EPOCH: 466 LOSS: 62.4581935646\n", + "EPOCH: 467 LOSS: 62.4535466526\n", + "EPOCH: 468 LOSS: 62.4489274848\n", + "EPOCH: 469 LOSS: 62.4443357722\n", + "EPOCH: 470 LOSS: 62.4397712294\n", + "EPOCH: 471 LOSS: 62.4352335744\n", + "EPOCH: 472 LOSS: 62.4307225291\n", + "EPOCH: 473 LOSS: 62.4262378184\n", + "EPOCH: 474 LOSS: 62.4217791711\n", + "EPOCH: 475 LOSS: 62.417346319\n", + "EPOCH: 476 LOSS: 62.4129389973\n", + "EPOCH: 477 LOSS: 62.4085569448\n", + "EPOCH: 478 LOSS: 62.4041999031\n", + "EPOCH: 479 LOSS: 62.3998676174\n", + "EPOCH: 480 LOSS: 62.3955598358\n", + "EPOCH: 481 LOSS: 62.3912763097\n", + "EPOCH: 482 
LOSS: 62.3870167935\n", + "EPOCH: 483 LOSS: 62.3827810447\n", + "EPOCH: 484 LOSS: 62.3785688239\n", + "EPOCH: 485 LOSS: 62.3743798944\n", + "EPOCH: 486 LOSS: 62.3702140228\n", + "EPOCH: 487 LOSS: 62.3660709783\n", + "EPOCH: 488 LOSS: 62.3619505332\n", + "EPOCH: 489 LOSS: 62.3578524624\n", + "EPOCH: 490 LOSS: 62.3537765439\n", + "EPOCH: 491 LOSS: 62.3497225582\n", + "EPOCH: 492 LOSS: 62.3456902886\n", + "EPOCH: 493 LOSS: 62.3416795212\n", + "EPOCH: 494 LOSS: 62.3376900446\n", + "EPOCH: 495 LOSS: 62.3337216501\n", + "EPOCH: 496 LOSS: 62.3297741317\n", + "EPOCH: 497 LOSS: 62.3258472859\n", + "EPOCH: 498 LOSS: 62.3219409116\n", + "EPOCH: 499 LOSS: 62.3180548104\n", + "EPOCH: 500 LOSS: 62.3141887862\n", + "EPOCH: 501 LOSS: 62.3103426455\n", + "EPOCH: 502 LOSS: 62.3065161972\n", + "EPOCH: 503 LOSS: 62.3027092525\n", + "EPOCH: 504 LOSS: 62.2989216249\n", + "EPOCH: 505 LOSS: 62.2951531305\n", + "EPOCH: 506 LOSS: 62.2914035874\n", + "EPOCH: 507 LOSS: 62.2876728161\n", + "EPOCH: 508 LOSS: 62.2839606393\n", + "EPOCH: 509 LOSS: 62.280266882\n", + "EPOCH: 510 LOSS: 62.2765913714\n", + "EPOCH: 511 LOSS: 62.2729339368\n", + "EPOCH: 512 LOSS: 62.2692944095\n", + "EPOCH: 513 LOSS: 62.2656726232\n", + "EPOCH: 514 LOSS: 62.2620684136\n", + "EPOCH: 515 LOSS: 62.2584816183\n", + "EPOCH: 516 LOSS: 62.254912077\n", + "EPOCH: 517 LOSS: 62.2513596316\n", + "EPOCH: 518 LOSS: 62.2478241259\n", + "EPOCH: 519 LOSS: 62.2443054054\n", + "EPOCH: 520 LOSS: 62.240803318\n", + "EPOCH: 521 LOSS: 62.2373177132\n", + "EPOCH: 522 LOSS: 62.2338484424\n", + "EPOCH: 523 LOSS: 62.230395359\n", + "EPOCH: 524 LOSS: 62.2269583183\n", + "EPOCH: 525 LOSS: 62.2235371773\n", + "EPOCH: 526 LOSS: 62.2201317948\n", + "EPOCH: 527 LOSS: 62.2167420315\n", + "EPOCH: 528 LOSS: 62.2133677498\n", + "EPOCH: 529 LOSS: 62.2100088138\n", + "EPOCH: 530 LOSS: 62.2066650894\n", + "EPOCH: 531 LOSS: 62.2033364441\n", + "EPOCH: 532 LOSS: 62.2000227474\n", + "EPOCH: 533 LOSS: 62.19672387\n", + "EPOCH: 534 LOSS: 62.1934396846\n", + "EPOCH: 535 LOSS: 62.1901700653\n", + "EPOCH: 536 LOSS: 62.186914888\n", + "EPOCH: 537 LOSS: 62.1836740301\n", + "EPOCH: 538 LOSS: 62.1804473705\n", + "EPOCH: 539 LOSS: 62.1772347897\n", + "EPOCH: 540 LOSS: 62.1740361698\n", + "EPOCH: 541 LOSS: 62.1708513943\n", + "EPOCH: 542 LOSS: 62.1676803483\n", + "EPOCH: 543 LOSS: 62.1645229184\n", + "EPOCH: 544 LOSS: 62.1613789924\n", + "EPOCH: 545 LOSS: 62.15824846\n", + "EPOCH: 546 LOSS: 62.1551312118\n", + "EPOCH: 547 LOSS: 62.1520271403\n", + "EPOCH: 548 LOSS: 62.148936139\n", + "EPOCH: 549 LOSS: 62.1458581031\n", + "EPOCH: 550 LOSS: 62.1427929288\n", + "EPOCH: 551 LOSS: 62.1397405141\n", + "EPOCH: 552 LOSS: 62.1367007579\n", + "EPOCH: 553 LOSS: 62.1336735606\n", + "EPOCH: 554 LOSS: 62.130658824\n", + "EPOCH: 555 LOSS: 62.1276564509\n", + "EPOCH: 556 LOSS: 62.1246663457\n", + "EPOCH: 557 LOSS: 62.1216884139\n", + "EPOCH: 558 LOSS: 62.1187225621\n", + "EPOCH: 559 LOSS: 62.1157686983\n", + "EPOCH: 560 LOSS: 62.1128267316\n", + "EPOCH: 561 LOSS: 62.1098965725\n", + "EPOCH: 562 LOSS: 62.1069781323\n", + "EPOCH: 563 LOSS: 62.104071324\n", + "EPOCH: 564 LOSS: 62.1011760611\n", + "EPOCH: 565 LOSS: 62.0982922589\n", + "EPOCH: 566 LOSS: 62.0954198333\n", + "EPOCH: 567 LOSS: 62.0925587015\n", + "EPOCH: 568 LOSS: 62.0897087819\n", + "EPOCH: 569 LOSS: 62.0868699939\n", + "EPOCH: 570 LOSS: 62.0840422579\n", + "EPOCH: 571 LOSS: 62.0812254955\n", + "EPOCH: 572 LOSS: 62.0784196291\n", + "EPOCH: 573 LOSS: 62.0756245825\n", + "EPOCH: 574 LOSS: 62.0728402802\n", + "EPOCH: 575 LOSS: 62.0700666478\n", + 
"EPOCH: 576 LOSS: 62.067303612\n", + "EPOCH: 577 LOSS: 62.0645511003\n", + "EPOCH: 578 LOSS: 62.0618090413\n", + "EPOCH: 579 LOSS: 62.0590773646\n", + "EPOCH: 580 LOSS: 62.0563560007\n", + "EPOCH: 581 LOSS: 62.0536448808\n", + "EPOCH: 582 LOSS: 62.0509439374\n", + "EPOCH: 583 LOSS: 62.0482531036\n", + "EPOCH: 584 LOSS: 62.0455723137\n", + "EPOCH: 585 LOSS: 62.0429015027\n", + "EPOCH: 586 LOSS: 62.0402406064\n", + "EPOCH: 587 LOSS: 62.0375895617\n", + "EPOCH: 588 LOSS: 62.0349483062\n", + "EPOCH: 589 LOSS: 62.0323167782\n", + "EPOCH: 590 LOSS: 62.0296949173\n", + "EPOCH: 591 LOSS: 62.0270826634\n", + "EPOCH: 592 LOSS: 62.0244799575\n", + "EPOCH: 593 LOSS: 62.0218867414\n", + "EPOCH: 594 LOSS: 62.0193029577\n", + "EPOCH: 595 LOSS: 62.0167285495\n", + "EPOCH: 596 LOSS: 62.0141634611\n", + "EPOCH: 597 LOSS: 62.0116076373\n", + "EPOCH: 598 LOSS: 62.0090610236\n", + "EPOCH: 599 LOSS: 62.0065235665\n", + "EPOCH: 600 LOSS: 62.0039952129\n", + "EPOCH: 601 LOSS: 62.0014759108\n", + "EPOCH: 602 LOSS: 61.9989656086\n", + "EPOCH: 603 LOSS: 61.9964642556\n", + "EPOCH: 604 LOSS: 61.9939718016\n", + "EPOCH: 605 LOSS: 61.9914881973\n", + "EPOCH: 606 LOSS: 61.9890133938\n", + "EPOCH: 607 LOSS: 61.9865473433\n", + "EPOCH: 608 LOSS: 61.9840899982\n", + "EPOCH: 609 LOSS: 61.9816413118\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 610 LOSS: 61.9792012379\n", + "EPOCH: 611 LOSS: 61.9767697312\n", + "EPOCH: 612 LOSS: 61.9743467467\n", + "EPOCH: 613 LOSS: 61.9719322401\n", + "EPOCH: 614 LOSS: 61.9695261679\n", + "EPOCH: 615 LOSS: 61.9671284869\n", + "EPOCH: 616 LOSS: 61.9647391548\n", + "EPOCH: 617 LOSS: 61.9623581295\n", + "EPOCH: 618 LOSS: 61.9599853698\n", + "EPOCH: 619 LOSS: 61.957620835\n", + "EPOCH: 620 LOSS: 61.9552644848\n", + "EPOCH: 621 LOSS: 61.9529162796\n", + "EPOCH: 622 LOSS: 61.9505761802\n", + "EPOCH: 623 LOSS: 61.9482441481\n", + "EPOCH: 624 LOSS: 61.9459201452\n", + "EPOCH: 625 LOSS: 61.943604134\n", + "EPOCH: 626 LOSS: 61.9412960774\n", + "EPOCH: 627 LOSS: 61.9389959388\n", + "EPOCH: 628 LOSS: 61.9367036823\n", + "EPOCH: 629 LOSS: 61.9344192722\n", + "EPOCH: 630 LOSS: 61.9321426735\n", + "EPOCH: 631 LOSS: 61.9298738514\n", + "EPOCH: 632 LOSS: 61.927612772\n", + "EPOCH: 633 LOSS: 61.9253594014\n", + "EPOCH: 634 LOSS: 61.9231137064\n", + "EPOCH: 635 LOSS: 61.9208756542\n", + "EPOCH: 636 LOSS: 61.9186452123\n", + "EPOCH: 637 LOSS: 61.9164223489\n", + "EPOCH: 638 LOSS: 61.9142070324\n", + "EPOCH: 639 LOSS: 61.9119992316\n", + "EPOCH: 640 LOSS: 61.9097989158\n", + "EPOCH: 641 LOSS: 61.9076060548\n", + "EPOCH: 642 LOSS: 61.9054206185\n", + "EPOCH: 643 LOSS: 61.9032425774\n", + "EPOCH: 644 LOSS: 61.9010719025\n", + "EPOCH: 645 LOSS: 61.8989085647\n", + "EPOCH: 646 LOSS: 61.8967525358\n", + "EPOCH: 647 LOSS: 61.8946037877\n", + "EPOCH: 648 LOSS: 61.8924622926\n", + "EPOCH: 649 LOSS: 61.8903280232\n", + "EPOCH: 650 LOSS: 61.8882009524\n", + "EPOCH: 651 LOSS: 61.8860810536\n", + "EPOCH: 652 LOSS: 61.8839683004\n", + "EPOCH: 653 LOSS: 61.8818626667\n", + "EPOCH: 654 LOSS: 61.8797641269\n", + "EPOCH: 655 LOSS: 61.8776726555\n", + "EPOCH: 656 LOSS: 61.8755882274\n", + "EPOCH: 657 LOSS: 61.8735108178\n", + "EPOCH: 658 LOSS: 61.8714404021\n", + "EPOCH: 659 LOSS: 61.8693769563\n", + "EPOCH: 660 LOSS: 61.8673204563\n", + "EPOCH: 661 LOSS: 61.8652708784\n", + "EPOCH: 662 LOSS: 61.8632281993\n", + "EPOCH: 663 LOSS: 61.8611923958\n", + "EPOCH: 664 LOSS: 61.8591634451\n", + "EPOCH: 665 LOSS: 61.8571413246\n", + "EPOCH: 666 LOSS: 61.8551260118\n", + "EPOCH: 667 LOSS: 
61.8531174848\n", + "EPOCH: 668 LOSS: 61.8511157215\n", + "EPOCH: 669 LOSS: 61.8491207005\n", + "(epochs 670 through 2487: loss continues to decrease slowly, from about 61.85 down to about 61.20)\n", + "EPOCH: 2488 LOSS: 61.201560533\n", + "EPOCH: 2489 LOSS: 61.2014968413\n", + "EPOCH: 2490 LOSS: 
61.2014331873\n", + "EPOCH: 2491 LOSS: 61.2013695711\n", + "EPOCH: 2492 LOSS: 61.2013059926\n", + "EPOCH: 2493 LOSS: 61.2012424516\n", + "EPOCH: 2494 LOSS: 61.2011789482\n", + "EPOCH: 2495 LOSS: 61.2011154823\n", + "EPOCH: 2496 LOSS: 61.2010520539\n", + "EPOCH: 2497 LOSS: 61.2009886628\n", + "EPOCH: 2498 LOSS: 61.2009253091\n", + "EPOCH: 2499 LOSS: 61.2008619926\n", + "EPOCH: 2500 LOSS: 61.2007987134\n", + "EPOCH: 2501 LOSS: 61.2007354713\n", + "EPOCH: 2502 LOSS: 61.2006722664\n", + "EPOCH: 2503 LOSS: 61.2006090986\n", + "EPOCH: 2504 LOSS: 61.2005459677\n", + "EPOCH: 2505 LOSS: 61.2004828739\n", + "EPOCH: 2506 LOSS: 61.2004198169\n", + "EPOCH: 2507 LOSS: 61.2003567968\n", + "EPOCH: 2508 LOSS: 61.2002938135\n", + "EPOCH: 2509 LOSS: 61.2002308669\n", + "EPOCH: 2510 LOSS: 61.200167957\n", + "EPOCH: 2511 LOSS: 61.2001050838\n", + "EPOCH: 2512 LOSS: 61.2000422472\n", + "EPOCH: 2513 LOSS: 61.1999794471\n", + "EPOCH: 2514 LOSS: 61.1999166835\n", + "EPOCH: 2515 LOSS: 61.1998539564\n", + "EPOCH: 2516 LOSS: 61.1997912656\n", + "EPOCH: 2517 LOSS: 61.1997286112\n", + "EPOCH: 2518 LOSS: 61.199665993\n", + "EPOCH: 2519 LOSS: 61.1996034111\n", + "EPOCH: 2520 LOSS: 61.1995408654\n", + "EPOCH: 2521 LOSS: 61.1994783557\n", + "EPOCH: 2522 LOSS: 61.1994158822\n", + "EPOCH: 2523 LOSS: 61.1993534447\n", + "EPOCH: 2524 LOSS: 61.1992910431\n", + "EPOCH: 2525 LOSS: 61.1992286775\n", + "EPOCH: 2526 LOSS: 61.1991663477\n", + "EPOCH: 2527 LOSS: 61.1991040538\n", + "EPOCH: 2528 LOSS: 61.1990417957\n", + "EPOCH: 2529 LOSS: 61.1989795732\n", + "EPOCH: 2530 LOSS: 61.1989173865\n", + "EPOCH: 2531 LOSS: 61.1988552353\n", + "EPOCH: 2532 LOSS: 61.1987931197\n", + "EPOCH: 2533 LOSS: 61.1987310397\n", + "EPOCH: 2534 LOSS: 61.1986689951\n", + "EPOCH: 2535 LOSS: 61.1986069859\n", + "EPOCH: 2536 LOSS: 61.1985450121\n", + "EPOCH: 2537 LOSS: 61.1984830736\n", + "EPOCH: 2538 LOSS: 61.1984211704\n", + "EPOCH: 2539 LOSS: 61.1983593024\n", + "EPOCH: 2540 LOSS: 61.1982974696\n", + "EPOCH: 2541 LOSS: 61.1982356719\n", + "EPOCH: 2542 LOSS: 61.1981739093\n", + "EPOCH: 2543 LOSS: 61.1981121817\n", + "EPOCH: 2544 LOSS: 61.198050489\n", + "EPOCH: 2545 LOSS: 61.1979888313\n", + "EPOCH: 2546 LOSS: 61.1979272085\n", + "EPOCH: 2547 LOSS: 61.1978656205\n", + "EPOCH: 2548 LOSS: 61.1978040673\n", + "EPOCH: 2549 LOSS: 61.1977425488\n", + "EPOCH: 2550 LOSS: 61.197681065\n", + "EPOCH: 2551 LOSS: 61.1976196159\n", + "EPOCH: 2552 LOSS: 61.1975582013\n", + "EPOCH: 2553 LOSS: 61.1974968212\n", + "EPOCH: 2554 LOSS: 61.1974354757\n", + "EPOCH: 2555 LOSS: 61.1973741646\n", + "EPOCH: 2556 LOSS: 61.1973128879\n", + "EPOCH: 2557 LOSS: 61.1972516455\n", + "EPOCH: 2558 LOSS: 61.1971904375\n", + "EPOCH: 2559 LOSS: 61.1971292637\n", + "EPOCH: 2560 LOSS: 61.1970681241\n", + "EPOCH: 2561 LOSS: 61.1970070187\n", + "EPOCH: 2562 LOSS: 61.1969459473\n", + "EPOCH: 2563 LOSS: 61.1968849101\n", + "EPOCH: 2564 LOSS: 61.1968239068\n", + "EPOCH: 2565 LOSS: 61.1967629375\n", + "EPOCH: 2566 LOSS: 61.1967020022\n", + "EPOCH: 2567 LOSS: 61.1966411007\n", + "EPOCH: 2568 LOSS: 61.1965802331\n", + "EPOCH: 2569 LOSS: 61.1965193992\n", + "EPOCH: 2570 LOSS: 61.1964585991\n", + "EPOCH: 2571 LOSS: 61.1963978327\n", + "EPOCH: 2572 LOSS: 61.1963370999\n", + "EPOCH: 2573 LOSS: 61.1962764007\n", + "EPOCH: 2574 LOSS: 61.1962157351\n", + "EPOCH: 2575 LOSS: 61.196155103\n", + "EPOCH: 2576 LOSS: 61.1960945043\n", + "EPOCH: 2577 LOSS: 61.1960339391\n", + "EPOCH: 2578 LOSS: 61.1959734072\n", + "EPOCH: 2579 LOSS: 61.1959129086\n", + "EPOCH: 2580 LOSS: 61.1958524434\n", + "EPOCH: 2581 LOSS: 
61.1957920113\n", + "EPOCH: 2582 LOSS: 61.1957316125\n", + "EPOCH: 2583 LOSS: 61.1956712468\n", + "EPOCH: 2584 LOSS: 61.1956109142\n", + "EPOCH: 2585 LOSS: 61.1955506146\n", + "EPOCH: 2586 LOSS: 61.1954903481\n", + "EPOCH: 2587 LOSS: 61.1954301145\n", + "EPOCH: 2588 LOSS: 61.1953699138\n", + "EPOCH: 2589 LOSS: 61.1953097461\n", + "EPOCH: 2590 LOSS: 61.1952496111\n", + "EPOCH: 2591 LOSS: 61.1951895089\n", + "EPOCH: 2592 LOSS: 61.1951294395\n", + "EPOCH: 2593 LOSS: 61.1950694028\n", + "EPOCH: 2594 LOSS: 61.1950093987\n", + "EPOCH: 2595 LOSS: 61.1949494272\n", + "EPOCH: 2596 LOSS: 61.1948894883\n", + "EPOCH: 2597 LOSS: 61.194829582\n", + "EPOCH: 2598 LOSS: 61.1947697081\n", + "EPOCH: 2599 LOSS: 61.1947098666\n", + "EPOCH: 2600 LOSS: 61.1946500575\n", + "EPOCH: 2601 LOSS: 61.1945902808\n", + "EPOCH: 2602 LOSS: 61.1945305364\n", + "EPOCH: 2603 LOSS: 61.1944708242\n", + "EPOCH: 2604 LOSS: 61.1944111442\n", + "EPOCH: 2605 LOSS: 61.1943514965\n", + "EPOCH: 2606 LOSS: 61.1942918808\n", + "EPOCH: 2607 LOSS: 61.1942322973\n", + "EPOCH: 2608 LOSS: 61.1941727457\n", + "EPOCH: 2609 LOSS: 61.1941132262\n", + "EPOCH: 2610 LOSS: 61.1940537387\n", + "EPOCH: 2611 LOSS: 61.193994283\n", + "EPOCH: 2612 LOSS: 61.1939348593\n", + "EPOCH: 2613 LOSS: 61.1938754673\n", + "EPOCH: 2614 LOSS: 61.1938161072\n", + "EPOCH: 2615 LOSS: 61.1937567788\n", + "EPOCH: 2616 LOSS: 61.1936974821\n", + "EPOCH: 2617 LOSS: 61.1936382171\n", + "EPOCH: 2618 LOSS: 61.1935789837\n", + "EPOCH: 2619 LOSS: 61.1935197818\n", + "EPOCH: 2620 LOSS: 61.1934606115\n", + "EPOCH: 2621 LOSS: 61.1934014727\n", + "EPOCH: 2622 LOSS: 61.1933423654\n", + "EPOCH: 2623 LOSS: 61.1932832895\n", + "EPOCH: 2624 LOSS: 61.1932242449\n", + "EPOCH: 2625 LOSS: 61.1931652316\n", + "EPOCH: 2626 LOSS: 61.1931062497\n", + "EPOCH: 2627 LOSS: 61.193047299\n", + "EPOCH: 2628 LOSS: 61.1929883795\n", + "EPOCH: 2629 LOSS: 61.1929294912\n", + "EPOCH: 2630 LOSS: 61.1928706339\n", + "EPOCH: 2631 LOSS: 61.1928118078\n", + "EPOCH: 2632 LOSS: 61.1927530127\n", + "EPOCH: 2633 LOSS: 61.1926942486\n", + "EPOCH: 2634 LOSS: 61.1926355155\n", + "EPOCH: 2635 LOSS: 61.1925768133\n", + "EPOCH: 2636 LOSS: 61.1925181419\n", + "EPOCH: 2637 LOSS: 61.1924595014\n", + "EPOCH: 2638 LOSS: 61.1924008917\n", + "EPOCH: 2639 LOSS: 61.1923423128\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2640 LOSS: 61.1922837646\n", + "EPOCH: 2641 LOSS: 61.192225247\n", + "EPOCH: 2642 LOSS: 61.1921667601\n", + "EPOCH: 2643 LOSS: 61.1921083038\n", + "EPOCH: 2644 LOSS: 61.192049878\n", + "EPOCH: 2645 LOSS: 61.1919914827\n", + "EPOCH: 2646 LOSS: 61.191933118\n", + "EPOCH: 2647 LOSS: 61.1918747836\n", + "EPOCH: 2648 LOSS: 61.1918164797\n", + "EPOCH: 2649 LOSS: 61.1917582061\n", + "EPOCH: 2650 LOSS: 61.1916999629\n", + "EPOCH: 2651 LOSS: 61.1916417499\n", + "EPOCH: 2652 LOSS: 61.1915835671\n", + "EPOCH: 2653 LOSS: 61.1915254146\n", + "EPOCH: 2654 LOSS: 61.1914672922\n", + "EPOCH: 2655 LOSS: 61.1914092\n", + "EPOCH: 2656 LOSS: 61.1913511378\n", + "EPOCH: 2657 LOSS: 61.1912931057\n", + "EPOCH: 2658 LOSS: 61.1912351035\n", + "EPOCH: 2659 LOSS: 61.1911771314\n", + "EPOCH: 2660 LOSS: 61.1911191892\n", + "EPOCH: 2661 LOSS: 61.1910612768\n", + "EPOCH: 2662 LOSS: 61.1910033943\n", + "EPOCH: 2663 LOSS: 61.1909455417\n", + "EPOCH: 2664 LOSS: 61.1908877188\n", + "EPOCH: 2665 LOSS: 61.1908299256\n", + "EPOCH: 2666 LOSS: 61.1907721621\n", + "EPOCH: 2667 LOSS: 61.1907144283\n", + "EPOCH: 2668 LOSS: 61.1906567242\n", + "EPOCH: 2669 LOSS: 61.1905990496\n", + "EPOCH: 2670 LOSS: 61.1905414045\n", + 
"EPOCH: 2671 LOSS: 61.190483789\n", + "EPOCH: 2672 LOSS: 61.1904262029\n", + "EPOCH: 2673 LOSS: 61.1903686462\n", + "EPOCH: 2674 LOSS: 61.190311119\n", + "EPOCH: 2675 LOSS: 61.1902536211\n", + "EPOCH: 2676 LOSS: 61.1901961525\n", + "EPOCH: 2677 LOSS: 61.1901387133\n", + "EPOCH: 2678 LOSS: 61.1900813032\n", + "EPOCH: 2679 LOSS: 61.1900239224\n", + "EPOCH: 2680 LOSS: 61.1899665707\n", + "EPOCH: 2681 LOSS: 61.1899092482\n", + "EPOCH: 2682 LOSS: 61.1898519548\n", + "EPOCH: 2683 LOSS: 61.1897946904\n", + "EPOCH: 2684 LOSS: 61.1897374551\n", + "EPOCH: 2685 LOSS: 61.1896802487\n", + "EPOCH: 2686 LOSS: 61.1896230713\n", + "EPOCH: 2687 LOSS: 61.1895659228\n", + "EPOCH: 2688 LOSS: 61.1895088032\n", + "EPOCH: 2689 LOSS: 61.1894517124\n", + "EPOCH: 2690 LOSS: 61.1893946504\n", + "EPOCH: 2691 LOSS: 61.1893376172\n", + "EPOCH: 2692 LOSS: 61.1892806127\n", + "EPOCH: 2693 LOSS: 61.189223637\n", + "EPOCH: 2694 LOSS: 61.1891666898\n", + "EPOCH: 2695 LOSS: 61.1891097713\n", + "EPOCH: 2696 LOSS: 61.1890528814\n", + "EPOCH: 2697 LOSS: 61.1889960201\n", + "EPOCH: 2698 LOSS: 61.1889391872\n", + "EPOCH: 2699 LOSS: 61.1888823828\n", + "EPOCH: 2700 LOSS: 61.1888256069\n", + "EPOCH: 2701 LOSS: 61.1887688594\n", + "EPOCH: 2702 LOSS: 61.1887121402\n", + "EPOCH: 2703 LOSS: 61.1886554494\n", + "EPOCH: 2704 LOSS: 61.1885987869\n", + "EPOCH: 2705 LOSS: 61.1885421526\n", + "EPOCH: 2706 LOSS: 61.1884855466\n", + "EPOCH: 2707 LOSS: 61.1884289688\n", + "EPOCH: 2708 LOSS: 61.1883724191\n", + "EPOCH: 2709 LOSS: 61.1883158976\n", + "EPOCH: 2710 LOSS: 61.1882594041\n", + "EPOCH: 2711 LOSS: 61.1882029387\n", + "EPOCH: 2712 LOSS: 61.1881465014\n", + "EPOCH: 2713 LOSS: 61.188090092\n", + "EPOCH: 2714 LOSS: 61.1880337105\n", + "EPOCH: 2715 LOSS: 61.187977357\n", + "EPOCH: 2716 LOSS: 61.1879210313\n", + "EPOCH: 2717 LOSS: 61.1878647335\n", + "EPOCH: 2718 LOSS: 61.1878084635\n", + "EPOCH: 2719 LOSS: 61.1877522213\n", + "EPOCH: 2720 LOSS: 61.1876960068\n", + "EPOCH: 2721 LOSS: 61.1876398201\n", + "EPOCH: 2722 LOSS: 61.187583661\n", + "EPOCH: 2723 LOSS: 61.1875275295\n", + "EPOCH: 2724 LOSS: 61.1874714257\n", + "EPOCH: 2725 LOSS: 61.1874153494\n", + "EPOCH: 2726 LOSS: 61.1873593007\n", + "EPOCH: 2727 LOSS: 61.1873032794\n", + "EPOCH: 2728 LOSS: 61.1872472857\n", + "EPOCH: 2729 LOSS: 61.1871913193\n", + "EPOCH: 2730 LOSS: 61.1871353804\n", + "EPOCH: 2731 LOSS: 61.1870794689\n", + "EPOCH: 2732 LOSS: 61.1870235846\n", + "EPOCH: 2733 LOSS: 61.1869677277\n", + "EPOCH: 2734 LOSS: 61.1869118981\n", + "EPOCH: 2735 LOSS: 61.1868560956\n", + "EPOCH: 2736 LOSS: 61.1868003204\n", + "EPOCH: 2737 LOSS: 61.1867445724\n", + "EPOCH: 2738 LOSS: 61.1866888515\n", + "EPOCH: 2739 LOSS: 61.1866331576\n", + "EPOCH: 2740 LOSS: 61.1865774909\n", + "EPOCH: 2741 LOSS: 61.1865218512\n", + "EPOCH: 2742 LOSS: 61.1864662385\n", + "EPOCH: 2743 LOSS: 61.1864106527\n", + "EPOCH: 2744 LOSS: 61.1863550939\n", + "EPOCH: 2745 LOSS: 61.186299562\n", + "EPOCH: 2746 LOSS: 61.186244057\n", + "EPOCH: 2747 LOSS: 61.1861885788\n", + "EPOCH: 2748 LOSS: 61.1861331274\n", + "EPOCH: 2749 LOSS: 61.1860777028\n", + "EPOCH: 2750 LOSS: 61.1860223049\n", + "EPOCH: 2751 LOSS: 61.1859669338\n", + "EPOCH: 2752 LOSS: 61.1859115893\n", + "EPOCH: 2753 LOSS: 61.1858562714\n", + "EPOCH: 2754 LOSS: 61.1858009802\n", + "EPOCH: 2755 LOSS: 61.1857457156\n", + "EPOCH: 2756 LOSS: 61.1856904774\n", + "EPOCH: 2757 LOSS: 61.1856352659\n", + "EPOCH: 2758 LOSS: 61.1855800808\n", + "EPOCH: 2759 LOSS: 61.1855249221\n", + "EPOCH: 2760 LOSS: 61.1854697899\n", + "EPOCH: 2761 LOSS: 61.185414684\n", + "EPOCH: 2762 
LOSS: 61.1853596045\n", + "EPOCH: 2763 LOSS: 61.1853045514\n", + "EPOCH: 2764 LOSS: 61.1852495245\n", + "EPOCH: 2765 LOSS: 61.1851945239\n", + "EPOCH: 2766 LOSS: 61.1851395495\n", + "EPOCH: 2767 LOSS: 61.1850846013\n", + "EPOCH: 2768 LOSS: 61.1850296793\n", + "EPOCH: 2769 LOSS: 61.1849747834\n", + "EPOCH: 2770 LOSS: 61.1849199136\n", + "EPOCH: 2771 LOSS: 61.1848650699\n", + "EPOCH: 2772 LOSS: 61.1848102522\n", + "EPOCH: 2773 LOSS: 61.1847554606\n", + "EPOCH: 2774 LOSS: 61.1847006949\n", + "EPOCH: 2775 LOSS: 61.1846459552\n", + "EPOCH: 2776 LOSS: 61.1845912414\n", + "EPOCH: 2777 LOSS: 61.1845365534\n", + "EPOCH: 2778 LOSS: 61.1844818914\n", + "EPOCH: 2779 LOSS: 61.1844272551\n", + "EPOCH: 2780 LOSS: 61.1843726447\n", + "EPOCH: 2781 LOSS: 61.18431806\n", + "EPOCH: 2782 LOSS: 61.1842635011\n", + "EPOCH: 2783 LOSS: 61.1842089678\n", + "EPOCH: 2784 LOSS: 61.1841544602\n", + "EPOCH: 2785 LOSS: 61.1840999783\n", + "EPOCH: 2786 LOSS: 61.184045522\n", + "EPOCH: 2787 LOSS: 61.1839910912\n", + "EPOCH: 2788 LOSS: 61.1839366861\n", + "EPOCH: 2789 LOSS: 61.1838823064\n", + "EPOCH: 2790 LOSS: 61.1838279522\n", + "EPOCH: 2791 LOSS: 61.1837736235\n", + "EPOCH: 2792 LOSS: 61.1837193202\n", + "EPOCH: 2793 LOSS: 61.1836650423\n", + "EPOCH: 2794 LOSS: 61.1836107898\n", + "EPOCH: 2795 LOSS: 61.1835565627\n", + "EPOCH: 2796 LOSS: 61.1835023608\n", + "EPOCH: 2797 LOSS: 61.1834481842\n", + "EPOCH: 2798 LOSS: 61.1833940329\n", + "EPOCH: 2799 LOSS: 61.1833399068\n", + "EPOCH: 2800 LOSS: 61.1832858059\n", + "EPOCH: 2801 LOSS: 61.1832317301\n", + "EPOCH: 2802 LOSS: 61.1831776795\n", + "EPOCH: 2803 LOSS: 61.183123654\n", + "EPOCH: 2804 LOSS: 61.1830696536\n", + "EPOCH: 2805 LOSS: 61.1830156782\n", + "EPOCH: 2806 LOSS: 61.1829617278\n", + "EPOCH: 2807 LOSS: 61.1829078025\n", + "EPOCH: 2808 LOSS: 61.182853902\n", + "EPOCH: 2809 LOSS: 61.1828000265\n", + "EPOCH: 2810 LOSS: 61.1827461759\n", + "EPOCH: 2811 LOSS: 61.1826923502\n", + "EPOCH: 2812 LOSS: 61.1826385493\n", + "EPOCH: 2813 LOSS: 61.1825847733\n", + "EPOCH: 2814 LOSS: 61.182531022\n", + "EPOCH: 2815 LOSS: 61.1824772955\n", + "EPOCH: 2816 LOSS: 61.1824235937\n", + "EPOCH: 2817 LOSS: 61.1823699166\n", + "EPOCH: 2818 LOSS: 61.1823162641\n", + "EPOCH: 2819 LOSS: 61.1822626363\n", + "EPOCH: 2820 LOSS: 61.1822090332\n", + "EPOCH: 2821 LOSS: 61.1821554546\n", + "EPOCH: 2822 LOSS: 61.1821019005\n", + "EPOCH: 2823 LOSS: 61.182048371\n", + "EPOCH: 2824 LOSS: 61.181994866\n", + "EPOCH: 2825 LOSS: 61.1819413855\n", + "EPOCH: 2826 LOSS: 61.1818879294\n", + "EPOCH: 2827 LOSS: 61.1818344977\n", + "EPOCH: 2828 LOSS: 61.1817810904\n", + "EPOCH: 2829 LOSS: 61.1817277075\n", + "EPOCH: 2830 LOSS: 61.1816743489\n", + "EPOCH: 2831 LOSS: 61.1816210146\n", + "EPOCH: 2832 LOSS: 61.1815677046\n", + "EPOCH: 2833 LOSS: 61.1815144188\n", + "EPOCH: 2834 LOSS: 61.1814611572\n", + "EPOCH: 2835 LOSS: 61.1814079199\n", + "EPOCH: 2836 LOSS: 61.1813547067\n", + "EPOCH: 2837 LOSS: 61.1813015176\n", + "EPOCH: 2838 LOSS: 61.1812483527\n", + "EPOCH: 2839 LOSS: 61.1811952118\n", + "EPOCH: 2840 LOSS: 61.181142095\n", + "EPOCH: 2841 LOSS: 61.1810890022\n", + "EPOCH: 2842 LOSS: 61.1810359334\n", + "EPOCH: 2843 LOSS: 61.1809828886\n", + "EPOCH: 2844 LOSS: 61.1809298677\n", + "EPOCH: 2845 LOSS: 61.1808768707\n", + "EPOCH: 2846 LOSS: 61.1808238977\n", + "EPOCH: 2847 LOSS: 61.1807709485\n", + "EPOCH: 2848 LOSS: 61.1807180231\n", + "EPOCH: 2849 LOSS: 61.1806651215\n", + "EPOCH: 2850 LOSS: 61.1806122437\n", + "EPOCH: 2851 LOSS: 61.1805593897\n", + "EPOCH: 2852 LOSS: 61.1805065594\n", + "EPOCH: 2853 LOSS: 
61.1804537528\n", + "EPOCH: 2854 LOSS: 61.1804009699\n", + "EPOCH: 2855 LOSS: 61.1803482107\n", + "EPOCH: 2856 LOSS: 61.180295475\n", + "EPOCH: 2857 LOSS: 61.1802427629\n", + "EPOCH: 2858 LOSS: 61.1801900745\n", + "EPOCH: 2859 LOSS: 61.1801374095\n", + "EPOCH: 2860 LOSS: 61.1800847681\n", + "EPOCH: 2861 LOSS: 61.1800321502\n", + "EPOCH: 2862 LOSS: 61.1799795557\n", + "EPOCH: 2863 LOSS: 61.1799269846\n", + "EPOCH: 2864 LOSS: 61.179874437\n", + "EPOCH: 2865 LOSS: 61.1798219128\n", + "EPOCH: 2866 LOSS: 61.1797694119\n", + "EPOCH: 2867 LOSS: 61.1797169343\n", + "EPOCH: 2868 LOSS: 61.1796644801\n", + "EPOCH: 2869 LOSS: 61.1796120491\n", + "EPOCH: 2870 LOSS: 61.1795596414\n", + "EPOCH: 2871 LOSS: 61.1795072569\n", + "EPOCH: 2872 LOSS: 61.1794548956\n", + "EPOCH: 2873 LOSS: 61.1794025574\n", + "EPOCH: 2874 LOSS: 61.1793502425\n", + "EPOCH: 2875 LOSS: 61.1792979506\n", + "EPOCH: 2876 LOSS: 61.1792456819\n", + "EPOCH: 2877 LOSS: 61.1791934362\n", + "EPOCH: 2878 LOSS: 61.1791412136\n", + "EPOCH: 2879 LOSS: 61.179089014\n", + "EPOCH: 2880 LOSS: 61.1790368374\n", + "EPOCH: 2881 LOSS: 61.1789846837\n", + "EPOCH: 2882 LOSS: 61.178932553\n", + "EPOCH: 2883 LOSS: 61.1788804452\n", + "EPOCH: 2884 LOSS: 61.1788283603\n", + "EPOCH: 2885 LOSS: 61.1787762983\n", + "EPOCH: 2886 LOSS: 61.1787242592\n", + "EPOCH: 2887 LOSS: 61.1786722428\n", + "EPOCH: 2888 LOSS: 61.1786202492\n", + "EPOCH: 2889 LOSS: 61.1785682784\n", + "EPOCH: 2890 LOSS: 61.1785163304\n", + "EPOCH: 2891 LOSS: 61.178464405\n", + "EPOCH: 2892 LOSS: 61.1784125024\n", + "EPOCH: 2893 LOSS: 61.1783606224\n", + "EPOCH: 2894 LOSS: 61.1783087651\n", + "EPOCH: 2895 LOSS: 61.1782569304\n", + "EPOCH: 2896 LOSS: 61.1782051182\n", + "EPOCH: 2897 LOSS: 61.1781533287\n", + "EPOCH: 2898 LOSS: 61.1781015617\n", + "EPOCH: 2899 LOSS: 61.1780498172\n", + "EPOCH: 2900 LOSS: 61.1779980952\n", + "EPOCH: 2901 LOSS: 61.1779463957\n", + "EPOCH: 2902 LOSS: 61.1778947186\n", + "EPOCH: 2903 LOSS: 61.1778430639\n", + "EPOCH: 2904 LOSS: 61.1777914316\n", + "EPOCH: 2905 LOSS: 61.1777398217\n", + "EPOCH: 2906 LOSS: 61.1776882342\n", + "EPOCH: 2907 LOSS: 61.1776366689\n", + "EPOCH: 2908 LOSS: 61.177585126\n", + "EPOCH: 2909 LOSS: 61.1775336054\n", + "EPOCH: 2910 LOSS: 61.177482107\n", + "EPOCH: 2911 LOSS: 61.1774306308\n", + "EPOCH: 2912 LOSS: 61.1773791768\n", + "EPOCH: 2913 LOSS: 61.177327745\n", + "EPOCH: 2914 LOSS: 61.1772763353\n", + "EPOCH: 2915 LOSS: 61.1772249478\n", + "EPOCH: 2916 LOSS: 61.1771735824\n", + "EPOCH: 2917 LOSS: 61.1771222391\n", + "EPOCH: 2918 LOSS: 61.1770709178\n", + "EPOCH: 2919 LOSS: 61.1770196185\n", + "EPOCH: 2920 LOSS: 61.1769683413\n", + "EPOCH: 2921 LOSS: 61.1769170861\n", + "EPOCH: 2922 LOSS: 61.1768658528\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 2923 LOSS: 61.1768146414\n", + "EPOCH: 2924 LOSS: 61.176763452\n", + "EPOCH: 2925 LOSS: 61.1767122845\n", + "EPOCH: 2926 LOSS: 61.1766611388\n", + "EPOCH: 2927 LOSS: 61.176610015\n", + "EPOCH: 2928 LOSS: 61.176558913\n", + "EPOCH: 2929 LOSS: 61.1765078328\n", + "EPOCH: 2930 LOSS: 61.1764567743\n", + "EPOCH: 2931 LOSS: 61.1764057376\n", + "EPOCH: 2932 LOSS: 61.1763547227\n", + "EPOCH: 2933 LOSS: 61.1763037294\n", + "EPOCH: 2934 LOSS: 61.1762527579\n", + "EPOCH: 2935 LOSS: 61.176201808\n", + "EPOCH: 2936 LOSS: 61.1761508797\n", + "EPOCH: 2937 LOSS: 61.176099973\n", + "EPOCH: 2938 LOSS: 61.1760490879\n", + "EPOCH: 2939 LOSS: 61.1759982244\n", + "EPOCH: 2940 LOSS: 61.1759473825\n", + "EPOCH: 2941 LOSS: 61.175896562\n", + "EPOCH: 2942 LOSS: 61.1758457631\n", + 
"EPOCH: 2943 LOSS: 61.1757949856\n", + "EPOCH: 2944 LOSS: 61.1757442295\n", + "EPOCH: 2945 LOSS: 61.1756934949\n", + "EPOCH: 2946 LOSS: 61.1756427818\n", + "EPOCH: 2947 LOSS: 61.1755920899\n", + "EPOCH: 2948 LOSS: 61.1755414195\n", + "EPOCH: 2949 LOSS: 61.1754907704\n", + "EPOCH: 2950 LOSS: 61.1754401426\n", + "EPOCH: 2951 LOSS: 61.1753895361\n", + "EPOCH: 2952 LOSS: 61.1753389508\n", + "EPOCH: 2953 LOSS: 61.1752883869\n", + "EPOCH: 2954 LOSS: 61.1752378441\n", + "EPOCH: 2955 LOSS: 61.1751873225\n", + "EPOCH: 2956 LOSS: 61.1751368222\n", + "EPOCH: 2957 LOSS: 61.1750863429\n", + "EPOCH: 2958 LOSS: 61.1750358849\n", + "EPOCH: 2959 LOSS: 61.1749854479\n", + "EPOCH: 2960 LOSS: 61.174935032\n", + "EPOCH: 2961 LOSS: 61.1748846372\n", + "EPOCH: 2962 LOSS: 61.1748342634\n", + "EPOCH: 2963 LOSS: 61.1747839107\n", + "EPOCH: 2964 LOSS: 61.174733579\n", + "EPOCH: 2965 LOSS: 61.1746832682\n", + "EPOCH: 2966 LOSS: 61.1746329784\n", + "EPOCH: 2967 LOSS: 61.1745827095\n", + "EPOCH: 2968 LOSS: 61.1745324616\n", + "EPOCH: 2969 LOSS: 61.1744822345\n", + "EPOCH: 2970 LOSS: 61.1744320284\n", + "EPOCH: 2971 LOSS: 61.174381843\n", + "EPOCH: 2972 LOSS: 61.1743316785\n", + "EPOCH: 2973 LOSS: 61.1742815348\n", + "EPOCH: 2974 LOSS: 61.1742314119\n", + "EPOCH: 2975 LOSS: 61.1741813098\n", + "EPOCH: 2976 LOSS: 61.1741312284\n", + "EPOCH: 2977 LOSS: 61.1740811677\n", + "EPOCH: 2978 LOSS: 61.1740311277\n", + "EPOCH: 2979 LOSS: 61.1739811084\n", + "EPOCH: 2980 LOSS: 61.1739311097\n", + "EPOCH: 2981 LOSS: 61.1738811317\n", + "EPOCH: 2982 LOSS: 61.1738311743\n", + "EPOCH: 2983 LOSS: 61.1737812375\n", + "EPOCH: 2984 LOSS: 61.1737313213\n", + "EPOCH: 2985 LOSS: 61.1736814256\n", + "EPOCH: 2986 LOSS: 61.1736315504\n", + "EPOCH: 2987 LOSS: 61.1735816957\n", + "EPOCH: 2988 LOSS: 61.1735318615\n", + "EPOCH: 2989 LOSS: 61.1734820478\n", + "EPOCH: 2990 LOSS: 61.1734322546\n", + "EPOCH: 2991 LOSS: 61.1733824817\n", + "EPOCH: 2992 LOSS: 61.1733327293\n", + "EPOCH: 2993 LOSS: 61.1732829972\n", + "EPOCH: 2994 LOSS: 61.1732332855\n", + "EPOCH: 2995 LOSS: 61.1731835941\n", + "EPOCH: 2996 LOSS: 61.173133923\n", + "EPOCH: 2997 LOSS: 61.1730842723\n", + "EPOCH: 2998 LOSS: 61.1730346418\n", + "EPOCH: 2999 LOSS: 61.1729850316\n", + "EPOCH: 3000 LOSS: 61.1729354416\n", + "EPOCH: 3001 LOSS: 61.1728858718\n", + "EPOCH: 3002 LOSS: 61.1728363222\n", + "EPOCH: 3003 LOSS: 61.1727867928\n", + "EPOCH: 3004 LOSS: 61.1727372835\n", + "EPOCH: 3005 LOSS: 61.1726877944\n", + "EPOCH: 3006 LOSS: 61.1726383254\n", + "EPOCH: 3007 LOSS: 61.1725888765\n", + "EPOCH: 3008 LOSS: 61.1725394476\n", + "EPOCH: 3009 LOSS: 61.1724900388\n", + "EPOCH: 3010 LOSS: 61.17244065\n", + "EPOCH: 3011 LOSS: 61.1723912812\n", + "EPOCH: 3012 LOSS: 61.1723419324\n", + "EPOCH: 3013 LOSS: 61.1722926036\n", + "EPOCH: 3014 LOSS: 61.1722432947\n", + "EPOCH: 3015 LOSS: 61.1721940058\n", + "EPOCH: 3016 LOSS: 61.1721447368\n", + "EPOCH: 3017 LOSS: 61.1720954876\n", + "EPOCH: 3018 LOSS: 61.1720462583\n", + "EPOCH: 3019 LOSS: 61.1719970489\n", + "EPOCH: 3020 LOSS: 61.1719478593\n", + "EPOCH: 3021 LOSS: 61.1718986895\n", + "EPOCH: 3022 LOSS: 61.1718495395\n", + "EPOCH: 3023 LOSS: 61.1718004092\n", + "EPOCH: 3024 LOSS: 61.1717512987\n", + "EPOCH: 3025 LOSS: 61.1717022079\n", + "EPOCH: 3026 LOSS: 61.1716531369\n", + "EPOCH: 3027 LOSS: 61.1716040855\n", + "EPOCH: 3028 LOSS: 61.1715550538\n", + "EPOCH: 3029 LOSS: 61.1715060417\n", + "EPOCH: 3030 LOSS: 61.1714570493\n", + "EPOCH: 3031 LOSS: 61.1714080765\n", + "EPOCH: 3032 LOSS: 61.1713591232\n", + "EPOCH: 3033 LOSS: 61.1713101896\n", + "EPOCH: 
3034 LOSS: 61.1712612755\n", + "EPOCH: 3035 LOSS: 61.1712123809\n", + "EPOCH: 3036 LOSS: 61.1711635058\n", + "EPOCH: 3037 LOSS: 61.1711146502\n", + "EPOCH: 3038 LOSS: 61.1710658141\n", + "EPOCH: 3039 LOSS: 61.1710169974\n", + "EPOCH: 3040 LOSS: 61.1709682002\n", + "EPOCH: 3041 LOSS: 61.1709194224\n", + "EPOCH: 3042 LOSS: 61.170870664\n", + "EPOCH: 3043 LOSS: 61.1708219249\n", + "EPOCH: 3044 LOSS: 61.1707732053\n", + "EPOCH: 3045 LOSS: 61.1707245049\n", + "EPOCH: 3046 LOSS: 61.1706758239\n", + "EPOCH: 3047 LOSS: 61.1706271621\n", + "EPOCH: 3048 LOSS: 61.1705785197\n", + "EPOCH: 3049 LOSS: 61.1705298965\n", + "EPOCH: 3050 LOSS: 61.1704812926\n", + "EPOCH: 3051 LOSS: 61.1704327078\n", + "EPOCH: 3052 LOSS: 61.1703841423\n", + "EPOCH: 3053 LOSS: 61.170335596\n", + "EPOCH: 3054 LOSS: 61.1702870688\n", + "EPOCH: 3055 LOSS: 61.1702385607\n", + "EPOCH: 3056 LOSS: 61.1701900718\n", + "EPOCH: 3057 LOSS: 61.1701416021\n", + "EPOCH: 3058 LOSS: 61.1700931514\n", + "EPOCH: 3059 LOSS: 61.1700447197\n", + "EPOCH: 3060 LOSS: 61.1699963071\n", + "EPOCH: 3061 LOSS: 61.1699479136\n", + "EPOCH: 3062 LOSS: 61.1698995391\n", + "EPOCH: 3063 LOSS: 61.1698511835\n", + "EPOCH: 3064 LOSS: 61.169802847\n", + "EPOCH: 3065 LOSS: 61.1697545294\n", + "EPOCH: 3066 LOSS: 61.1697062307\n", + "EPOCH: 3067 LOSS: 61.169657951\n", + "EPOCH: 3068 LOSS: 61.1696096901\n", + "EPOCH: 3069 LOSS: 61.1695614482\n", + "EPOCH: 3070 LOSS: 61.1695132251\n", + "EPOCH: 3071 LOSS: 61.1694650209\n", + "EPOCH: 3072 LOSS: 61.1694168355\n", + "EPOCH: 3073 LOSS: 61.1693686689\n", + "EPOCH: 3074 LOSS: 61.1693205211\n", + "EPOCH: 3075 LOSS: 61.1692723921\n", + "EPOCH: 3076 LOSS: 61.1692242818\n", + "EPOCH: 3077 LOSS: 61.1691761903\n", + "EPOCH: 3078 LOSS: 61.1691281175\n", + "EPOCH: 3079 LOSS: 61.1690800634\n", + "EPOCH: 3080 LOSS: 61.169032028\n", + "EPOCH: 3081 LOSS: 61.1689840112\n", + "EPOCH: 3082 LOSS: 61.1689360131\n", + "EPOCH: 3083 LOSS: 61.1688880336\n", + "EPOCH: 3084 LOSS: 61.1688400728\n", + "EPOCH: 3085 LOSS: 61.1687921305\n", + "EPOCH: 3086 LOSS: 61.1687442068\n", + "EPOCH: 3087 LOSS: 61.1686963017\n", + "EPOCH: 3088 LOSS: 61.1686484151\n", + "EPOCH: 3089 LOSS: 61.168600547\n", + "EPOCH: 3090 LOSS: 61.1685526974\n", + "EPOCH: 3091 LOSS: 61.1685048663\n", + "EPOCH: 3092 LOSS: 61.1684570537\n", + "EPOCH: 3093 LOSS: 61.1684092595\n", + "EPOCH: 3094 LOSS: 61.1683614838\n", + "EPOCH: 3095 LOSS: 61.1683137265\n", + "EPOCH: 3096 LOSS: 61.1682659876\n", + "EPOCH: 3097 LOSS: 61.168218267\n", + "EPOCH: 3098 LOSS: 61.1681705649\n", + "EPOCH: 3099 LOSS: 61.168122881\n", + "EPOCH: 3100 LOSS: 61.1680752155\n", + "EPOCH: 3101 LOSS: 61.1680275683\n", + "EPOCH: 3102 LOSS: 61.1679799394\n", + "EPOCH: 3103 LOSS: 61.1679323288\n", + "EPOCH: 3104 LOSS: 61.1678847364\n", + "EPOCH: 3105 LOSS: 61.1678371623\n", + "EPOCH: 3106 LOSS: 61.1677896064\n", + "EPOCH: 3107 LOSS: 61.1677420687\n", + "EPOCH: 3108 LOSS: 61.1676945492\n", + "EPOCH: 3109 LOSS: 61.1676470479\n", + "EPOCH: 3110 LOSS: 61.1675995647\n", + "EPOCH: 3111 LOSS: 61.1675520996\n", + "EPOCH: 3112 LOSS: 61.1675046527\n", + "EPOCH: 3113 LOSS: 61.1674572238\n", + "EPOCH: 3114 LOSS: 61.1674098131\n", + "EPOCH: 3115 LOSS: 61.1673624204\n", + "EPOCH: 3116 LOSS: 61.1673150458\n", + "EPOCH: 3117 LOSS: 61.1672676892\n", + "EPOCH: 3118 LOSS: 61.1672203506\n", + "EPOCH: 3119 LOSS: 61.16717303\n", + "EPOCH: 3120 LOSS: 61.1671257274\n", + "EPOCH: 3121 LOSS: 61.1670784428\n", + "EPOCH: 3122 LOSS: 61.1670311761\n", + "EPOCH: 3123 LOSS: 61.1669839274\n", + "EPOCH: 3124 LOSS: 61.1669366965\n", + "EPOCH: 3125 LOSS: 
61.1668894836\n", + "EPOCH: 3126 LOSS: 61.1668422885\n", + "EPOCH: 3127 LOSS: 61.1667951113\n", + "EPOCH: 3128 LOSS: 61.166747952\n", + "EPOCH: 3129 LOSS: 61.1667008105\n", + "EPOCH: 3130 LOSS: 61.1666536868\n", + "EPOCH: 3131 LOSS: 61.1666065809\n", + "EPOCH: 3132 LOSS: 61.1665594927\n", + "EPOCH: 3133 LOSS: 61.1665124224\n", + "EPOCH: 3134 LOSS: 61.1664653698\n", + "EPOCH: 3135 LOSS: 61.1664183349\n", + "EPOCH: 3136 LOSS: 61.1663713177\n", + "EPOCH: 3137 LOSS: 61.1663243182\n", + "EPOCH: 3138 LOSS: 61.1662773364\n", + "EPOCH: 3139 LOSS: 61.1662303723\n", + "EPOCH: 3140 LOSS: 61.1661834258\n", + "EPOCH: 3141 LOSS: 61.166136497\n", + "EPOCH: 3142 LOSS: 61.1660895857\n", + "EPOCH: 3143 LOSS: 61.1660426921\n", + "EPOCH: 3144 LOSS: 61.1659958161\n", + "EPOCH: 3145 LOSS: 61.1659489576\n", + "EPOCH: 3146 LOSS: 61.1659021166\n", + "EPOCH: 3147 LOSS: 61.1658552932\n", + "EPOCH: 3148 LOSS: 61.1658084873\n", + "EPOCH: 3149 LOSS: 61.165761699\n", + "EPOCH: 3150 LOSS: 61.1657149281\n", + "EPOCH: 3151 LOSS: 61.1656681746\n", + "EPOCH: 3152 LOSS: 61.1656214387\n", + "EPOCH: 3153 LOSS: 61.1655747201\n", + "EPOCH: 3154 LOSS: 61.165528019\n", + "EPOCH: 3155 LOSS: 61.1654813353\n", + "EPOCH: 3156 LOSS: 61.1654346689\n", + "EPOCH: 3157 LOSS: 61.16538802\n", + "EPOCH: 3158 LOSS: 61.1653413884\n", + "EPOCH: 3159 LOSS: 61.1652947741\n", + "EPOCH: 3160 LOSS: 61.1652481772\n", + "EPOCH: 3161 LOSS: 61.1652015976\n", + "EPOCH: 3162 LOSS: 61.1651550352\n", + "EPOCH: 3163 LOSS: 61.1651084902\n", + "EPOCH: 3164 LOSS: 61.1650619624\n", + "EPOCH: 3165 LOSS: 61.1650154518\n", + "EPOCH: 3166 LOSS: 61.1649689585\n", + "EPOCH: 3167 LOSS: 61.1649224824\n", + "EPOCH: 3168 LOSS: 61.1648760235\n", + "EPOCH: 3169 LOSS: 61.1648295817\n", + "EPOCH: 3170 LOSS: 61.1647831572\n", + "EPOCH: 3171 LOSS: 61.1647367498\n", + "EPOCH: 3172 LOSS: 61.1646903595\n", + "EPOCH: 3173 LOSS: 61.1646439863\n", + "EPOCH: 3174 LOSS: 61.1645976303\n", + "EPOCH: 3175 LOSS: 61.1645512913\n", + "EPOCH: 3176 LOSS: 61.1645049694\n", + "EPOCH: 3177 LOSS: 61.1644586646\n", + "EPOCH: 3178 LOSS: 61.1644123768\n", + "EPOCH: 3179 LOSS: 61.164366106\n", + "EPOCH: 3180 LOSS: 61.1643198522\n", + "EPOCH: 3181 LOSS: 61.1642736155\n", + "EPOCH: 3182 LOSS: 61.1642273957\n", + "EPOCH: 3183 LOSS: 61.1641811928\n", + "EPOCH: 3184 LOSS: 61.164135007\n", + "EPOCH: 3185 LOSS: 61.164088838\n", + "EPOCH: 3186 LOSS: 61.164042686\n", + "EPOCH: 3187 LOSS: 61.1639965509\n", + "EPOCH: 3188 LOSS: 61.1639504327\n", + "EPOCH: 3189 LOSS: 61.1639043313\n", + "EPOCH: 3190 LOSS: 61.1638582468\n", + "EPOCH: 3191 LOSS: 61.1638121791\n", + "EPOCH: 3192 LOSS: 61.1637661283\n", + "EPOCH: 3193 LOSS: 61.1637200943\n", + "EPOCH: 3194 LOSS: 61.1636740771\n", + "EPOCH: 3195 LOSS: 61.1636280766\n", + "EPOCH: 3196 LOSS: 61.163582093\n", + "EPOCH: 3197 LOSS: 61.163536126\n", + "EPOCH: 3198 LOSS: 61.1634901758\n", + "EPOCH: 3199 LOSS: 61.1634442424\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 3200 LOSS: 61.1633983256\n", + "EPOCH: 3201 LOSS: 61.1633524256\n", + "EPOCH: 3202 LOSS: 61.1633065422\n", + "EPOCH: 3203 LOSS: 61.1632606754\n", + "EPOCH: 3204 LOSS: 61.1632148254\n", + "EPOCH: 3205 LOSS: 61.1631689919\n", + "EPOCH: 3206 LOSS: 61.1631231751\n", + "EPOCH: 3207 LOSS: 61.1630773748\n", + "EPOCH: 3208 LOSS: 61.1630315912\n", + "EPOCH: 3209 LOSS: 61.1629858241\n", + "EPOCH: 3210 LOSS: 61.1629400736\n", + "EPOCH: 3211 LOSS: 61.1628943397\n", + "EPOCH: 3212 LOSS: 61.1628486222\n", + "EPOCH: 3213 LOSS: 61.1628029213\n", + "EPOCH: 3214 LOSS: 61.1627572369\n", + 
"EPOCH: 3215 LOSS: 61.1627115689\n", + "EPOCH: 3216 LOSS: 61.1626659174\n", + "EPOCH: 3217 LOSS: 61.1626202824\n", + "EPOCH: 3218 LOSS: 61.1625746639\n", + "EPOCH: 3219 LOSS: 61.1625290617\n", + "EPOCH: 3220 LOSS: 61.162483476\n", + "EPOCH: 3221 LOSS: 61.1624379066\n", + "EPOCH: 3222 LOSS: 61.1623923537\n", + "EPOCH: 3223 LOSS: 61.1623468171\n", + "EPOCH: 3224 LOSS: 61.1623012969\n", + "EPOCH: 3225 LOSS: 61.162255793\n", + "EPOCH: 3226 LOSS: 61.1622103054\n", + "EPOCH: 3227 LOSS: 61.1621648341\n", + "EPOCH: 3228 LOSS: 61.1621193792\n", + "EPOCH: 3229 LOSS: 61.1620739405\n", + "EPOCH: 3230 LOSS: 61.1620285181\n", + "EPOCH: 3231 LOSS: 61.1619831119\n", + "EPOCH: 3232 LOSS: 61.161937722\n", + "EPOCH: 3233 LOSS: 61.1618923483\n", + "EPOCH: 3234 LOSS: 61.1618469908\n", + "EPOCH: 3235 LOSS: 61.1618016495\n", + "EPOCH: 3236 LOSS: 61.1617563244\n", + "EPOCH: 3237 LOSS: 61.1617110155\n", + "EPOCH: 3238 LOSS: 61.1616657227\n", + "EPOCH: 3239 LOSS: 61.1616204461\n", + "EPOCH: 3240 LOSS: 61.1615751855\n", + "EPOCH: 3241 LOSS: 61.1615299411\n", + "EPOCH: 3242 LOSS: 61.1614847128\n", + "EPOCH: 3243 LOSS: 61.1614395006\n", + "EPOCH: 3244 LOSS: 61.1613943045\n", + "EPOCH: 3245 LOSS: 61.1613491244\n", + "EPOCH: 3246 LOSS: 61.1613039603\n", + "EPOCH: 3247 LOSS: 61.1612588123\n", + "EPOCH: 3248 LOSS: 61.1612136803\n", + "EPOCH: 3249 LOSS: 61.1611685643\n", + "EPOCH: 3250 LOSS: 61.1611234642\n", + "EPOCH: 3251 LOSS: 61.1610783802\n", + "EPOCH: 3252 LOSS: 61.1610333121\n", + "EPOCH: 3253 LOSS: 61.1609882599\n", + "EPOCH: 3254 LOSS: 61.1609432237\n", + "EPOCH: 3255 LOSS: 61.1608982034\n", + "EPOCH: 3256 LOSS: 61.160853199\n", + "EPOCH: 3257 LOSS: 61.1608082105\n", + "EPOCH: 3258 LOSS: 61.1607632379\n", + "EPOCH: 3259 LOSS: 61.1607182811\n", + "EPOCH: 3260 LOSS: 61.1606733402\n", + "EPOCH: 3261 LOSS: 61.1606284151\n", + "EPOCH: 3262 LOSS: 61.1605835059\n", + "EPOCH: 3263 LOSS: 61.1605386124\n", + "EPOCH: 3264 LOSS: 61.1604937347\n", + "EPOCH: 3265 LOSS: 61.1604488729\n", + "EPOCH: 3266 LOSS: 61.1604040267\n", + "EPOCH: 3267 LOSS: 61.1603591964\n", + "EPOCH: 3268 LOSS: 61.1603143818\n", + "EPOCH: 3269 LOSS: 61.1602695829\n", + "EPOCH: 3270 LOSS: 61.1602247997\n", + "EPOCH: 3271 LOSS: 61.1601800322\n", + "EPOCH: 3272 LOSS: 61.1601352804\n", + "EPOCH: 3273 LOSS: 61.1600905443\n", + "EPOCH: 3274 LOSS: 61.1600458238\n", + "EPOCH: 3275 LOSS: 61.160001119\n", + "EPOCH: 3276 LOSS: 61.1599564299\n", + "EPOCH: 3277 LOSS: 61.1599117563\n", + "EPOCH: 3278 LOSS: 61.1598670983\n", + "EPOCH: 3279 LOSS: 61.159822456\n", + "EPOCH: 3280 LOSS: 61.1597778292\n", + "EPOCH: 3281 LOSS: 61.159733218\n", + "EPOCH: 3282 LOSS: 61.1596886223\n", + "EPOCH: 3283 LOSS: 61.1596440422\n", + "EPOCH: 3284 LOSS: 61.1595994776\n", + "EPOCH: 3285 LOSS: 61.1595549285\n", + "EPOCH: 3286 LOSS: 61.1595103949\n", + "EPOCH: 3287 LOSS: 61.1594658769\n", + "EPOCH: 3288 LOSS: 61.1594213742\n", + "EPOCH: 3289 LOSS: 61.1593768871\n", + "EPOCH: 3290 LOSS: 61.1593324154\n", + "EPOCH: 3291 LOSS: 61.1592879591\n", + "EPOCH: 3292 LOSS: 61.1592435183\n", + "EPOCH: 3293 LOSS: 61.1591990928\n", + "EPOCH: 3294 LOSS: 61.1591546828\n", + "EPOCH: 3295 LOSS: 61.1591102881\n", + "EPOCH: 3296 LOSS: 61.1590659088\n", + "EPOCH: 3297 LOSS: 61.1590215449\n", + "EPOCH: 3298 LOSS: 61.1589771963\n", + "EPOCH: 3299 LOSS: 61.158932863\n", + "EPOCH: 3300 LOSS: 61.1588885451\n", + "EPOCH: 3301 LOSS: 61.1588442425\n", + "EPOCH: 3302 LOSS: 61.1587999551\n", + "EPOCH: 3303 LOSS: 61.1587556831\n", + "EPOCH: 3304 LOSS: 61.1587114263\n", + "EPOCH: 3305 LOSS: 61.1586671847\n", + "EPOCH: 3306 
LOSS: 61.1586229585\n", + "EPOCH: 3307 LOSS: 61.1585787474\n", + "EPOCH: 3308 LOSS: 61.1585345515\n", + "EPOCH: 3309 LOSS: 61.1584903709\n", + "EPOCH: 3310 LOSS: 61.1584462054\n", + "EPOCH: 3311 LOSS: 61.1584020552\n", + "EPOCH: 3312 LOSS: 61.1583579201\n", + "EPOCH: 3313 LOSS: 61.1583138001\n", + "EPOCH: 3314 LOSS: 61.1582696953\n", + "EPOCH: 3315 LOSS: 61.1582256056\n", + "EPOCH: 3316 LOSS: 61.158181531\n", + "EPOCH: 3317 LOSS: 61.1581374716\n", + "EPOCH: 3318 LOSS: 61.1580934272\n", + "EPOCH: 3319 LOSS: 61.1580493979\n", + "EPOCH: 3320 LOSS: 61.1580053837\n", + "EPOCH: 3321 LOSS: 61.1579613845\n", + "EPOCH: 3322 LOSS: 61.1579174003\n", + "EPOCH: 3323 LOSS: 61.1578734312\n", + "EPOCH: 3324 LOSS: 61.1578294771\n", + "EPOCH: 3325 LOSS: 61.157785538\n", + "EPOCH: 3326 LOSS: 61.1577416139\n", + "EPOCH: 3327 LOSS: 61.1576977048\n", + "EPOCH: 3328 LOSS: 61.1576538107\n", + "EPOCH: 3329 LOSS: 61.1576099314\n", + "EPOCH: 3330 LOSS: 61.1575660672\n", + "EPOCH: 3331 LOSS: 61.1575222179\n", + "EPOCH: 3332 LOSS: 61.1574783834\n", + "EPOCH: 3333 LOSS: 61.1574345639\n", + "EPOCH: 3334 LOSS: 61.1573907593\n", + "EPOCH: 3335 LOSS: 61.1573469696\n", + "EPOCH: 3336 LOSS: 61.1573031947\n", + "EPOCH: 3337 LOSS: 61.1572594347\n", + "EPOCH: 3338 LOSS: 61.1572156895\n", + "EPOCH: 3339 LOSS: 61.1571719592\n", + "EPOCH: 3340 LOSS: 61.1571282436\n", + "EPOCH: 3341 LOSS: 61.1570845429\n", + "EPOCH: 3342 LOSS: 61.157040857\n", + "EPOCH: 3343 LOSS: 61.1569971859\n", + "EPOCH: 3344 LOSS: 61.1569535295\n", + "EPOCH: 3345 LOSS: 61.1569098879\n", + "EPOCH: 3346 LOSS: 61.1568662611\n", + "EPOCH: 3347 LOSS: 61.1568226489\n", + "EPOCH: 3348 LOSS: 61.1567790515\n", + "EPOCH: 3349 LOSS: 61.1567354688\n", + "EPOCH: 3350 LOSS: 61.1566919008\n", + "EPOCH: 3351 LOSS: 61.1566483475\n", + "EPOCH: 3352 LOSS: 61.1566048089\n", + "EPOCH: 3353 LOSS: 61.156561285\n", + "EPOCH: 3354 LOSS: 61.1565177756\n", + "EPOCH: 3355 LOSS: 61.156474281\n", + "EPOCH: 3356 LOSS: 61.1564308009\n", + "EPOCH: 3357 LOSS: 61.1563873355\n", + "EPOCH: 3358 LOSS: 61.1563438847\n", + "EPOCH: 3359 LOSS: 61.1563004484\n", + "EPOCH: 3360 LOSS: 61.1562570268\n", + "EPOCH: 3361 LOSS: 61.1562136197\n", + "EPOCH: 3362 LOSS: 61.1561702272\n", + "EPOCH: 3363 LOSS: 61.1561268492\n", + "EPOCH: 3364 LOSS: 61.1560834857\n", + "EPOCH: 3365 LOSS: 61.1560401368\n", + "EPOCH: 3366 LOSS: 61.1559968024\n", + "EPOCH: 3367 LOSS: 61.1559534825\n", + "EPOCH: 3368 LOSS: 61.155910177\n", + "EPOCH: 3369 LOSS: 61.1558668861\n", + "EPOCH: 3370 LOSS: 61.1558236096\n", + "EPOCH: 3371 LOSS: 61.1557803475\n", + "EPOCH: 3372 LOSS: 61.1557370999\n", + "EPOCH: 3373 LOSS: 61.1556938668\n", + "EPOCH: 3374 LOSS: 61.155650648\n", + "EPOCH: 3375 LOSS: 61.1556074437\n", + "EPOCH: 3376 LOSS: 61.1555642537\n", + "EPOCH: 3377 LOSS: 61.1555210781\n", + "EPOCH: 3378 LOSS: 61.1554779169\n", + "EPOCH: 3379 LOSS: 61.1554347701\n", + "EPOCH: 3380 LOSS: 61.1553916376\n", + "EPOCH: 3381 LOSS: 61.1553485194\n", + "EPOCH: 3382 LOSS: 61.1553054156\n", + "EPOCH: 3383 LOSS: 61.1552623261\n", + "EPOCH: 3384 LOSS: 61.1552192509\n", + "EPOCH: 3385 LOSS: 61.1551761899\n", + "EPOCH: 3386 LOSS: 61.1551331433\n", + "EPOCH: 3387 LOSS: 61.1550901109\n", + "EPOCH: 3388 LOSS: 61.1550470928\n", + "EPOCH: 3389 LOSS: 61.1550040889\n", + "EPOCH: 3390 LOSS: 61.1549610993\n", + "EPOCH: 3391 LOSS: 61.1549181239\n", + "EPOCH: 3392 LOSS: 61.1548751627\n", + "EPOCH: 3393 LOSS: 61.1548322157\n", + "EPOCH: 3394 LOSS: 61.1547892829\n", + "EPOCH: 3395 LOSS: 61.1547463642\n", + "EPOCH: 3396 LOSS: 61.1547034598\n", + "EPOCH: 3397 LOSS: 
61.1546605694\n", + "EPOCH: 3398 LOSS: 61.1546176933\n", + "EPOCH: 3399 LOSS: 61.1545748312\n", + "EPOCH: 3400 LOSS: 61.1545319833\n", + "EPOCH: 3401 LOSS: 61.1544891495\n", + "EPOCH: 3402 LOSS: 61.1544463298\n", + "EPOCH: 3403 LOSS: 61.1544035242\n", + "EPOCH: 3404 LOSS: 61.1543607327\n", + "EPOCH: 3405 LOSS: 61.1543179552\n", + "EPOCH: 3406 LOSS: 61.1542751918\n", + "EPOCH: 3407 LOSS: 61.1542324425\n", + "EPOCH: 3408 LOSS: 61.1541897071\n", + "EPOCH: 3409 LOSS: 61.1541469858\n", + "EPOCH: 3410 LOSS: 61.1541042785\n", + "EPOCH: 3411 LOSS: 61.1540615853\n", + "EPOCH: 3412 LOSS: 61.154018906\n", + "EPOCH: 3413 LOSS: 61.1539762406\n", + "EPOCH: 3414 LOSS: 61.1539335893\n", + "EPOCH: 3415 LOSS: 61.1538909519\n", + "EPOCH: 3416 LOSS: 61.1538483284\n", + "EPOCH: 3417 LOSS: 61.1538057189\n", + "EPOCH: 3418 LOSS: 61.1537631233\n", + "EPOCH: 3419 LOSS: 61.1537205417\n", + "EPOCH: 3420 LOSS: 61.1536779739\n", + "EPOCH: 3421 LOSS: 61.15363542\n", + "EPOCH: 3422 LOSS: 61.15359288\n", + "EPOCH: 3423 LOSS: 61.1535503539\n", + "EPOCH: 3424 LOSS: 61.1535078416\n", + "EPOCH: 3425 LOSS: 61.1534653432\n", + "EPOCH: 3426 LOSS: 61.1534228586\n", + "EPOCH: 3427 LOSS: 61.1533803879\n", + "EPOCH: 3428 LOSS: 61.153337931\n", + "EPOCH: 3429 LOSS: 61.1532954878\n", + "EPOCH: 3430 LOSS: 61.1532530585\n", + "EPOCH: 3431 LOSS: 61.153210643\n", + "EPOCH: 3432 LOSS: 61.1531682412\n", + "EPOCH: 3433 LOSS: 61.1531258532\n", + "EPOCH: 3434 LOSS: 61.1530834789\n", + "EPOCH: 3435 LOSS: 61.1530411184\n", + "EPOCH: 3436 LOSS: 61.1529987716\n", + "EPOCH: 3437 LOSS: 61.1529564386\n", + "EPOCH: 3438 LOSS: 61.1529141192\n", + "EPOCH: 3439 LOSS: 61.1528718136\n", + "EPOCH: 3440 LOSS: 61.1528295216\n", + "EPOCH: 3441 LOSS: 61.1527872434\n", + "EPOCH: 3442 LOSS: 61.1527449788\n", + "EPOCH: 3443 LOSS: 61.1527027278\n", + "EPOCH: 3444 LOSS: 61.1526604905\n", + "EPOCH: 3445 LOSS: 61.1526182669\n", + "EPOCH: 3446 LOSS: 61.1525760568\n", + "EPOCH: 3447 LOSS: 61.1525338604\n", + "EPOCH: 3448 LOSS: 61.1524916776\n", + "EPOCH: 3449 LOSS: 61.1524495084\n", + "EPOCH: 3450 LOSS: 61.1524073527\n", + "EPOCH: 3451 LOSS: 61.1523652107\n", + "EPOCH: 3452 LOSS: 61.1523230822\n", + "EPOCH: 3453 LOSS: 61.1522809673\n", + "EPOCH: 3454 LOSS: 61.1522388659\n", + "EPOCH: 3455 LOSS: 61.152196778\n", + "EPOCH: 3456 LOSS: 61.1521547037\n", + "EPOCH: 3457 LOSS: 61.1521126429\n", + "EPOCH: 3458 LOSS: 61.1520705956\n", + "EPOCH: 3459 LOSS: 61.1520285617\n", + "EPOCH: 3460 LOSS: 61.1519865414\n", + "EPOCH: 3461 LOSS: 61.1519445345\n", + "EPOCH: 3462 LOSS: 61.1519025411\n", + "EPOCH: 3463 LOSS: 61.1518605612\n", + "EPOCH: 3464 LOSS: 61.1518185947\n", + "EPOCH: 3465 LOSS: 61.1517766416\n", + "EPOCH: 3466 LOSS: 61.1517347019\n", + "EPOCH: 3467 LOSS: 61.1516927757\n", + "EPOCH: 3468 LOSS: 61.1516508628\n", + "EPOCH: 3469 LOSS: 61.1516089634\n", + "EPOCH: 3470 LOSS: 61.1515670773\n", + "EPOCH: 3471 LOSS: 61.1515252046\n", + "EPOCH: 3472 LOSS: 61.1514833453\n", + "EPOCH: 3473 LOSS: 61.1514414993\n", + "EPOCH: 3474 LOSS: 61.1513996666\n", + "EPOCH: 3475 LOSS: 61.1513578473\n", + "EPOCH: 3476 LOSS: 61.1513160413\n", + "EPOCH: 3477 LOSS: 61.1512742486\n", + "EPOCH: 3478 LOSS: 61.1512324692\n", + "EPOCH: 3479 LOSS: 61.1511907031\n", + "EPOCH: 3480 LOSS: 61.1511489503\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 3481 LOSS: 61.1511072108\n", + "EPOCH: 3482 LOSS: 61.1510654845\n", + "EPOCH: 3483 LOSS: 61.1510237715\n", + "EPOCH: 3484 LOSS: 61.1509820717\n", + "EPOCH: 3485 LOSS: 61.1509403851\n", + "EPOCH: 3486 LOSS: 61.1508987118\n", + 
"EPOCH: 3487 LOSS: 61.1508570516\n", + "EPOCH: 3488 LOSS: 61.1508154047\n", + "EPOCH: 3489 LOSS: 61.150773771\n", + "EPOCH: 3490 LOSS: 61.1507321504\n", + "EPOCH: 3491 LOSS: 61.1506905431\n", + "EPOCH: 3492 LOSS: 61.1506489488\n", + "EPOCH: 3493 LOSS: 61.1506073678\n", + "EPOCH: 3494 LOSS: 61.1505657999\n", + "EPOCH: 3495 LOSS: 61.1505242451\n", + "EPOCH: 3496 LOSS: 61.1504827034\n", + "EPOCH: 3497 LOSS: 61.1504411749\n", + "EPOCH: 3498 LOSS: 61.1503996594\n", + "EPOCH: 3499 LOSS: 61.1503581571\n", + "EPOCH: 3500 LOSS: 61.1503166678\n", + "EPOCH: 3501 LOSS: 61.1502751916\n", + "EPOCH: 3502 LOSS: 61.1502337285\n", + "EPOCH: 3503 LOSS: 61.1501922784\n", + "EPOCH: 3504 LOSS: 61.1501508414\n", + "EPOCH: 3505 LOSS: 61.1501094174\n", + "EPOCH: 3506 LOSS: 61.1500680064\n", + "EPOCH: 3507 LOSS: 61.1500266085\n", + "EPOCH: 3508 LOSS: 61.1499852236\n", + "EPOCH: 3509 LOSS: 61.1499438516\n", + "EPOCH: 3510 LOSS: 61.1499024927\n", + "EPOCH: 3511 LOSS: 61.1498611467\n", + "EPOCH: 3512 LOSS: 61.1498198137\n", + "EPOCH: 3513 LOSS: 61.1497784937\n", + "EPOCH: 3514 LOSS: 61.1497371866\n", + "EPOCH: 3515 LOSS: 61.1496958924\n", + "EPOCH: 3516 LOSS: 61.1496546112\n", + "EPOCH: 3517 LOSS: 61.1496133429\n", + "EPOCH: 3518 LOSS: 61.1495720875\n", + "EPOCH: 3519 LOSS: 61.149530845\n", + "EPOCH: 3520 LOSS: 61.1494896154\n", + "EPOCH: 3521 LOSS: 61.1494483987\n", + "EPOCH: 3522 LOSS: 61.1494071949\n", + "EPOCH: 3523 LOSS: 61.1493660039\n", + "EPOCH: 3524 LOSS: 61.1493248258\n", + "EPOCH: 3525 LOSS: 61.1492836606\n", + "EPOCH: 3526 LOSS: 61.1492425082\n", + "EPOCH: 3527 LOSS: 61.1492013686\n", + "EPOCH: 3528 LOSS: 61.1491602418\n", + "EPOCH: 3529 LOSS: 61.1491191278\n", + "EPOCH: 3530 LOSS: 61.1490780266\n", + "EPOCH: 3531 LOSS: 61.1490369383\n", + "EPOCH: 3532 LOSS: 61.1489958627\n", + "EPOCH: 3533 LOSS: 61.1489547998\n", + "EPOCH: 3534 LOSS: 61.1489137498\n", + "EPOCH: 3535 LOSS: 61.1488727125\n", + "EPOCH: 3536 LOSS: 61.1488316879\n", + "EPOCH: 3537 LOSS: 61.1487906761\n", + "EPOCH: 3538 LOSS: 61.148749677\n", + "EPOCH: 3539 LOSS: 61.1487086906\n", + "EPOCH: 3540 LOSS: 61.1486677169\n", + "EPOCH: 3541 LOSS: 61.1486267559\n", + "EPOCH: 3542 LOSS: 61.1485858077\n", + "EPOCH: 3543 LOSS: 61.148544872\n", + "EPOCH: 3544 LOSS: 61.1485039491\n", + "EPOCH: 3545 LOSS: 61.1484630388\n", + "EPOCH: 3546 LOSS: 61.1484221412\n", + "EPOCH: 3547 LOSS: 61.1483812562\n", + "EPOCH: 3548 LOSS: 61.1483403839\n", + "EPOCH: 3549 LOSS: 61.1482995242\n", + "EPOCH: 3550 LOSS: 61.1482586771\n", + "EPOCH: 3551 LOSS: 61.1482178426\n", + "EPOCH: 3552 LOSS: 61.1481770207\n", + "EPOCH: 3553 LOSS: 61.1481362113\n", + "EPOCH: 3554 LOSS: 61.1480954146\n", + "EPOCH: 3555 LOSS: 61.1480546305\n", + "EPOCH: 3556 LOSS: 61.1480138589\n", + "EPOCH: 3557 LOSS: 61.1479730998\n", + "EPOCH: 3558 LOSS: 61.1479323533\n", + "EPOCH: 3559 LOSS: 61.1478916194\n", + "EPOCH: 3560 LOSS: 61.1478508979\n", + "EPOCH: 3561 LOSS: 61.147810189\n", + "EPOCH: 3562 LOSS: 61.1477694926\n", + "EPOCH: 3563 LOSS: 61.1477288087\n", + "EPOCH: 3564 LOSS: 61.1476881373\n", + "EPOCH: 3565 LOSS: 61.1476474783\n", + "EPOCH: 3566 LOSS: 61.1476068319\n", + "EPOCH: 3567 LOSS: 61.1475661979\n", + "EPOCH: 3568 LOSS: 61.1475255763\n", + "EPOCH: 3569 LOSS: 61.1474849672\n", + "EPOCH: 3570 LOSS: 61.1474443706\n", + "EPOCH: 3571 LOSS: 61.1474037863\n", + "EPOCH: 3572 LOSS: 61.1473632145\n", + "EPOCH: 3573 LOSS: 61.1473226551\n", + "EPOCH: 3574 LOSS: 61.1472821081\n", + "EPOCH: 3575 LOSS: 61.1472415735\n", + "EPOCH: 3576 LOSS: 61.1472010513\n", + "EPOCH: 3577 LOSS: 61.1471605415\n", + "EPOCH: 
3578 LOSS: 61.147120044\n", + "EPOCH: 3579 LOSS: 61.1470795589\n", + "EPOCH: 3580 LOSS: 61.1470390861\n", + "EPOCH: 3581 LOSS: 61.1469986257\n", + "EPOCH: 3582 LOSS: 61.1469581776\n", + "EPOCH: 3583 LOSS: 61.1469177418\n", + "EPOCH: 3584 LOSS: 61.1468773184\n", + "EPOCH: 3585 LOSS: 61.1468369072\n", + "EPOCH: 3586 LOSS: 61.1467965084\n", + "EPOCH: 3587 LOSS: 61.1467561218\n", + "EPOCH: 3588 LOSS: 61.1467157476\n", + "EPOCH: 3589 LOSS: 61.1466753856\n", + "EPOCH: 3590 LOSS: 61.1466350358\n", + "EPOCH: 3591 LOSS: 61.1465946983\n", + "EPOCH: 3592 LOSS: 61.1465543731\n", + "EPOCH: 3593 LOSS: 61.1465140601\n", + "EPOCH: 3594 LOSS: 61.1464737593\n", + "EPOCH: 3595 LOSS: 61.1464334708\n", + "EPOCH: 3596 LOSS: 61.1463931944\n", + "EPOCH: 3597 LOSS: 61.1463529303\n", + "EPOCH: 3598 LOSS: 61.1463126783\n", + "EPOCH: 3599 LOSS: 61.1462724386\n", + "EPOCH: 3600 LOSS: 61.146232211\n", + "EPOCH: 3601 LOSS: 61.1461919956\n", + "EPOCH: 3602 LOSS: 61.1461517924\n", + "EPOCH: 3603 LOSS: 61.1461116013\n", + "EPOCH: 3604 LOSS: 61.1460714223\n", + "EPOCH: 3605 LOSS: 61.1460312555\n", + "EPOCH: 3606 LOSS: 61.1459911008\n", + "EPOCH: 3607 LOSS: 61.1459509583\n", + "EPOCH: 3608 LOSS: 61.1459108278\n", + "EPOCH: 3609 LOSS: 61.1458707095\n", + "EPOCH: 3610 LOSS: 61.1458306032\n", + "EPOCH: 3611 LOSS: 61.145790509\n", + "EPOCH: 3612 LOSS: 61.1457504269\n", + "EPOCH: 3613 LOSS: 61.1457103569\n", + "EPOCH: 3614 LOSS: 61.145670299\n", + "EPOCH: 3615 LOSS: 61.145630253\n", + "EPOCH: 3616 LOSS: 61.1455902192\n", + "EPOCH: 3617 LOSS: 61.1455501973\n", + "EPOCH: 3618 LOSS: 61.1455101875\n", + "EPOCH: 3619 LOSS: 61.1454701898\n", + "EPOCH: 3620 LOSS: 61.145430204\n", + "EPOCH: 3621 LOSS: 61.1453902302\n", + "EPOCH: 3622 LOSS: 61.1453502684\n", + "EPOCH: 3623 LOSS: 61.1453103186\n", + "EPOCH: 3624 LOSS: 61.1452703808\n", + "EPOCH: 3625 LOSS: 61.145230455\n", + "EPOCH: 3626 LOSS: 61.1451905411\n", + "EPOCH: 3627 LOSS: 61.1451506392\n", + "EPOCH: 3628 LOSS: 61.1451107492\n", + "EPOCH: 3629 LOSS: 61.1450708711\n", + "EPOCH: 3630 LOSS: 61.145031005\n", + "EPOCH: 3631 LOSS: 61.1449911508\n", + "EPOCH: 3632 LOSS: 61.1449513085\n", + "EPOCH: 3633 LOSS: 61.1449114781\n", + "EPOCH: 3634 LOSS: 61.1448716596\n", + "EPOCH: 3635 LOSS: 61.144831853\n", + "EPOCH: 3636 LOSS: 61.1447920583\n", + "EPOCH: 3637 LOSS: 61.1447522755\n", + "EPOCH: 3638 LOSS: 61.1447125045\n", + "EPOCH: 3639 LOSS: 61.1446727454\n", + "EPOCH: 3640 LOSS: 61.1446329981\n", + "EPOCH: 3641 LOSS: 61.1445932627\n", + "EPOCH: 3642 LOSS: 61.144553539\n", + "EPOCH: 3643 LOSS: 61.1445138273\n", + "EPOCH: 3644 LOSS: 61.1444741273\n", + "EPOCH: 3645 LOSS: 61.1444344391\n", + "EPOCH: 3646 LOSS: 61.1443947628\n", + "EPOCH: 3647 LOSS: 61.1443550982\n", + "EPOCH: 3648 LOSS: 61.1443154454\n", + "EPOCH: 3649 LOSS: 61.1442758044\n", + "EPOCH: 3650 LOSS: 61.1442361751\n", + "EPOCH: 3651 LOSS: 61.1441965577\n", + "EPOCH: 3652 LOSS: 61.1441569519\n", + "EPOCH: 3653 LOSS: 61.1441173579\n", + "EPOCH: 3654 LOSS: 61.1440777757\n", + "EPOCH: 3655 LOSS: 61.1440382052\n", + "EPOCH: 3656 LOSS: 61.1439986463\n", + "EPOCH: 3657 LOSS: 61.1439590992\n", + "EPOCH: 3658 LOSS: 61.1439195639\n", + "EPOCH: 3659 LOSS: 61.1438800402\n", + "EPOCH: 3660 LOSS: 61.1438405282\n", + "EPOCH: 3661 LOSS: 61.1438010278\n", + "EPOCH: 3662 LOSS: 61.1437615392\n", + "EPOCH: 3663 LOSS: 61.1437220622\n", + "EPOCH: 3664 LOSS: 61.1436825969\n", + "EPOCH: 3665 LOSS: 61.1436431432\n", + "EPOCH: 3666 LOSS: 61.1436037011\n", + "EPOCH: 3667 LOSS: 61.1435642707\n", + "EPOCH: 3668 LOSS: 61.143524852\n", + "EPOCH: 3669 LOSS: 
61.1434854448\n", + "EPOCH: 3670 LOSS: 61.1434460492\n", + "EPOCH: 3671 LOSS: 61.1434066653\n", + "EPOCH: 3672 LOSS: 61.1433672929\n", + "EPOCH: 3673 LOSS: 61.1433279322\n", + "EPOCH: 3674 LOSS: 61.143288583\n", + "EPOCH: 3675 LOSS: 61.1432492454\n", + "EPOCH: 3676 LOSS: 61.1432099193\n", + "EPOCH: 3677 LOSS: 61.1431706048\n", + "EPOCH: 3678 LOSS: 61.1431313019\n", + "EPOCH: 3679 LOSS: 61.1430920105\n", + "EPOCH: 3680 LOSS: 61.1430527306\n", + "EPOCH: 3681 LOSS: 61.1430134623\n", + "EPOCH: 3682 LOSS: 61.1429742054\n", + "EPOCH: 3683 LOSS: 61.1429349601\n", + "EPOCH: 3684 LOSS: 61.1428957263\n", + "EPOCH: 3685 LOSS: 61.142856504\n", + "EPOCH: 3686 LOSS: 61.1428172931\n", + "EPOCH: 3687 LOSS: 61.1427780938\n", + "EPOCH: 3688 LOSS: 61.1427389059\n", + "EPOCH: 3689 LOSS: 61.1426997295\n", + "EPOCH: 3690 LOSS: 61.1426605645\n", + "EPOCH: 3691 LOSS: 61.142621411\n", + "EPOCH: 3692 LOSS: 61.142582269\n", + "EPOCH: 3693 LOSS: 61.1425431383\n", + "EPOCH: 3694 LOSS: 61.1425040191\n", + "EPOCH: 3695 LOSS: 61.1424649113\n", + "EPOCH: 3696 LOSS: 61.142425815\n", + "EPOCH: 3697 LOSS: 61.14238673\n", + "EPOCH: 3698 LOSS: 61.1423476564\n", + "EPOCH: 3699 LOSS: 61.1423085942\n", + "EPOCH: 3700 LOSS: 61.1422695434\n", + "EPOCH: 3701 LOSS: 61.142230504\n", + "EPOCH: 3702 LOSS: 61.142191476\n", + "EPOCH: 3703 LOSS: 61.1421524593\n", + "EPOCH: 3704 LOSS: 61.1421134539\n", + "EPOCH: 3705 LOSS: 61.1420744599\n", + "EPOCH: 3706 LOSS: 61.1420354772\n", + "EPOCH: 3707 LOSS: 61.1419965059\n", + "EPOCH: 3708 LOSS: 61.1419575459\n", + "EPOCH: 3709 LOSS: 61.1419185972\n", + "EPOCH: 3710 LOSS: 61.1418796598\n", + "EPOCH: 3711 LOSS: 61.1418407337\n", + "EPOCH: 3712 LOSS: 61.1418018189\n", + "EPOCH: 3713 LOSS: 61.1417629154\n", + "EPOCH: 3714 LOSS: 61.1417240232\n", + "EPOCH: 3715 LOSS: 61.1416851422\n", + "EPOCH: 3716 LOSS: 61.1416462725\n", + "EPOCH: 3717 LOSS: 61.1416074141\n", + "EPOCH: 3718 LOSS: 61.1415685669\n", + "EPOCH: 3719 LOSS: 61.1415297309\n", + "EPOCH: 3720 LOSS: 61.1414909062\n", + "EPOCH: 3721 LOSS: 61.1414520927\n", + "EPOCH: 3722 LOSS: 61.1414132904\n", + "EPOCH: 3723 LOSS: 61.1413744994\n", + "EPOCH: 3724 LOSS: 61.1413357195\n", + "EPOCH: 3725 LOSS: 61.1412969509\n", + "EPOCH: 3726 LOSS: 61.1412581934\n", + "EPOCH: 3727 LOSS: 61.1412194471\n", + "EPOCH: 3728 LOSS: 61.141180712\n", + "EPOCH: 3729 LOSS: 61.1411419881\n", + "EPOCH: 3730 LOSS: 61.1411032753\n", + "EPOCH: 3731 LOSS: 61.1410645737\n", + "EPOCH: 3732 LOSS: 61.1410258832\n", + "EPOCH: 3733 LOSS: 61.1409872039\n", + "EPOCH: 3734 LOSS: 61.1409485357\n", + "EPOCH: 3735 LOSS: 61.1409098786\n", + "EPOCH: 3736 LOSS: 61.1408712327\n", + "EPOCH: 3737 LOSS: 61.1408325978\n", + "EPOCH: 3738 LOSS: 61.1407939741\n", + "EPOCH: 3739 LOSS: 61.1407553614\n", + "EPOCH: 3740 LOSS: 61.1407167599\n", + "EPOCH: 3741 LOSS: 61.1406781694\n", + "EPOCH: 3742 LOSS: 61.14063959\n", + "EPOCH: 3743 LOSS: 61.1406010217\n", + "EPOCH: 3744 LOSS: 61.1405624644\n", + "EPOCH: 3745 LOSS: 61.1405239182\n", + "EPOCH: 3746 LOSS: 61.1404853831\n", + "EPOCH: 3747 LOSS: 61.140446859\n", + "EPOCH: 3748 LOSS: 61.1404083459\n", + "EPOCH: 3749 LOSS: 61.1403698438\n", + "EPOCH: 3750 LOSS: 61.1403313528\n", + "EPOCH: 3751 LOSS: 61.1402928728\n", + "EPOCH: 3752 LOSS: 61.1402544037\n", + "EPOCH: 3753 LOSS: 61.1402159457\n", + "EPOCH: 3754 LOSS: 61.1401774987\n", + "EPOCH: 3755 LOSS: 61.1401390626\n", + "EPOCH: 3756 LOSS: 61.1401006376\n", + "EPOCH: 3757 LOSS: 61.1400622235\n", + "EPOCH: 3758 LOSS: 61.1400238203\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"EPOCH: 3759 LOSS: 61.1399854282\n", + "EPOCH: 3760 LOSS: 61.1399470469\n", + "EPOCH: 3761 LOSS: 61.1399086767\n", + "EPOCH: 3762 LOSS: 61.1398703173\n", + "EPOCH: 3763 LOSS: 61.1398319689\n", + "EPOCH: 3764 LOSS: 61.1397936314\n", + "EPOCH: 3765 LOSS: 61.1397553048\n", + "EPOCH: 3766 LOSS: 61.1397169891\n", + "EPOCH: 3767 LOSS: 61.1396786844\n", + "EPOCH: 3768 LOSS: 61.1396403905\n", + "EPOCH: 3769 LOSS: 61.1396021075\n", + "EPOCH: 3770 LOSS: 61.1395638354\n", + "EPOCH: 3771 LOSS: 61.1395255741\n", + "EPOCH: 3772 LOSS: 61.1394873238\n", + "EPOCH: 3773 LOSS: 61.1394490843\n", + "EPOCH: 3774 LOSS: 61.1394108556\n", + "EPOCH: 3775 LOSS: 61.1393726378\n", + "EPOCH: 3776 LOSS: 61.1393344308\n", + "EPOCH: 3777 LOSS: 61.1392962347\n", + "EPOCH: 3778 LOSS: 61.1392580494\n", + "EPOCH: 3779 LOSS: 61.1392198749\n", + "EPOCH: 3780 LOSS: 61.1391817112\n", + "EPOCH: 3781 LOSS: 61.1391435583\n", + "EPOCH: 3782 LOSS: 61.1391054162\n", + "EPOCH: 3783 LOSS: 61.1390672849\n", + "EPOCH: 3784 LOSS: 61.1390291644\n", + "EPOCH: 3785 LOSS: 61.1389910547\n", + "EPOCH: 3786 LOSS: 61.1389529558\n", + "EPOCH: 3787 LOSS: 61.1389148676\n", + "EPOCH: 3788 LOSS: 61.1388767902\n", + "EPOCH: 3789 LOSS: 61.1388387235\n", + "EPOCH: 3790 LOSS: 61.1388006676\n", + "EPOCH: 3791 LOSS: 61.1387626224\n", + "EPOCH: 3792 LOSS: 61.1387245879\n", + "EPOCH: 3793 LOSS: 61.1386865642\n", + "EPOCH: 3794 LOSS: 61.1386485512\n", + "EPOCH: 3795 LOSS: 61.1386105488\n", + "EPOCH: 3796 LOSS: 61.1385725572\n", + "EPOCH: 3797 LOSS: 61.1385345763\n", + "EPOCH: 3798 LOSS: 61.1384966061\n", + "EPOCH: 3799 LOSS: 61.1384586466\n", + "EPOCH: 3800 LOSS: 61.1384206978\n", + "EPOCH: 3801 LOSS: 61.1383827596\n", + "EPOCH: 3802 LOSS: 61.1383448321\n", + "EPOCH: 3803 LOSS: 61.1383069152\n", + "EPOCH: 3804 LOSS: 61.138269009\n", + "EPOCH: 3805 LOSS: 61.1382311135\n", + "EPOCH: 3806 LOSS: 61.1381932285\n", + "EPOCH: 3807 LOSS: 61.1381553543\n", + "EPOCH: 3808 LOSS: 61.1381174906\n", + "EPOCH: 3809 LOSS: 61.1380796376\n", + "EPOCH: 3810 LOSS: 61.1380417951\n", + "EPOCH: 3811 LOSS: 61.1380039633\n", + "EPOCH: 3812 LOSS: 61.1379661421\n", + "EPOCH: 3813 LOSS: 61.1379283315\n", + "EPOCH: 3814 LOSS: 61.1378905314\n", + "EPOCH: 3815 LOSS: 61.137852742\n", + "EPOCH: 3816 LOSS: 61.1378149631\n", + "EPOCH: 3817 LOSS: 61.1377771947\n", + "EPOCH: 3818 LOSS: 61.137739437\n", + "EPOCH: 3819 LOSS: 61.1377016898\n", + "EPOCH: 3820 LOSS: 61.1376639531\n", + "EPOCH: 3821 LOSS: 61.137626227\n", + "EPOCH: 3822 LOSS: 61.1375885114\n", + "EPOCH: 3823 LOSS: 61.1375508063\n", + "EPOCH: 3824 LOSS: 61.1375131118\n", + "EPOCH: 3825 LOSS: 61.1374754278\n", + "EPOCH: 3826 LOSS: 61.1374377543\n", + "EPOCH: 3827 LOSS: 61.1374000913\n", + "EPOCH: 3828 LOSS: 61.1373624387\n", + "EPOCH: 3829 LOSS: 61.1373247967\n", + "EPOCH: 3830 LOSS: 61.1372871652\n", + "EPOCH: 3831 LOSS: 61.1372495441\n", + "EPOCH: 3832 LOSS: 61.1372119335\n", + "EPOCH: 3833 LOSS: 61.1371743334\n", + "EPOCH: 3834 LOSS: 61.1371367437\n", + "EPOCH: 3835 LOSS: 61.1370991645\n", + "EPOCH: 3836 LOSS: 61.1370615957\n", + "EPOCH: 3837 LOSS: 61.1370240374\n", + "EPOCH: 3838 LOSS: 61.1369864894\n", + "EPOCH: 3839 LOSS: 61.136948952\n", + "EPOCH: 3840 LOSS: 61.1369114249\n", + "EPOCH: 3841 LOSS: 61.1368739083\n", + "EPOCH: 3842 LOSS: 61.136836402\n", + "EPOCH: 3843 LOSS: 61.1367989062\n", + "EPOCH: 3844 LOSS: 61.1367614207\n", + "EPOCH: 3845 LOSS: 61.1367239457\n", + "EPOCH: 3846 LOSS: 61.136686481\n", + "EPOCH: 3847 LOSS: 61.1366490267\n", + "EPOCH: 3848 LOSS: 61.1366115827\n", + "EPOCH: 3849 LOSS: 61.1365741492\n", + "EPOCH: 3850 
LOSS: 61.136536726\n", + "EPOCH: 3851 LOSS: 61.1364993131\n", + "EPOCH: 3852 LOSS: 61.1364619106\n", + "EPOCH: 3853 LOSS: 61.1364245184\n", + "EPOCH: 3854 LOSS: 61.1363871365\n", + "EPOCH: 3855 LOSS: 61.136349765\n", + "EPOCH: 3856 LOSS: 61.1363124038\n", + "EPOCH: 3857 LOSS: 61.1362750529\n", + "EPOCH: 3858 LOSS: 61.1362377123\n", + "EPOCH: 3859 LOSS: 61.136200382\n", + "EPOCH: 3860 LOSS: 61.1361630621\n", + "EPOCH: 3861 LOSS: 61.1361257523\n", + "EPOCH: 3862 LOSS: 61.1360884529\n", + "EPOCH: 3863 LOSS: 61.1360511638\n", + "EPOCH: 3864 LOSS: 61.1360138849\n", + "EPOCH: 3865 LOSS: 61.1359766163\n", + "EPOCH: 3866 LOSS: 61.1359393579\n", + "EPOCH: 3867 LOSS: 61.1359021098\n", + "EPOCH: 3868 LOSS: 61.135864872\n", + "EPOCH: 3869 LOSS: 61.1358276444\n", + "EPOCH: 3870 LOSS: 61.135790427\n", + "EPOCH: 3871 LOSS: 61.1357532198\n", + "EPOCH: 3872 LOSS: 61.1357160229\n", + "EPOCH: 3873 LOSS: 61.1356788362\n", + "EPOCH: 3874 LOSS: 61.1356416597\n", + "EPOCH: 3875 LOSS: 61.1356044933\n", + "EPOCH: 3876 LOSS: 61.1355673372\n", + "EPOCH: 3877 LOSS: 61.1355301913\n", + "EPOCH: 3878 LOSS: 61.1354930556\n", + "EPOCH: 3879 LOSS: 61.13545593\n", + "EPOCH: 3880 LOSS: 61.1354188146\n", + "EPOCH: 3881 LOSS: 61.1353817094\n", + "EPOCH: 3882 LOSS: 61.1353446143\n", + "EPOCH: 3883 LOSS: 61.1353075294\n", + "EPOCH: 3884 LOSS: 61.1352704547\n", + "EPOCH: 3885 LOSS: 61.13523339\n", + "EPOCH: 3886 LOSS: 61.1351963356\n", + "EPOCH: 3887 LOSS: 61.1351592912\n", + "EPOCH: 3888 LOSS: 61.135122257\n", + "EPOCH: 3889 LOSS: 61.1350852329\n", + "EPOCH: 3890 LOSS: 61.1350482189\n", + "EPOCH: 3891 LOSS: 61.135011215\n", + "EPOCH: 3892 LOSS: 61.1349742212\n", + "EPOCH: 3893 LOSS: 61.1349372375\n", + "EPOCH: 3894 LOSS: 61.1349002639\n", + "EPOCH: 3895 LOSS: 61.1348633004\n", + "EPOCH: 3896 LOSS: 61.1348263469\n", + "EPOCH: 3897 LOSS: 61.1347894035\n", + "EPOCH: 3898 LOSS: 61.1347524702\n", + "EPOCH: 3899 LOSS: 61.134715547\n", + "EPOCH: 3900 LOSS: 61.1346786338\n", + "EPOCH: 3901 LOSS: 61.1346417306\n", + "EPOCH: 3902 LOSS: 61.1346048375\n", + "EPOCH: 3903 LOSS: 61.1345679544\n", + "EPOCH: 3904 LOSS: 61.1345310814\n", + "EPOCH: 3905 LOSS: 61.1344942184\n", + "EPOCH: 3906 LOSS: 61.1344573654\n", + "EPOCH: 3907 LOSS: 61.1344205224\n", + "EPOCH: 3908 LOSS: 61.1343836894\n", + "EPOCH: 3909 LOSS: 61.1343468664\n", + "EPOCH: 3910 LOSS: 61.1343100534\n", + "EPOCH: 3911 LOSS: 61.1342732504\n", + "EPOCH: 3912 LOSS: 61.1342364574\n", + "EPOCH: 3913 LOSS: 61.1341996743\n", + "EPOCH: 3914 LOSS: 61.1341629012\n", + "EPOCH: 3915 LOSS: 61.1341261381\n", + "EPOCH: 3916 LOSS: 61.134089385\n", + "EPOCH: 3917 LOSS: 61.1340526418\n", + "EPOCH: 3918 LOSS: 61.1340159085\n", + "EPOCH: 3919 LOSS: 61.1339791852\n", + "EPOCH: 3920 LOSS: 61.1339424719\n", + "EPOCH: 3921 LOSS: 61.1339057684\n", + "EPOCH: 3922 LOSS: 61.1338690749\n", + "EPOCH: 3923 LOSS: 61.1338323913\n", + "EPOCH: 3924 LOSS: 61.1337957176\n", + "EPOCH: 3925 LOSS: 61.1337590538\n", + "EPOCH: 3926 LOSS: 61.1337224\n", + "EPOCH: 3927 LOSS: 61.133685756\n", + "EPOCH: 3928 LOSS: 61.1336491219\n", + "EPOCH: 3929 LOSS: 61.1336124977\n", + "EPOCH: 3930 LOSS: 61.1335758834\n", + "EPOCH: 3931 LOSS: 61.1335392789\n", + "EPOCH: 3932 LOSS: 61.1335026843\n", + "EPOCH: 3933 LOSS: 61.1334660996\n", + "EPOCH: 3934 LOSS: 61.1334295247\n", + "EPOCH: 3935 LOSS: 61.1333929597\n", + "EPOCH: 3936 LOSS: 61.1333564045\n", + "EPOCH: 3937 LOSS: 61.1333198592\n", + "EPOCH: 3938 LOSS: 61.1332833237\n", + "EPOCH: 3939 LOSS: 61.133246798\n", + "EPOCH: 3940 LOSS: 61.1332102822\n", + "EPOCH: 3941 LOSS: 61.1331737761\n", 
+ "EPOCH: 3942 LOSS: 61.1331372799\n", + "EPOCH: 3943 LOSS: 61.1331007935\n", + "EPOCH: 3944 LOSS: 61.1330643169\n", + "EPOCH: 3945 LOSS: 61.13302785\n", + "EPOCH: 3946 LOSS: 61.132991393\n", + "EPOCH: 3947 LOSS: 61.1329549458\n", + "EPOCH: 3948 LOSS: 61.1329185083\n", + "EPOCH: 3949 LOSS: 61.1328820806\n", + "EPOCH: 3950 LOSS: 61.1328456626\n", + "EPOCH: 3951 LOSS: 61.1328092544\n", + "EPOCH: 3952 LOSS: 61.132772856\n", + "EPOCH: 3953 LOSS: 61.1327364673\n", + "EPOCH: 3954 LOSS: 61.1327000884\n", + "EPOCH: 3955 LOSS: 61.1326637192\n", + "EPOCH: 3956 LOSS: 61.1326273598\n", + "EPOCH: 3957 LOSS: 61.13259101\n", + "EPOCH: 3958 LOSS: 61.13255467\n", + "EPOCH: 3959 LOSS: 61.1325183397\n", + "EPOCH: 3960 LOSS: 61.1324820191\n", + "EPOCH: 3961 LOSS: 61.1324457082\n", + "EPOCH: 3962 LOSS: 61.1324094071\n", + "EPOCH: 3963 LOSS: 61.1323731156\n", + "EPOCH: 3964 LOSS: 61.1323368338\n", + "EPOCH: 3965 LOSS: 61.1323005616\n", + "EPOCH: 3966 LOSS: 61.1322642992\n", + "EPOCH: 3967 LOSS: 61.1322280464\n", + "EPOCH: 3968 LOSS: 61.1321918033\n", + "EPOCH: 3969 LOSS: 61.1321555699\n", + "EPOCH: 3970 LOSS: 61.1321193461\n", + "EPOCH: 3971 LOSS: 61.132083132\n", + "EPOCH: 3972 LOSS: 61.1320469275\n", + "EPOCH: 3973 LOSS: 61.1320107326\n", + "EPOCH: 3974 LOSS: 61.1319745474\n", + "EPOCH: 3975 LOSS: 61.1319383718\n", + "EPOCH: 3976 LOSS: 61.1319022058\n", + "EPOCH: 3977 LOSS: 61.1318660495\n", + "EPOCH: 3978 LOSS: 61.1318299027\n", + "EPOCH: 3979 LOSS: 61.1317937656\n", + "EPOCH: 3980 LOSS: 61.1317576381\n", + "EPOCH: 3981 LOSS: 61.1317215201\n", + "EPOCH: 3982 LOSS: 61.1316854118\n", + "EPOCH: 3983 LOSS: 61.131649313\n", + "EPOCH: 3984 LOSS: 61.1316132238\n", + "EPOCH: 3985 LOSS: 61.1315771442\n", + "EPOCH: 3986 LOSS: 61.1315410742\n", + "EPOCH: 3987 LOSS: 61.1315050137\n", + "EPOCH: 3988 LOSS: 61.1314689628\n", + "EPOCH: 3989 LOSS: 61.1314329214\n", + "EPOCH: 3990 LOSS: 61.1313968896\n", + "EPOCH: 3991 LOSS: 61.1313608673\n", + "EPOCH: 3992 LOSS: 61.1313248546\n", + "EPOCH: 3993 LOSS: 61.1312888514\n", + "EPOCH: 3994 LOSS: 61.1312528577\n", + "EPOCH: 3995 LOSS: 61.1312168736\n", + "EPOCH: 3996 LOSS: 61.1311808989\n", + "EPOCH: 3997 LOSS: 61.1311449338\n", + "EPOCH: 3998 LOSS: 61.1311089781\n", + "EPOCH: 3999 LOSS: 61.131073032\n", + "EPOCH: 4000 LOSS: 61.1310370954\n", + "EPOCH: 4001 LOSS: 61.1310011682\n", + "EPOCH: 4002 LOSS: 61.1309652506\n", + "EPOCH: 4003 LOSS: 61.1309293424\n", + "EPOCH: 4004 LOSS: 61.1308934437\n", + "EPOCH: 4005 LOSS: 61.1308575544\n", + "EPOCH: 4006 LOSS: 61.1308216746\n", + "EPOCH: 4007 LOSS: 61.1307858043\n", + "EPOCH: 4008 LOSS: 61.1307499434\n", + "EPOCH: 4009 LOSS: 61.130714092\n", + "EPOCH: 4010 LOSS: 61.13067825\n", + "EPOCH: 4011 LOSS: 61.1306424175\n", + "EPOCH: 4012 LOSS: 61.1306065943\n", + "EPOCH: 4013 LOSS: 61.1305707807\n", + "EPOCH: 4014 LOSS: 61.1305349764\n", + "EPOCH: 4015 LOSS: 61.1304991815\n", + "EPOCH: 4016 LOSS: 61.1304633961\n", + "EPOCH: 4017 LOSS: 61.13042762\n", + "EPOCH: 4018 LOSS: 61.1303918534\n", + "EPOCH: 4019 LOSS: 61.1303560962\n", + "EPOCH: 4020 LOSS: 61.1303203483\n", + "EPOCH: 4021 LOSS: 61.1302846098\n", + "EPOCH: 4022 LOSS: 61.1302488807\n", + "EPOCH: 4023 LOSS: 61.130213161\n", + "EPOCH: 4024 LOSS: 61.1301774507\n", + "EPOCH: 4025 LOSS: 61.1301417497\n", + "EPOCH: 4026 LOSS: 61.130106058\n", + "EPOCH: 4027 LOSS: 61.1300703758\n", + "EPOCH: 4028 LOSS: 61.1300347028\n", + "EPOCH: 4029 LOSS: 61.1299990393\n", + "EPOCH: 4030 LOSS: 61.129963385\n", + "EPOCH: 4031 LOSS: 61.1299277401\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + 
"text": [ + "EPOCH: 4032 LOSS: 61.1298921045\n", + "EPOCH: 4033 LOSS: 61.1298564783\n", + "EPOCH: 4034 LOSS: 61.1298208613\n", + "EPOCH: 4035 LOSS: 61.1297852537\n", + "EPOCH: 4036 LOSS: 61.1297496554\n", + "EPOCH: 4037 LOSS: 61.1297140663\n", + "EPOCH: 4038 LOSS: 61.1296784866\n", + "EPOCH: 4039 LOSS: 61.1296429162\n", + "EPOCH: 4040 LOSS: 61.129607355\n", + "EPOCH: 4041 LOSS: 61.1295718032\n", + "EPOCH: 4042 LOSS: 61.1295362606\n", + "EPOCH: 4043 LOSS: 61.1295007273\n", + "EPOCH: 4044 LOSS: 61.1294652032\n", + "EPOCH: 4045 LOSS: 61.1294296884\n", + "EPOCH: 4046 LOSS: 61.1293941829\n", + "EPOCH: 4047 LOSS: 61.1293586866\n", + "EPOCH: 4048 LOSS: 61.1293231996\n", + "EPOCH: 4049 LOSS: 61.1292877218\n", + "EPOCH: 4050 LOSS: 61.1292522533\n", + "EPOCH: 4051 LOSS: 61.1292167939\n", + "EPOCH: 4052 LOSS: 61.1291813438\n", + "EPOCH: 4053 LOSS: 61.129145903\n", + "EPOCH: 4054 LOSS: 61.1291104713\n", + "EPOCH: 4055 LOSS: 61.1290750489\n", + "EPOCH: 4056 LOSS: 61.1290396356\n", + "EPOCH: 4057 LOSS: 61.1290042316\n", + "EPOCH: 4058 LOSS: 61.1289688367\n", + "EPOCH: 4059 LOSS: 61.1289334511\n", + "EPOCH: 4060 LOSS: 61.1288980746\n", + "EPOCH: 4061 LOSS: 61.1288627074\n", + "EPOCH: 4062 LOSS: 61.1288273493\n", + "EPOCH: 4063 LOSS: 61.1287920003\n", + "EPOCH: 4064 LOSS: 61.1287566606\n", + "EPOCH: 4065 LOSS: 61.12872133\n", + "EPOCH: 4066 LOSS: 61.1286860085\n", + "EPOCH: 4067 LOSS: 61.1286506962\n", + "EPOCH: 4068 LOSS: 61.1286153931\n", + "EPOCH: 4069 LOSS: 61.1285800991\n", + "EPOCH: 4070 LOSS: 61.1285448142\n", + "EPOCH: 4071 LOSS: 61.1285095385\n", + "EPOCH: 4072 LOSS: 61.1284742719\n", + "EPOCH: 4073 LOSS: 61.1284390144\n", + "EPOCH: 4074 LOSS: 61.128403766\n", + "EPOCH: 4075 LOSS: 61.1283685268\n", + "EPOCH: 4076 LOSS: 61.1283332966\n", + "EPOCH: 4077 LOSS: 61.1282980756\n", + "EPOCH: 4078 LOSS: 61.1282628636\n", + "EPOCH: 4079 LOSS: 61.1282276608\n", + "EPOCH: 4080 LOSS: 61.128192467\n", + "EPOCH: 4081 LOSS: 61.1281572824\n", + "EPOCH: 4082 LOSS: 61.1281221068\n", + "EPOCH: 4083 LOSS: 61.1280869402\n", + "EPOCH: 4084 LOSS: 61.1280517828\n", + "EPOCH: 4085 LOSS: 61.1280166344\n", + "EPOCH: 4086 LOSS: 61.1279814951\n", + "EPOCH: 4087 LOSS: 61.1279463648\n", + "EPOCH: 4088 LOSS: 61.1279112436\n", + "EPOCH: 4089 LOSS: 61.1278761314\n", + "EPOCH: 4090 LOSS: 61.1278410282\n", + "EPOCH: 4091 LOSS: 61.1278059341\n", + "EPOCH: 4092 LOSS: 61.127770849\n", + "EPOCH: 4093 LOSS: 61.127735773\n", + "EPOCH: 4094 LOSS: 61.127700706\n", + "EPOCH: 4095 LOSS: 61.127665648\n", + "EPOCH: 4096 LOSS: 61.127630599\n", + "EPOCH: 4097 LOSS: 61.127595559\n", + "EPOCH: 4098 LOSS: 61.127560528\n", + "EPOCH: 4099 LOSS: 61.127525506\n", + "EPOCH: 4100 LOSS: 61.127490493\n", + "EPOCH: 4101 LOSS: 61.1274554889\n", + "EPOCH: 4102 LOSS: 61.1274204939\n", + "EPOCH: 4103 LOSS: 61.1273855079\n", + "EPOCH: 4104 LOSS: 61.1273505308\n", + "EPOCH: 4105 LOSS: 61.1273155627\n", + "EPOCH: 4106 LOSS: 61.1272806035\n", + "EPOCH: 4107 LOSS: 61.1272456533\n", + "EPOCH: 4108 LOSS: 61.1272107121\n", + "EPOCH: 4109 LOSS: 61.1271757798\n", + "EPOCH: 4110 LOSS: 61.1271408565\n", + "EPOCH: 4111 LOSS: 61.1271059421\n", + "EPOCH: 4112 LOSS: 61.1270710366\n", + "EPOCH: 4113 LOSS: 61.1270361401\n", + "EPOCH: 4114 LOSS: 61.1270012524\n", + "EPOCH: 4115 LOSS: 61.1269663738\n", + "EPOCH: 4116 LOSS: 61.126931504\n", + "EPOCH: 4117 LOSS: 61.1268966431\n", + "EPOCH: 4118 LOSS: 61.1268617912\n", + "EPOCH: 4119 LOSS: 61.1268269481\n", + "EPOCH: 4120 LOSS: 61.126792114\n", + "EPOCH: 4121 LOSS: 61.1267572887\n", + "EPOCH: 4122 LOSS: 61.1267224724\n", + "EPOCH: 
4123 LOSS: 61.1266876649\n", + "EPOCH: 4124 LOSS: 61.1266528663\n", + "EPOCH: 4125 LOSS: 61.1266180765\n", + "EPOCH: 4126 LOSS: 61.1265832957\n", + "EPOCH: 4127 LOSS: 61.1265485237\n", + "EPOCH: 4128 LOSS: 61.1265137605\n", + "EPOCH: 4129 LOSS: 61.1264790063\n", + "EPOCH: 4130 LOSS: 61.1264442608\n", + "EPOCH: 4131 LOSS: 61.1264095243\n", + "EPOCH: 4132 LOSS: 61.1263747965\n", + "EPOCH: 4133 LOSS: 61.1263400776\n", + "EPOCH: 4134 LOSS: 61.1263053676\n", + "EPOCH: 4135 LOSS: 61.1262706663\n", + "EPOCH: 4136 LOSS: 61.1262359739\n", + "EPOCH: 4137 LOSS: 61.1262012903\n", + "EPOCH: 4138 LOSS: 61.1261666155\n", + "EPOCH: 4139 LOSS: 61.1261319496\n", + "EPOCH: 4140 LOSS: 61.1260972924\n", + "EPOCH: 4141 LOSS: 61.126062644\n", + "EPOCH: 4142 LOSS: 61.1260280045\n", + "EPOCH: 4143 LOSS: 61.1259933737\n", + "EPOCH: 4144 LOSS: 61.1259587517\n", + "EPOCH: 4145 LOSS: 61.1259241385\n", + "EPOCH: 4146 LOSS: 61.1258895341\n", + "EPOCH: 4147 LOSS: 61.1258549384\n", + "EPOCH: 4148 LOSS: 61.1258203516\n", + "EPOCH: 4149 LOSS: 61.1257857734\n", + "EPOCH: 4150 LOSS: 61.1257512041\n", + "EPOCH: 4151 LOSS: 61.1257166435\n", + "EPOCH: 4152 LOSS: 61.1256820916\n", + "EPOCH: 4153 LOSS: 61.1256475485\n", + "EPOCH: 4154 LOSS: 61.1256130142\n", + "EPOCH: 4155 LOSS: 61.1255784885\n", + "EPOCH: 4156 LOSS: 61.1255439716\n", + "EPOCH: 4157 LOSS: 61.1255094635\n", + "EPOCH: 4158 LOSS: 61.125474964\n", + "EPOCH: 4159 LOSS: 61.1254404733\n", + "EPOCH: 4160 LOSS: 61.1254059913\n", + "EPOCH: 4161 LOSS: 61.125371518\n", + "EPOCH: 4162 LOSS: 61.1253370534\n", + "EPOCH: 4163 LOSS: 61.1253025976\n", + "EPOCH: 4164 LOSS: 61.1252681504\n", + "EPOCH: 4165 LOSS: 61.1252337119\n", + "EPOCH: 4166 LOSS: 61.1251992821\n", + "EPOCH: 4167 LOSS: 61.1251648609\n", + "EPOCH: 4168 LOSS: 61.1251304485\n", + "EPOCH: 4169 LOSS: 61.1250960447\n", + "EPOCH: 4170 LOSS: 61.1250616496\n", + "EPOCH: 4171 LOSS: 61.1250272632\n", + "EPOCH: 4172 LOSS: 61.1249928854\n", + "EPOCH: 4173 LOSS: 61.1249585163\n", + "EPOCH: 4174 LOSS: 61.1249241559\n", + "EPOCH: 4175 LOSS: 61.124889804\n", + "EPOCH: 4176 LOSS: 61.1248554609\n", + "EPOCH: 4177 LOSS: 61.1248211263\n", + "EPOCH: 4178 LOSS: 61.1247868005\n", + "EPOCH: 4179 LOSS: 61.1247524832\n", + "EPOCH: 4180 LOSS: 61.1247181746\n", + "EPOCH: 4181 LOSS: 61.1246838745\n", + "EPOCH: 4182 LOSS: 61.1246495831\n", + "EPOCH: 4183 LOSS: 61.1246153004\n", + "EPOCH: 4184 LOSS: 61.1245810262\n", + "EPOCH: 4185 LOSS: 61.1245467606\n", + "EPOCH: 4186 LOSS: 61.1245125036\n", + "EPOCH: 4187 LOSS: 61.1244782553\n", + "EPOCH: 4188 LOSS: 61.1244440155\n", + "EPOCH: 4189 LOSS: 61.1244097843\n", + "EPOCH: 4190 LOSS: 61.1243755617\n", + "EPOCH: 4191 LOSS: 61.1243413476\n", + "EPOCH: 4192 LOSS: 61.1243071422\n", + "EPOCH: 4193 LOSS: 61.1242729453\n", + "EPOCH: 4194 LOSS: 61.124238757\n", + "EPOCH: 4195 LOSS: 61.1242045772\n", + "EPOCH: 4196 LOSS: 61.124170406\n", + "EPOCH: 4197 LOSS: 61.1241362433\n", + "EPOCH: 4198 LOSS: 61.1241020892\n", + "EPOCH: 4199 LOSS: 61.1240679437\n", + "EPOCH: 4200 LOSS: 61.1240338066\n", + "EPOCH: 4201 LOSS: 61.1239996781\n", + "EPOCH: 4202 LOSS: 61.1239655582\n", + "EPOCH: 4203 LOSS: 61.1239314467\n", + "EPOCH: 4204 LOSS: 61.1238973438\n", + "EPOCH: 4205 LOSS: 61.1238632494\n", + "EPOCH: 4206 LOSS: 61.1238291636\n", + "EPOCH: 4207 LOSS: 61.1237950862\n", + "EPOCH: 4208 LOSS: 61.1237610173\n", + "EPOCH: 4209 LOSS: 61.123726957\n", + "EPOCH: 4210 LOSS: 61.1236929051\n", + "EPOCH: 4211 LOSS: 61.1236588617\n", + "EPOCH: 4212 LOSS: 61.1236248268\n", + "EPOCH: 4213 LOSS: 61.1235908004\n", + "EPOCH: 4214 LOSS: 
61.1235567825\n", + "EPOCH: 4215 LOSS: 61.123522773\n", + "EPOCH: 4216 LOSS: 61.1234887721\n", + "EPOCH: 4217 LOSS: 61.1234547795\n", + "EPOCH: 4218 LOSS: 61.1234207955\n", + "EPOCH: 4219 LOSS: 61.1233868199\n", + "EPOCH: 4220 LOSS: 61.1233528528\n", + "EPOCH: 4221 LOSS: 61.1233188941\n", + "EPOCH: 4222 LOSS: 61.1232849438\n", + "EPOCH: 4223 LOSS: 61.123251002\n", + "EPOCH: 4224 LOSS: 61.1232170687\n", + "EPOCH: 4225 LOSS: 61.1231831437\n", + "EPOCH: 4226 LOSS: 61.1231492272\n", + "EPOCH: 4227 LOSS: 61.1231153192\n", + "EPOCH: 4228 LOSS: 61.1230814195\n", + "EPOCH: 4229 LOSS: 61.1230475283\n", + "EPOCH: 4230 LOSS: 61.1230136454\n", + "EPOCH: 4231 LOSS: 61.122979771\n", + "EPOCH: 4232 LOSS: 61.122945905\n", + "EPOCH: 4233 LOSS: 61.1229120474\n", + "EPOCH: 4234 LOSS: 61.1228781981\n", + "EPOCH: 4235 LOSS: 61.1228443573\n", + "EPOCH: 4236 LOSS: 61.1228105249\n", + "EPOCH: 4237 LOSS: 61.1227767008\n", + "EPOCH: 4238 LOSS: 61.1227428851\n", + "EPOCH: 4239 LOSS: 61.1227090778\n", + "EPOCH: 4240 LOSS: 61.1226752789\n", + "EPOCH: 4241 LOSS: 61.1226414883\n", + "EPOCH: 4242 LOSS: 61.1226077061\n", + "EPOCH: 4243 LOSS: 61.1225739322\n", + "EPOCH: 4244 LOSS: 61.1225401667\n", + "EPOCH: 4245 LOSS: 61.1225064096\n", + "EPOCH: 4246 LOSS: 61.1224726608\n", + "EPOCH: 4247 LOSS: 61.1224389203\n", + "EPOCH: 4248 LOSS: 61.1224051882\n", + "EPOCH: 4249 LOSS: 61.1223714644\n", + "EPOCH: 4250 LOSS: 61.1223377489\n", + "EPOCH: 4251 LOSS: 61.1223040418\n", + "EPOCH: 4252 LOSS: 61.122270343\n", + "EPOCH: 4253 LOSS: 61.1222366525\n", + "EPOCH: 4254 LOSS: 61.1222029703\n", + "EPOCH: 4255 LOSS: 61.1221692964\n", + "EPOCH: 4256 LOSS: 61.1221356308\n", + "EPOCH: 4257 LOSS: 61.1221019736\n", + "EPOCH: 4258 LOSS: 61.1220683246\n", + "EPOCH: 4259 LOSS: 61.1220346839\n", + "EPOCH: 4260 LOSS: 61.1220010515\n", + "EPOCH: 4261 LOSS: 61.1219674274\n", + "EPOCH: 4262 LOSS: 61.1219338115\n", + "EPOCH: 4263 LOSS: 61.121900204\n", + "EPOCH: 4264 LOSS: 61.1218666047\n", + "EPOCH: 4265 LOSS: 61.1218330137\n", + "EPOCH: 4266 LOSS: 61.1217994309\n", + "EPOCH: 4267 LOSS: 61.1217658564\n", + "EPOCH: 4268 LOSS: 61.1217322902\n", + "EPOCH: 4269 LOSS: 61.1216987322\n", + "EPOCH: 4270 LOSS: 61.1216651825\n", + "EPOCH: 4271 LOSS: 61.121631641\n", + "EPOCH: 4272 LOSS: 61.1215981077\n", + "EPOCH: 4273 LOSS: 61.1215645827\n", + "EPOCH: 4274 LOSS: 61.1215310659\n", + "EPOCH: 4275 LOSS: 61.1214975573\n", + "EPOCH: 4276 LOSS: 61.121464057\n", + "EPOCH: 4277 LOSS: 61.1214305649\n", + "EPOCH: 4278 LOSS: 61.121397081\n", + "EPOCH: 4279 LOSS: 61.1213636053\n", + "EPOCH: 4280 LOSS: 61.1213301378\n", + "EPOCH: 4281 LOSS: 61.1212966785\n", + "EPOCH: 4282 LOSS: 61.1212632274\n", + "EPOCH: 4283 LOSS: 61.1212297846\n", + "EPOCH: 4284 LOSS: 61.1211963499\n", + "EPOCH: 4285 LOSS: 61.1211629234\n", + "EPOCH: 4286 LOSS: 61.121129505\n", + "EPOCH: 4287 LOSS: 61.1210960949\n", + "EPOCH: 4288 LOSS: 61.1210626929\n", + "EPOCH: 4289 LOSS: 61.1210292991\n", + "EPOCH: 4290 LOSS: 61.1209959135\n", + "EPOCH: 4291 LOSS: 61.1209625361\n", + "EPOCH: 4292 LOSS: 61.1209291668\n", + "EPOCH: 4293 LOSS: 61.1208958056\n", + "EPOCH: 4294 LOSS: 61.1208624526\n", + "EPOCH: 4295 LOSS: 61.1208291078\n", + "EPOCH: 4296 LOSS: 61.1207957711\n", + "EPOCH: 4297 LOSS: 61.1207624425\n", + "EPOCH: 4298 LOSS: 61.1207291221\n", + "EPOCH: 4299 LOSS: 61.1206958098\n", + "EPOCH: 4300 LOSS: 61.1206625056\n", + "EPOCH: 4301 LOSS: 61.1206292096\n", + "EPOCH: 4302 LOSS: 61.1205959217\n", + "EPOCH: 4303 LOSS: 61.1205626419\n", + "EPOCH: 4304 LOSS: 61.1205293702\n" + ] + }, + { + "name": "stdout", + 
"output_type": "stream", + "text": [ + "EPOCH: 4305 LOSS: 61.1204961066\n", + "EPOCH: 4306 LOSS: 61.1204628511\n", + "EPOCH: 4307 LOSS: 61.1204296038\n", + "EPOCH: 4308 LOSS: 61.1203963645\n", + "EPOCH: 4309 LOSS: 61.1203631333\n", + "EPOCH: 4310 LOSS: 61.1203299102\n", + "EPOCH: 4311 LOSS: 61.1202966952\n", + "EPOCH: 4312 LOSS: 61.1202634883\n", + "EPOCH: 4313 LOSS: 61.1202302894\n", + "EPOCH: 4314 LOSS: 61.1201970986\n", + "EPOCH: 4315 LOSS: 61.1201639159\n", + "EPOCH: 4316 LOSS: 61.1201307413\n", + "EPOCH: 4317 LOSS: 61.1200975747\n", + "EPOCH: 4318 LOSS: 61.1200644162\n", + "EPOCH: 4319 LOSS: 61.1200312658\n", + "EPOCH: 4320 LOSS: 61.1199981233\n", + "EPOCH: 4321 LOSS: 61.119964989\n", + "EPOCH: 4322 LOSS: 61.1199318627\n", + "EPOCH: 4323 LOSS: 61.1198987444\n", + "EPOCH: 4324 LOSS: 61.1198656341\n", + "EPOCH: 4325 LOSS: 61.1198325319\n", + "EPOCH: 4326 LOSS: 61.1197994377\n", + "EPOCH: 4327 LOSS: 61.1197663516\n", + "EPOCH: 4328 LOSS: 61.1197332734\n", + "EPOCH: 4329 LOSS: 61.1197002033\n", + "EPOCH: 4330 LOSS: 61.1196671412\n", + "EPOCH: 4331 LOSS: 61.1196340871\n", + "EPOCH: 4332 LOSS: 61.119601041\n", + "EPOCH: 4333 LOSS: 61.1195680029\n", + "EPOCH: 4334 LOSS: 61.1195349728\n", + "EPOCH: 4335 LOSS: 61.1195019507\n", + "EPOCH: 4336 LOSS: 61.1194689366\n", + "EPOCH: 4337 LOSS: 61.1194359305\n", + "EPOCH: 4338 LOSS: 61.1194029323\n", + "EPOCH: 4339 LOSS: 61.1193699422\n", + "EPOCH: 4340 LOSS: 61.11933696\n", + "EPOCH: 4341 LOSS: 61.1193039857\n", + "EPOCH: 4342 LOSS: 61.1192710195\n", + "EPOCH: 4343 LOSS: 61.1192380612\n", + "EPOCH: 4344 LOSS: 61.1192051109\n", + "EPOCH: 4345 LOSS: 61.1191721685\n", + "EPOCH: 4346 LOSS: 61.1191392341\n", + "EPOCH: 4347 LOSS: 61.1191063076\n", + "EPOCH: 4348 LOSS: 61.1190733891\n", + "EPOCH: 4349 LOSS: 61.1190404785\n", + "EPOCH: 4350 LOSS: 61.1190075758\n", + "EPOCH: 4351 LOSS: 61.1189746811\n", + "EPOCH: 4352 LOSS: 61.1189417943\n", + "EPOCH: 4353 LOSS: 61.1189089154\n", + "EPOCH: 4354 LOSS: 61.1188760445\n", + "EPOCH: 4355 LOSS: 61.1188431815\n", + "EPOCH: 4356 LOSS: 61.1188103264\n", + "EPOCH: 4357 LOSS: 61.1187774792\n", + "EPOCH: 4358 LOSS: 61.1187446399\n", + "EPOCH: 4359 LOSS: 61.1187118085\n", + "EPOCH: 4360 LOSS: 61.118678985\n", + "EPOCH: 4361 LOSS: 61.1186461694\n", + "EPOCH: 4362 LOSS: 61.1186133617\n", + "EPOCH: 4363 LOSS: 61.1185805619\n", + "EPOCH: 4364 LOSS: 61.11854777\n", + "EPOCH: 4365 LOSS: 61.118514986\n", + "EPOCH: 4366 LOSS: 61.1184822098\n", + "EPOCH: 4367 LOSS: 61.1184494415\n", + "EPOCH: 4368 LOSS: 61.1184166811\n", + "EPOCH: 4369 LOSS: 61.1183839285\n", + "EPOCH: 4370 LOSS: 61.1183511839\n", + "EPOCH: 4371 LOSS: 61.118318447\n", + "EPOCH: 4372 LOSS: 61.1182857181\n", + "EPOCH: 4373 LOSS: 61.1182529969\n", + "EPOCH: 4374 LOSS: 61.1182202837\n", + "EPOCH: 4375 LOSS: 61.1181875782\n", + "EPOCH: 4376 LOSS: 61.1181548806\n", + "EPOCH: 4377 LOSS: 61.1181221909\n", + "EPOCH: 4378 LOSS: 61.118089509\n", + "EPOCH: 4379 LOSS: 61.1180568349\n", + "EPOCH: 4380 LOSS: 61.1180241686\n", + "EPOCH: 4381 LOSS: 61.1179915102\n", + "EPOCH: 4382 LOSS: 61.1179588596\n", + "EPOCH: 4383 LOSS: 61.1179262168\n", + "EPOCH: 4384 LOSS: 61.1178935818\n", + "EPOCH: 4385 LOSS: 61.1178609546\n", + "EPOCH: 4386 LOSS: 61.1178283352\n", + "EPOCH: 4387 LOSS: 61.1177957237\n", + "EPOCH: 4388 LOSS: 61.1177631199\n", + "EPOCH: 4389 LOSS: 61.1177305239\n", + "EPOCH: 4390 LOSS: 61.1176979357\n", + "EPOCH: 4391 LOSS: 61.1176653553\n", + "EPOCH: 4392 LOSS: 61.1176327827\n", + "EPOCH: 4393 LOSS: 61.1176002178\n", + "EPOCH: 4394 LOSS: 61.1175676608\n", + "EPOCH: 4395 
LOSS: 61.1175351115\n", + "EPOCH: 4396 LOSS: 61.1175025699\n", + "EPOCH: 4397 LOSS: 61.1174700362\n", + "EPOCH: 4398 LOSS: 61.1174375102\n", + "EPOCH: 4399 LOSS: 61.1174049919\n", + "EPOCH: 4400 LOSS: 61.1173724814\n", + "EPOCH: 4401 LOSS: 61.1173399787\n", + "EPOCH: 4402 LOSS: 61.1173074837\n", + "EPOCH: 4403 LOSS: 61.1172749965\n", + "EPOCH: 4404 LOSS: 61.1172425169\n", + "EPOCH: 4405 LOSS: 61.1172100452\n", + "EPOCH: 4406 LOSS: 61.1171775811\n", + "EPOCH: 4407 LOSS: 61.1171451248\n", + "EPOCH: 4408 LOSS: 61.1171126762\n", + "EPOCH: 4409 LOSS: 61.1170802354\n", + "EPOCH: 4410 LOSS: 61.1170478022\n", + "EPOCH: 4411 LOSS: 61.1170153768\n", + "EPOCH: 4412 LOSS: 61.1169829591\n", + "EPOCH: 4413 LOSS: 61.1169505491\n", + "EPOCH: 4414 LOSS: 61.1169181468\n", + "EPOCH: 4415 LOSS: 61.1168857522\n", + "EPOCH: 4416 LOSS: 61.1168533653\n", + "EPOCH: 4417 LOSS: 61.1168209861\n", + "EPOCH: 4418 LOSS: 61.1167886146\n", + "EPOCH: 4419 LOSS: 61.1167562507\n", + "EPOCH: 4420 LOSS: 61.1167238946\n", + "EPOCH: 4421 LOSS: 61.1166915461\n", + "EPOCH: 4422 LOSS: 61.1166592053\n", + "EPOCH: 4423 LOSS: 61.1166268722\n", + "EPOCH: 4424 LOSS: 61.1165945468\n", + "EPOCH: 4425 LOSS: 61.116562229\n", + "EPOCH: 4426 LOSS: 61.1165299189\n", + "EPOCH: 4427 LOSS: 61.1164976164\n", + "EPOCH: 4428 LOSS: 61.1164653216\n", + "EPOCH: 4429 LOSS: 61.1164330344\n", + "EPOCH: 4430 LOSS: 61.1164007549\n", + "EPOCH: 4431 LOSS: 61.1163684831\n", + "EPOCH: 4432 LOSS: 61.1163362188\n", + "EPOCH: 4433 LOSS: 61.1163039623\n", + "EPOCH: 4434 LOSS: 61.1162717133\n", + "EPOCH: 4435 LOSS: 61.116239472\n", + "EPOCH: 4436 LOSS: 61.1162072383\n", + "EPOCH: 4437 LOSS: 61.1161750123\n", + "EPOCH: 4438 LOSS: 61.1161427938\n", + "EPOCH: 4439 LOSS: 61.116110583\n", + "EPOCH: 4440 LOSS: 61.1160783798\n", + "EPOCH: 4441 LOSS: 61.1160461842\n", + "EPOCH: 4442 LOSS: 61.1160139962\n", + "EPOCH: 4443 LOSS: 61.1159818158\n", + "EPOCH: 4444 LOSS: 61.115949643\n", + "EPOCH: 4445 LOSS: 61.1159174778\n", + "EPOCH: 4446 LOSS: 61.1158853202\n", + "EPOCH: 4447 LOSS: 61.1158531702\n", + "EPOCH: 4448 LOSS: 61.1158210278\n", + "EPOCH: 4449 LOSS: 61.1157888929\n", + "EPOCH: 4450 LOSS: 61.1157567657\n", + "EPOCH: 4451 LOSS: 61.115724646\n", + "EPOCH: 4452 LOSS: 61.1156925339\n", + "EPOCH: 4453 LOSS: 61.1156604293\n", + "EPOCH: 4454 LOSS: 61.1156283323\n", + "EPOCH: 4455 LOSS: 61.1155962429\n", + "EPOCH: 4456 LOSS: 61.1155641611\n", + "EPOCH: 4457 LOSS: 61.1155320868\n", + "EPOCH: 4458 LOSS: 61.11550002\n", + "EPOCH: 4459 LOSS: 61.1154679608\n", + "EPOCH: 4460 LOSS: 61.1154359092\n", + "EPOCH: 4461 LOSS: 61.115403865\n", + "EPOCH: 4462 LOSS: 61.1153718285\n", + "EPOCH: 4463 LOSS: 61.1153397994\n", + "EPOCH: 4464 LOSS: 61.1153077779\n", + "EPOCH: 4465 LOSS: 61.1152757639\n", + "EPOCH: 4466 LOSS: 61.1152437575\n", + "EPOCH: 4467 LOSS: 61.1152117586\n", + "EPOCH: 4468 LOSS: 61.1151797671\n", + "EPOCH: 4469 LOSS: 61.1151477833\n", + "EPOCH: 4470 LOSS: 61.1151158069\n", + "EPOCH: 4471 LOSS: 61.115083838\n", + "EPOCH: 4472 LOSS: 61.1150518766\n", + "EPOCH: 4473 LOSS: 61.1150199227\n", + "EPOCH: 4474 LOSS: 61.1149879764\n", + "EPOCH: 4475 LOSS: 61.1149560375\n", + "EPOCH: 4476 LOSS: 61.1149241061\n", + "EPOCH: 4477 LOSS: 61.1148921822\n", + "EPOCH: 4478 LOSS: 61.1148602658\n", + "EPOCH: 4479 LOSS: 61.1148283569\n", + "EPOCH: 4480 LOSS: 61.1147964554\n", + "EPOCH: 4481 LOSS: 61.1147645614\n", + "EPOCH: 4482 LOSS: 61.1147326749\n", + "EPOCH: 4483 LOSS: 61.1147007959\n", + "EPOCH: 4484 LOSS: 61.1146689243\n", + "EPOCH: 4485 LOSS: 61.1146370602\n", + "EPOCH: 4486 LOSS: 
61.1146052035\n", + "EPOCH: 4487 LOSS: 61.1145733543\n", + "EPOCH: 4488 LOSS: 61.1145415126\n", + "EPOCH: 4489 LOSS: 61.1145096783\n", + "EPOCH: 4490 LOSS: 61.1144778514\n", + "EPOCH: 4491 LOSS: 61.114446032\n", + "EPOCH: 4492 LOSS: 61.11441422\n", + "EPOCH: 4493 LOSS: 61.1143824155\n", + "EPOCH: 4494 LOSS: 61.1143506184\n", + "EPOCH: 4495 LOSS: 61.1143188287\n", + "EPOCH: 4496 LOSS: 61.1142870464\n", + "EPOCH: 4497 LOSS: 61.1142552716\n", + "EPOCH: 4498 LOSS: 61.1142235042\n", + "EPOCH: 4499 LOSS: 61.1141917442\n", + "EPOCH: 4500 LOSS: 61.1141599916\n", + "EPOCH: 4501 LOSS: 61.1141282464\n", + "EPOCH: 4502 LOSS: 61.1140965086\n", + "EPOCH: 4503 LOSS: 61.1140647783\n", + "EPOCH: 4504 LOSS: 61.1140330553\n", + "EPOCH: 4505 LOSS: 61.1140013397\n", + "EPOCH: 4506 LOSS: 61.1139696315\n", + "EPOCH: 4507 LOSS: 61.1139379307\n", + "EPOCH: 4508 LOSS: 61.1139062373\n", + "EPOCH: 4509 LOSS: 61.1138745513\n", + "EPOCH: 4510 LOSS: 61.1138428727\n", + "EPOCH: 4511 LOSS: 61.1138112014\n", + "EPOCH: 4512 LOSS: 61.1137795375\n", + "EPOCH: 4513 LOSS: 61.113747881\n", + "EPOCH: 4514 LOSS: 61.1137162318\n", + "EPOCH: 4515 LOSS: 61.11368459\n", + "EPOCH: 4516 LOSS: 61.1136529556\n", + "EPOCH: 4517 LOSS: 61.1136213285\n", + "EPOCH: 4518 LOSS: 61.1135897088\n", + "EPOCH: 4519 LOSS: 61.1135580964\n", + "EPOCH: 4520 LOSS: 61.1135264913\n", + "EPOCH: 4521 LOSS: 61.1134948937\n", + "EPOCH: 4522 LOSS: 61.1134633033\n", + "EPOCH: 4523 LOSS: 61.1134317203\n", + "EPOCH: 4524 LOSS: 61.1134001446\n", + "EPOCH: 4525 LOSS: 61.1133685763\n", + "EPOCH: 4526 LOSS: 61.1133370153\n", + "EPOCH: 4527 LOSS: 61.1133054616\n", + "EPOCH: 4528 LOSS: 61.1132739152\n", + "EPOCH: 4529 LOSS: 61.1132423761\n", + "EPOCH: 4530 LOSS: 61.1132108444\n", + "EPOCH: 4531 LOSS: 61.11317932\n", + "EPOCH: 4532 LOSS: 61.1131478029\n", + "EPOCH: 4533 LOSS: 61.1131162931\n", + "EPOCH: 4534 LOSS: 61.1130847905\n", + "EPOCH: 4535 LOSS: 61.1130532953\n", + "EPOCH: 4536 LOSS: 61.1130218074\n", + "EPOCH: 4537 LOSS: 61.1129903268\n", + "EPOCH: 4538 LOSS: 61.1129588535\n", + "EPOCH: 4539 LOSS: 61.1129273874\n", + "EPOCH: 4540 LOSS: 61.1128959287\n", + "EPOCH: 4541 LOSS: 61.1128644772\n", + "EPOCH: 4542 LOSS: 61.112833033\n", + "EPOCH: 4543 LOSS: 61.112801596\n", + "EPOCH: 4544 LOSS: 61.1127701664\n", + "EPOCH: 4545 LOSS: 61.112738744\n", + "EPOCH: 4546 LOSS: 61.1127073288\n", + "EPOCH: 4547 LOSS: 61.112675921\n", + "EPOCH: 4548 LOSS: 61.1126445204\n", + "EPOCH: 4549 LOSS: 61.112613127\n", + "EPOCH: 4550 LOSS: 61.1125817409\n", + "EPOCH: 4551 LOSS: 61.112550362\n", + "EPOCH: 4552 LOSS: 61.1125189904\n", + "EPOCH: 4553 LOSS: 61.1124876261\n", + "EPOCH: 4554 LOSS: 61.1124562689\n", + "EPOCH: 4555 LOSS: 61.112424919\n", + "EPOCH: 4556 LOSS: 61.1123935764\n", + "EPOCH: 4557 LOSS: 61.1123622409\n", + "EPOCH: 4558 LOSS: 61.1123309127\n", + "EPOCH: 4559 LOSS: 61.1122995917\n", + "EPOCH: 4560 LOSS: 61.112268278\n", + "EPOCH: 4561 LOSS: 61.1122369714\n", + "EPOCH: 4562 LOSS: 61.1122056721\n", + "EPOCH: 4563 LOSS: 61.11217438\n", + "EPOCH: 4564 LOSS: 61.1121430951\n", + "EPOCH: 4565 LOSS: 61.1121118174\n", + "EPOCH: 4566 LOSS: 61.1120805469\n", + "EPOCH: 4567 LOSS: 61.1120492836\n", + "EPOCH: 4568 LOSS: 61.1120180274\n", + "EPOCH: 4569 LOSS: 61.1119867785\n", + "EPOCH: 4570 LOSS: 61.1119555368\n", + "EPOCH: 4571 LOSS: 61.1119243022\n", + "EPOCH: 4572 LOSS: 61.1118930749\n", + "EPOCH: 4573 LOSS: 61.1118618547\n", + "EPOCH: 4574 LOSS: 61.1118306417\n", + "EPOCH: 4575 LOSS: 61.1117994359\n", + "EPOCH: 4576 LOSS: 61.1117682372\n", + "EPOCH: 4577 LOSS: 61.1117370457\n", + 
"EPOCH: 4578 LOSS: 61.1117058614\n", + "EPOCH: 4579 LOSS: 61.1116746842\n", + "EPOCH: 4580 LOSS: 61.1116435142\n", + "EPOCH: 4581 LOSS: 61.1116123514\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 4582 LOSS: 61.1115811957\n", + "EPOCH: 4583 LOSS: 61.1115500471\n", + "EPOCH: 4584 LOSS: 61.1115189057\n", + "EPOCH: 4585 LOSS: 61.1114877714\n", + "EPOCH: 4586 LOSS: 61.1114566443\n", + "EPOCH: 4587 LOSS: 61.1114255243\n", + "EPOCH: 4588 LOSS: 61.1113944115\n", + "EPOCH: 4589 LOSS: 61.1113633058\n", + "EPOCH: 4590 LOSS: 61.1113322072\n", + "EPOCH: 4591 LOSS: 61.1113011157\n", + "EPOCH: 4592 LOSS: 61.1112700314\n", + "EPOCH: 4593 LOSS: 61.1112389541\n", + "EPOCH: 4594 LOSS: 61.111207884\n", + "EPOCH: 4595 LOSS: 61.111176821\n", + "EPOCH: 4596 LOSS: 61.1111457651\n", + "EPOCH: 4597 LOSS: 61.1111147164\n", + "EPOCH: 4598 LOSS: 61.1110836747\n", + "EPOCH: 4599 LOSS: 61.1110526401\n", + "EPOCH: 4600 LOSS: 61.1110216126\n", + "EPOCH: 4601 LOSS: 61.1109905922\n", + "EPOCH: 4602 LOSS: 61.110959579\n", + "EPOCH: 4603 LOSS: 61.1109285727\n", + "EPOCH: 4604 LOSS: 61.1108975736\n", + "EPOCH: 4605 LOSS: 61.1108665816\n", + "EPOCH: 4606 LOSS: 61.1108355966\n", + "EPOCH: 4607 LOSS: 61.1108046188\n", + "EPOCH: 4608 LOSS: 61.1107736479\n", + "EPOCH: 4609 LOSS: 61.1107426842\n", + "EPOCH: 4610 LOSS: 61.1107117275\n", + "EPOCH: 4611 LOSS: 61.1106807779\n", + "EPOCH: 4612 LOSS: 61.1106498354\n", + "EPOCH: 4613 LOSS: 61.1106188999\n", + "EPOCH: 4614 LOSS: 61.1105879715\n", + "EPOCH: 4615 LOSS: 61.1105570501\n", + "EPOCH: 4616 LOSS: 61.1105261358\n", + "EPOCH: 4617 LOSS: 61.1104952285\n", + "EPOCH: 4618 LOSS: 61.1104643283\n", + "EPOCH: 4619 LOSS: 61.1104334351\n", + "EPOCH: 4620 LOSS: 61.1104025489\n", + "EPOCH: 4621 LOSS: 61.1103716698\n", + "EPOCH: 4622 LOSS: 61.1103407977\n", + "EPOCH: 4623 LOSS: 61.1103099326\n", + "EPOCH: 4624 LOSS: 61.1102790746\n", + "EPOCH: 4625 LOSS: 61.1102482236\n", + "EPOCH: 4626 LOSS: 61.1102173796\n", + "EPOCH: 4627 LOSS: 61.1101865426\n", + "EPOCH: 4628 LOSS: 61.1101557127\n", + "EPOCH: 4629 LOSS: 61.1101248897\n", + "EPOCH: 4630 LOSS: 61.1100940738\n", + "EPOCH: 4631 LOSS: 61.1100632648\n", + "EPOCH: 4632 LOSS: 61.1100324629\n", + "EPOCH: 4633 LOSS: 61.110001668\n", + "EPOCH: 4634 LOSS: 61.10997088\n", + "EPOCH: 4635 LOSS: 61.1099400991\n", + "EPOCH: 4636 LOSS: 61.1099093251\n", + "EPOCH: 4637 LOSS: 61.1098785582\n", + "EPOCH: 4638 LOSS: 61.1098477982\n", + "EPOCH: 4639 LOSS: 61.1098170452\n", + "EPOCH: 4640 LOSS: 61.1097862992\n", + "EPOCH: 4641 LOSS: 61.1097555601\n", + "EPOCH: 4642 LOSS: 61.109724828\n", + "EPOCH: 4643 LOSS: 61.1096941029\n", + "EPOCH: 4644 LOSS: 61.1096633848\n", + "EPOCH: 4645 LOSS: 61.1096326736\n", + "EPOCH: 4646 LOSS: 61.1096019694\n", + "EPOCH: 4647 LOSS: 61.1095712722\n", + "EPOCH: 4648 LOSS: 61.1095405819\n", + "EPOCH: 4649 LOSS: 61.1095098985\n", + "EPOCH: 4650 LOSS: 61.1094792222\n", + "EPOCH: 4651 LOSS: 61.1094485527\n", + "EPOCH: 4652 LOSS: 61.1094178902\n", + "EPOCH: 4653 LOSS: 61.1093872346\n", + "EPOCH: 4654 LOSS: 61.109356586\n", + "EPOCH: 4655 LOSS: 61.1093259443\n", + "EPOCH: 4656 LOSS: 61.1092953096\n", + "EPOCH: 4657 LOSS: 61.1092646818\n", + "EPOCH: 4658 LOSS: 61.1092340609\n", + "EPOCH: 4659 LOSS: 61.1092034469\n", + "EPOCH: 4660 LOSS: 61.1091728398\n", + "EPOCH: 4661 LOSS: 61.1091422397\n", + "EPOCH: 4662 LOSS: 61.1091116465\n", + "EPOCH: 4663 LOSS: 61.1090810602\n", + "EPOCH: 4664 LOSS: 61.1090504808\n", + "EPOCH: 4665 LOSS: 61.1090199083\n", + "EPOCH: 4666 LOSS: 61.1089893427\n", + "EPOCH: 4667 LOSS: 
61.108958784\n", + "EPOCH: 4668 LOSS: 61.1089282322\n", + "EPOCH: 4669 LOSS: 61.1088976873\n", + "EPOCH: 4670 LOSS: 61.1088671493\n", + "EPOCH: 4671 LOSS: 61.1088366182\n", + "EPOCH: 4672 LOSS: 61.108806094\n", + "EPOCH: 4673 LOSS: 61.1087755766\n", + "EPOCH: 4674 LOSS: 61.1087450662\n", + "EPOCH: 4675 LOSS: 61.1087145626\n", + "EPOCH: 4676 LOSS: 61.1086840659\n", + "EPOCH: 4677 LOSS: 61.1086535761\n", + "EPOCH: 4678 LOSS: 61.1086230931\n", + "EPOCH: 4679 LOSS: 61.108592617\n", + "EPOCH: 4680 LOSS: 61.1085621478\n", + "EPOCH: 4681 LOSS: 61.1085316854\n", + "EPOCH: 4682 LOSS: 61.1085012299\n", + "EPOCH: 4683 LOSS: 61.1084707812\n", + "EPOCH: 4684 LOSS: 61.1084403394\n", + "EPOCH: 4685 LOSS: 61.1084099044\n", + "EPOCH: 4686 LOSS: 61.1083794763\n", + "EPOCH: 4687 LOSS: 61.1083490551\n", + "EPOCH: 4688 LOSS: 61.1083186406\n", + "EPOCH: 4689 LOSS: 61.108288233\n", + "EPOCH: 4690 LOSS: 61.1082578323\n", + "EPOCH: 4691 LOSS: 61.1082274384\n", + "EPOCH: 4692 LOSS: 61.1081970513\n", + "EPOCH: 4693 LOSS: 61.108166671\n", + "EPOCH: 4694 LOSS: 61.1081362976\n", + "EPOCH: 4695 LOSS: 61.108105931\n", + "EPOCH: 4696 LOSS: 61.1080755712\n", + "EPOCH: 4697 LOSS: 61.1080452182\n", + "EPOCH: 4698 LOSS: 61.108014872\n", + "EPOCH: 4699 LOSS: 61.1079845327\n", + "EPOCH: 4700 LOSS: 61.1079542001\n", + "EPOCH: 4701 LOSS: 61.1079238744\n", + "EPOCH: 4702 LOSS: 61.1078935554\n", + "EPOCH: 4703 LOSS: 61.1078632433\n", + "EPOCH: 4704 LOSS: 61.107832938\n", + "EPOCH: 4705 LOSS: 61.1078026394\n", + "EPOCH: 4706 LOSS: 61.1077723476\n", + "EPOCH: 4707 LOSS: 61.1077420627\n", + "EPOCH: 4708 LOSS: 61.1077117845\n", + "EPOCH: 4709 LOSS: 61.1076815131\n", + "EPOCH: 4710 LOSS: 61.1076512485\n", + "EPOCH: 4711 LOSS: 61.1076209906\n", + "EPOCH: 4712 LOSS: 61.1075907395\n", + "EPOCH: 4713 LOSS: 61.1075604952\n", + "EPOCH: 4714 LOSS: 61.1075302577\n", + "EPOCH: 4715 LOSS: 61.1075000269\n", + "EPOCH: 4716 LOSS: 61.1074698029\n", + "EPOCH: 4717 LOSS: 61.1074395857\n", + "EPOCH: 4718 LOSS: 61.1074093752\n", + "EPOCH: 4719 LOSS: 61.1073791715\n", + "EPOCH: 4720 LOSS: 61.1073489745\n", + "EPOCH: 4721 LOSS: 61.1073187843\n", + "EPOCH: 4722 LOSS: 61.1072886008\n", + "EPOCH: 4723 LOSS: 61.1072584241\n", + "EPOCH: 4724 LOSS: 61.1072282541\n", + "EPOCH: 4725 LOSS: 61.1071980908\n", + "EPOCH: 4726 LOSS: 61.1071679343\n", + "EPOCH: 4727 LOSS: 61.1071377845\n", + "EPOCH: 4728 LOSS: 61.1071076415\n", + "EPOCH: 4729 LOSS: 61.1070775051\n", + "EPOCH: 4730 LOSS: 61.1070473755\n", + "EPOCH: 4731 LOSS: 61.1070172527\n", + "EPOCH: 4732 LOSS: 61.1069871365\n", + "EPOCH: 4733 LOSS: 61.1069570271\n", + "EPOCH: 4734 LOSS: 61.1069269243\n", + "EPOCH: 4735 LOSS: 61.1068968283\n", + "EPOCH: 4736 LOSS: 61.106866739\n", + "EPOCH: 4737 LOSS: 61.1068366564\n", + "EPOCH: 4738 LOSS: 61.1068065805\n", + "EPOCH: 4739 LOSS: 61.1067765113\n", + "EPOCH: 4740 LOSS: 61.1067464488\n", + "EPOCH: 4741 LOSS: 61.106716393\n", + "EPOCH: 4742 LOSS: 61.1066863439\n", + "EPOCH: 4743 LOSS: 61.1066563015\n", + "EPOCH: 4744 LOSS: 61.1066262657\n", + "EPOCH: 4745 LOSS: 61.1065962367\n", + "EPOCH: 4746 LOSS: 61.1065662143\n", + "EPOCH: 4747 LOSS: 61.1065361986\n", + "EPOCH: 4748 LOSS: 61.1065061896\n", + "EPOCH: 4749 LOSS: 61.1064761873\n", + "EPOCH: 4750 LOSS: 61.1064461916\n", + "EPOCH: 4751 LOSS: 61.1064162026\n", + "EPOCH: 4752 LOSS: 61.1063862203\n", + "EPOCH: 4753 LOSS: 61.1063562446\n", + "EPOCH: 4754 LOSS: 61.1063262756\n", + "EPOCH: 4755 LOSS: 61.1062963132\n", + "EPOCH: 4756 LOSS: 61.1062663575\n", + "EPOCH: 4757 LOSS: 61.1062364085\n", + "EPOCH: 4758 LOSS: 
61.1062064661\n", + "EPOCH: 4759 LOSS: 61.1061765303\n", + "EPOCH: 4760 LOSS: 61.1061466012\n", + "EPOCH: 4761 LOSS: 61.1061166788\n", + "EPOCH: 4762 LOSS: 61.1060867629\n", + "EPOCH: 4763 LOSS: 61.1060568538\n", + "EPOCH: 4764 LOSS: 61.1060269512\n", + "EPOCH: 4765 LOSS: 61.1059970553\n", + "EPOCH: 4766 LOSS: 61.105967166\n", + "EPOCH: 4767 LOSS: 61.1059372833\n", + "EPOCH: 4768 LOSS: 61.1059074073\n", + "EPOCH: 4769 LOSS: 61.1058775378\n", + "EPOCH: 4770 LOSS: 61.105847675\n", + "EPOCH: 4771 LOSS: 61.1058178188\n", + "EPOCH: 4772 LOSS: 61.1057879693\n", + "EPOCH: 4773 LOSS: 61.1057581263\n", + "EPOCH: 4774 LOSS: 61.1057282899\n", + "EPOCH: 4775 LOSS: 61.1056984602\n", + "EPOCH: 4776 LOSS: 61.105668637\n", + "EPOCH: 4777 LOSS: 61.1056388205\n", + "EPOCH: 4778 LOSS: 61.1056090105\n", + "EPOCH: 4779 LOSS: 61.1055792071\n", + "EPOCH: 4780 LOSS: 61.1055494104\n", + "EPOCH: 4781 LOSS: 61.1055196202\n", + "EPOCH: 4782 LOSS: 61.1054898366\n", + "EPOCH: 4783 LOSS: 61.1054600596\n", + "EPOCH: 4784 LOSS: 61.1054302891\n", + "EPOCH: 4785 LOSS: 61.1054005253\n", + "EPOCH: 4786 LOSS: 61.105370768\n", + "EPOCH: 4787 LOSS: 61.1053410173\n", + "EPOCH: 4788 LOSS: 61.1053112731\n", + "EPOCH: 4789 LOSS: 61.1052815356\n", + "EPOCH: 4790 LOSS: 61.1052518046\n", + "EPOCH: 4791 LOSS: 61.1052220801\n", + "EPOCH: 4792 LOSS: 61.1051923623\n", + "EPOCH: 4793 LOSS: 61.1051626509\n", + "EPOCH: 4794 LOSS: 61.1051329462\n", + "EPOCH: 4795 LOSS: 61.105103248\n", + "EPOCH: 4796 LOSS: 61.1050735563\n", + "EPOCH: 4797 LOSS: 61.1050438712\n", + "EPOCH: 4798 LOSS: 61.1050141926\n", + "EPOCH: 4799 LOSS: 61.1049845206\n", + "EPOCH: 4800 LOSS: 61.1049548551\n", + "EPOCH: 4801 LOSS: 61.1049251961\n", + "EPOCH: 4802 LOSS: 61.1048955437\n", + "EPOCH: 4803 LOSS: 61.1048658978\n", + "EPOCH: 4804 LOSS: 61.1048362585\n", + "EPOCH: 4805 LOSS: 61.1048066256\n", + "EPOCH: 4806 LOSS: 61.1047769993\n", + "EPOCH: 4807 LOSS: 61.1047473795\n", + "EPOCH: 4808 LOSS: 61.1047177663\n", + "EPOCH: 4809 LOSS: 61.1046881595\n", + "EPOCH: 4810 LOSS: 61.1046585593\n", + "EPOCH: 4811 LOSS: 61.1046289655\n", + "EPOCH: 4812 LOSS: 61.1045993783\n", + "EPOCH: 4813 LOSS: 61.1045697976\n", + "EPOCH: 4814 LOSS: 61.1045402234\n", + "EPOCH: 4815 LOSS: 61.1045106557\n", + "EPOCH: 4816 LOSS: 61.1044810945\n", + "EPOCH: 4817 LOSS: 61.1044515398\n", + "EPOCH: 4818 LOSS: 61.1044219916\n", + "EPOCH: 4819 LOSS: 61.1043924498\n", + "EPOCH: 4820 LOSS: 61.1043629146\n", + "EPOCH: 4821 LOSS: 61.1043333858\n", + "EPOCH: 4822 LOSS: 61.1043038636\n", + "EPOCH: 4823 LOSS: 61.1042743478\n", + "EPOCH: 4824 LOSS: 61.1042448385\n", + "EPOCH: 4825 LOSS: 61.1042153356\n", + "EPOCH: 4826 LOSS: 61.1041858393\n", + "EPOCH: 4827 LOSS: 61.1041563494\n", + "EPOCH: 4828 LOSS: 61.1041268659\n", + "EPOCH: 4829 LOSS: 61.104097389\n", + "EPOCH: 4830 LOSS: 61.1040679185\n", + "EPOCH: 4831 LOSS: 61.1040384544\n", + "EPOCH: 4832 LOSS: 61.1040089969\n", + "EPOCH: 4833 LOSS: 61.1039795457\n", + "EPOCH: 4834 LOSS: 61.1039501011\n", + "EPOCH: 4835 LOSS: 61.1039206628\n", + "EPOCH: 4836 LOSS: 61.1038912311\n", + "EPOCH: 4837 LOSS: 61.1038618057\n", + "EPOCH: 4838 LOSS: 61.1038323868\n", + "EPOCH: 4839 LOSS: 61.1038029744\n", + "EPOCH: 4840 LOSS: 61.1037735684\n", + "EPOCH: 4841 LOSS: 61.1037441688\n", + "EPOCH: 4842 LOSS: 61.1037147757\n", + "EPOCH: 4843 LOSS: 61.103685389\n", + "EPOCH: 4844 LOSS: 61.1036560087\n", + "EPOCH: 4845 LOSS: 61.1036266348\n", + "EPOCH: 4846 LOSS: 61.1035972674\n", + "EPOCH: 4847 LOSS: 61.1035679064\n", + "EPOCH: 4848 LOSS: 61.1035385518\n", + "EPOCH: 4849 LOSS: 
61.1035092036\n", + "EPOCH: 4850 LOSS: 61.1034798618\n", + "EPOCH: 4851 LOSS: 61.1034505265\n", + "EPOCH: 4852 LOSS: 61.1034211975\n", + "EPOCH: 4853 LOSS: 61.103391875\n", + "EPOCH: 4854 LOSS: 61.1033625588\n", + "EPOCH: 4855 LOSS: 61.1033332491\n", + "EPOCH: 4856 LOSS: 61.1033039457\n", + "EPOCH: 4857 LOSS: 61.1032746488\n", + "EPOCH: 4858 LOSS: 61.1032453582\n", + "EPOCH: 4859 LOSS: 61.1032160741\n", + "EPOCH: 4860 LOSS: 61.1031867963\n", + "EPOCH: 4861 LOSS: 61.1031575249\n", + "EPOCH: 4862 LOSS: 61.1031282599\n", + "EPOCH: 4863 LOSS: 61.1030990013\n", + "EPOCH: 4864 LOSS: 61.103069749\n", + "EPOCH: 4865 LOSS: 61.1030405031\n", + "EPOCH: 4866 LOSS: 61.1030112636\n", + "EPOCH: 4867 LOSS: 61.1029820305\n", + "EPOCH: 4868 LOSS: 61.1029528037\n", + "EPOCH: 4869 LOSS: 61.1029235833\n", + "EPOCH: 4870 LOSS: 61.1028943693\n", + "EPOCH: 4871 LOSS: 61.1028651616\n", + "EPOCH: 4872 LOSS: 61.1028359603\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "EPOCH: 4873 LOSS: 61.1028067653\n", + "EPOCH: 4874 LOSS: 61.1027775767\n", + "EPOCH: 4875 LOSS: 61.1027483945\n", + "EPOCH: 4876 LOSS: 61.1027192186\n", + "EPOCH: 4877 LOSS: 61.102690049\n", + "EPOCH: 4878 LOSS: 61.1026608858\n", + "EPOCH: 4879 LOSS: 61.1026317289\n", + "EPOCH: 4880 LOSS: 61.1026025784\n", + "EPOCH: 4881 LOSS: 61.1025734342\n", + "EPOCH: 4882 LOSS: 61.1025442963\n", + "EPOCH: 4883 LOSS: 61.1025151648\n", + "EPOCH: 4884 LOSS: 61.1024860396\n", + "EPOCH: 4885 LOSS: 61.1024569207\n", + "EPOCH: 4886 LOSS: 61.1024278081\n", + "EPOCH: 4887 LOSS: 61.1023987019\n", + "EPOCH: 4888 LOSS: 61.102369602\n", + "EPOCH: 4889 LOSS: 61.1023405084\n", + "EPOCH: 4890 LOSS: 61.1023114211\n", + "EPOCH: 4891 LOSS: 61.1022823402\n", + "EPOCH: 4892 LOSS: 61.1022532655\n", + "EPOCH: 4893 LOSS: 61.1022241971\n", + "EPOCH: 4894 LOSS: 61.1021951351\n", + "EPOCH: 4895 LOSS: 61.1021660793\n", + "EPOCH: 4896 LOSS: 61.1021370299\n", + "EPOCH: 4897 LOSS: 61.1021079868\n", + "EPOCH: 4898 LOSS: 61.1020789499\n", + "EPOCH: 4899 LOSS: 61.1020499193\n", + "EPOCH: 4900 LOSS: 61.1020208951\n", + "EPOCH: 4901 LOSS: 61.1019918771\n", + "EPOCH: 4902 LOSS: 61.1019628654\n", + "EPOCH: 4903 LOSS: 61.10193386\n", + "EPOCH: 4904 LOSS: 61.1019048608\n", + "EPOCH: 4905 LOSS: 61.101875868\n", + "EPOCH: 4906 LOSS: 61.1018468814\n", + "EPOCH: 4907 LOSS: 61.1018179011\n", + "EPOCH: 4908 LOSS: 61.1017889271\n", + "EPOCH: 4909 LOSS: 61.1017599593\n", + "EPOCH: 4910 LOSS: 61.1017309978\n", + "EPOCH: 4911 LOSS: 61.1017020426\n", + "EPOCH: 4912 LOSS: 61.1016730936\n", + "EPOCH: 4913 LOSS: 61.1016441509\n", + "EPOCH: 4914 LOSS: 61.1016152145\n", + "EPOCH: 4915 LOSS: 61.1015862843\n", + "EPOCH: 4916 LOSS: 61.1015573603\n", + "EPOCH: 4917 LOSS: 61.1015284426\n", + "EPOCH: 4918 LOSS: 61.1014995312\n", + "EPOCH: 4919 LOSS: 61.101470626\n", + "EPOCH: 4920 LOSS: 61.101441727\n", + "EPOCH: 4921 LOSS: 61.1014128343\n", + "EPOCH: 4922 LOSS: 61.1013839478\n", + "EPOCH: 4923 LOSS: 61.1013550675\n", + "EPOCH: 4924 LOSS: 61.1013261935\n", + "EPOCH: 4925 LOSS: 61.1012973257\n", + "EPOCH: 4926 LOSS: 61.1012684642\n", + "EPOCH: 4927 LOSS: 61.1012396089\n", + "EPOCH: 4928 LOSS: 61.1012107597\n", + "EPOCH: 4929 LOSS: 61.1011819169\n", + "EPOCH: 4930 LOSS: 61.1011530802\n", + "EPOCH: 4931 LOSS: 61.1011242498\n", + "EPOCH: 4932 LOSS: 61.1010954255\n", + "EPOCH: 4933 LOSS: 61.1010666075\n", + "EPOCH: 4934 LOSS: 61.1010377957\n", + "EPOCH: 4935 LOSS: 61.1010089901\n", + "EPOCH: 4936 LOSS: 61.1009801907\n", + "EPOCH: 4937 LOSS: 61.1009513975\n", + "EPOCH: 4938 LOSS: 61.1009226105\n", + 
"EPOCH: 4939 LOSS: 61.1008938297\n", + "EPOCH: 4940 LOSS: 61.1008650551\n", + "EPOCH: 4941 LOSS: 61.1008362867\n", + "EPOCH: 4942 LOSS: 61.1008075245\n", + "EPOCH: 4943 LOSS: 61.1007787684\n", + "EPOCH: 4944 LOSS: 61.1007500186\n", + "EPOCH: 4945 LOSS: 61.1007212749\n", + "EPOCH: 4946 LOSS: 61.1006925375\n", + "EPOCH: 4947 LOSS: 61.1006638062\n", + "EPOCH: 4948 LOSS: 61.100635081\n", + "EPOCH: 4949 LOSS: 61.1006063621\n", + "EPOCH: 4950 LOSS: 61.1005776493\n", + "EPOCH: 4951 LOSS: 61.1005489427\n", + "EPOCH: 4952 LOSS: 61.1005202423\n", + "EPOCH: 4953 LOSS: 61.100491548\n", + "EPOCH: 4954 LOSS: 61.1004628599\n", + "EPOCH: 4955 LOSS: 61.100434178\n", + "EPOCH: 4956 LOSS: 61.1004055022\n", + "EPOCH: 4957 LOSS: 61.1003768325\n", + "EPOCH: 4958 LOSS: 61.1003481691\n", + "EPOCH: 4959 LOSS: 61.1003195117\n", + "EPOCH: 4960 LOSS: 61.1002908606\n", + "EPOCH: 4961 LOSS: 61.1002622155\n", + "EPOCH: 4962 LOSS: 61.1002335767\n", + "EPOCH: 4963 LOSS: 61.1002049439\n", + "EPOCH: 4964 LOSS: 61.1001763173\n", + "EPOCH: 4965 LOSS: 61.1001476969\n", + "EPOCH: 4966 LOSS: 61.1001190825\n", + "EPOCH: 4967 LOSS: 61.1000904743\n", + "EPOCH: 4968 LOSS: 61.1000618723\n", + "EPOCH: 4969 LOSS: 61.1000332763\n", + "EPOCH: 4970 LOSS: 61.1000046865\n", + "EPOCH: 4971 LOSS: 61.0999761028\n", + "EPOCH: 4972 LOSS: 61.0999475253\n", + "EPOCH: 4973 LOSS: 61.0999189538\n", + "EPOCH: 4974 LOSS: 61.0998903885\n", + "EPOCH: 4975 LOSS: 61.0998618293\n", + "EPOCH: 4976 LOSS: 61.0998332762\n", + "EPOCH: 4977 LOSS: 61.0998047292\n", + "EPOCH: 4978 LOSS: 61.0997761883\n", + "EPOCH: 4979 LOSS: 61.0997476536\n", + "EPOCH: 4980 LOSS: 61.0997191249\n", + "EPOCH: 4981 LOSS: 61.0996906023\n", + "EPOCH: 4982 LOSS: 61.0996620859\n", + "EPOCH: 4983 LOSS: 61.0996335755\n", + "EPOCH: 4984 LOSS: 61.0996050712\n", + "EPOCH: 4985 LOSS: 61.099576573\n", + "EPOCH: 4986 LOSS: 61.0995480809\n", + "EPOCH: 4987 LOSS: 61.0995195949\n", + "EPOCH: 4988 LOSS: 61.099491115\n", + "EPOCH: 4989 LOSS: 61.0994626412\n", + "EPOCH: 4990 LOSS: 61.0994341734\n", + "EPOCH: 4991 LOSS: 61.0994057117\n", + "EPOCH: 4992 LOSS: 61.0993772561\n", + "EPOCH: 4993 LOSS: 61.0993488066\n", + "EPOCH: 4994 LOSS: 61.0993203631\n", + "EPOCH: 4995 LOSS: 61.0992919257\n", + "EPOCH: 4996 LOSS: 61.0992634944\n", + "EPOCH: 4997 LOSS: 61.0992350692\n", + "EPOCH: 4998 LOSS: 61.09920665\n", + "EPOCH: 4999 LOSS: 61.0991782368\n" + ] + } + ], + "source": [ + "settings = {}\n", + "settings['n'] = 5 # dimension of word embeddings\n", + "settings['window_size'] = 2 # context window +/- center word\n", + "settings['min_count'] = 0 # minimum word count\n", + "settings['epochs'] = 5000 # number of training epochs\n", + "settings['neg_samp'] = 10 # number of negative words to use during training\n", + "settings['learning_rate'] = 0.01 # learning rate\n", + "np.random.seed(0) # set the seed for reproducibility\n", + "\n", + "corpus = [['the','quick','brown','fox','jumped','over','the','lazy','dog']]\n", + "\n", + "# INITIALIZE W2V MODEL\n", + "w2v = word2vec()\n", + "\n", + "# generate training data\n", + "training_data = w2v.generate_training_data(settings, corpus)\n", + "\n", + "# train word2vec model\n", + "w2v.train(training_data)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": 
"3.5.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/NLP/WordEmbedding/word2vec/text8.zip b/NLP/WordEmbedding/word2vec/text8.zip new file mode 100644 index 0000000..436e05b Binary files /dev/null and b/NLP/WordEmbedding/word2vec/text8.zip differ diff --git a/NLP/WordEmbedding/word2vec/word2vec.ipynb b/NLP/WordEmbedding/word2vec/word2vec.ipynb new file mode 100644 index 0000000..0987c69 --- /dev/null +++ b/NLP/WordEmbedding/word2vec/word2vec.ipynb @@ -0,0 +1,1103 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Preparing the text data\n", + "\n", + "The previously mentioned TensorFlow tutorial has a few functions that take a text database and transform it so that we can extract input words and their associated grams in mini-batches for training the Word2Vec system / embeddings (if you’re not sure what “mini-batch” means, check out this tutorial). I’ll briefly talk about each of these functions in turn:" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import urllib.request\n", + "import zipfile\n", + "import tensorflow as tf\n", + "import collections\n", + "import numpy as np\n", + "\n", + "def maybe_download(filename, url, expected_bytes):\n", + " \"\"\"Download a file if not present, and make sure it's the right size.\"\"\"\n", + " if not os.path.exists(filename):\n", + " filename, _ = urllib.request.urlretrieve(url + filename, filename)\n", + " statinfo = os.stat(filename)\n", + " if statinfo.st_size == expected_bytes:\n", + " print('Found and verified', filename)\n", + " else:\n", + " print(statinfo.st_size)\n", + " raise Exception(\n", + " 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n", + " return filename" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function checks to see if the filename already has been downloaded from the supplied url. If not, it uses the urllib.request Python module which retrieves a file from the given url argument, and downloads the file into the local code directory. If the file already exists (i.e. os.path.exists(filename) returns true), then the function does not try to download the file again. Next, the function checks the size of the file and makes sure it lines up with the expected file size, expected_bytes. If all is well, it returns the filename object which can be used to extract the data from. To call the function with the data-set we are using in this example, we execute the following code:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found and verified text8.zip\n" + ] + } + ], + "source": [ + "url = 'http://mattmahoney.net/dc/'\n", + "filename = maybe_download('text8.zip', url, 31344016)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The next thing we have to do is take the filename object, which points to the downloaded file, and extract the data using the Python zipfile module." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# Read the data into a list of strings.\n", + "def read_data(filename):\n", + " \"\"\"Extract the first file enclosed in a zip file as a list of words.\"\"\"\n", + " with zipfile.ZipFile(filename) as f:\n", + " data = tf.compat.as_str(f.read(f.namelist()[0])).split()\n", + " return data" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Using zipfile.ZipFile() to extract the zipped file, we can then use the reader functionality found in this zipfile module. First, the namelist() function retrieves all the members of the archive – in this case there is only one member, so we access this using the zero index. Then we use the read() function which reads all the text in the file and pass this through the TensorFlow function as_str which ensures that the text is created as a string data-type. Finally, we use split() function to create a list with all the words in the text file, separated by white-space characters. We can see some of the output here:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse']\n" + ] + } + ], + "source": [ + "vocabulary = read_data(filename)\n", + "print(vocabulary[:7])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As you can observe, the returned vocabulary data contains a list of plain English words, ordered as they are in the sentences of the original extracted text file. Now that we have all the words extracted in a list, we have to do some further processing to enable us to create our skip-gram batch data. These further steps are:\n", + "\n", + "- Extract the top 10,000 most common words to include in our embedding vector\n", + "- Gather together all the unique words and index them with a unique integer value – this is what is required to create an equivalent one-hot type input for the word. We’ll use a dictionary to do this\n", + "- Loop through every word in the dataset (vocabulary variable) and assign it to the unique integer word identified, created in Step 2 above. This will allow easy lookup / processing of the word data stream\n", + "\n", + "The function which performs all this magic is shown below:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def build_dataset(words, n_words):\n", + " \"\"\"Process raw inputs into a dataset.\"\"\"\n", + " count = [['UNK', -1]]\n", + " count.extend(collections.Counter(words).most_common(n_words - 1))\n", + " dictionary = dict()\n", + " for word, _ in count:\n", + " dictionary[word] = len(dictionary)\n", + " data = list()\n", + " unk_count = 0\n", + " for word in words:\n", + " if word in dictionary:\n", + " index = dictionary[word]\n", + " else:\n", + " index = 0 # dictionary['UNK']\n", + " unk_count += 1\n", + " data.append(index)\n", + " count[0][1] = unk_count\n", + " reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n", + " return data, count, dictionary, reversed_dictionary" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first step is setting up a “counter” list, which will store the number of times a word is found within the data-set. 
Because we are restricting our vocabulary to only 10,000 words, any words not within the top 10,000 most common words will be marked with an “UNK” designation, standing for “unknown”. The initialized count list is then extended using the Counter() class from the Python collections module and its associated most_common() function, which counts the occurrences of each word in the given argument (words) and returns the n most common words as a list.\n", + "\n", + "The next part of this function creates a dictionary, called dictionary, which is populated by keys corresponding to each unique word. The value assigned to each unique word key is simply an increasing integer count of the size of the dictionary. So, for instance, the most common word receives the value 1, the second most common the value 2, the third most common the value 3, and so on (the integer 0 is assigned to the ‘UNK’ words). This step creates a unique integer value for each word within the vocabulary – accomplishing the second step of the process defined above.\n", + "\n", + "Next, the function loops through each word in our full words data set – the data set which was output from the read_data() function. A list called data is created, which is the same length as words, but instead of being a list of individual words it is a list of integers – with each word now represented by the unique integer that was assigned to it in dictionary. So the first sentence of our data-set, [‘anarchism’, ‘originated’, ‘as’, ‘a’, ‘term’, ‘of’, ‘abuse’], now looks like this in the data variable: [5242, 3083, 12, 6, 195, 2, 3136]. This part of the function addresses step 3 in the list above.\n", + "\n", + "Finally, the function creates a dictionary called reversed_dictionary that allows us to look up a word based on its unique integer identifier, rather than looking up the identifier based on the word – i.e. it is the reverse of the original dictionary. \n", + "\n", + "The final aspect of setting up our data is to create a data set comprising our input words and their associated context words (grams), which can be used to train our Word2Vec embedding system (a quick toy check of build_dataset’s outputs is sketched below, before the batching code). 
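As a quick sanity check (not part of the original tutorial), build_dataset can be exercised on a tiny hand-made word list. The toy_words list below is hypothetical, and the exact integer codes for words with tied counts depend on Counter ordering, so the values in the comments are only illustrative:

```python
# Hypothetical toy check; assumes the build_dataset function defined above has been run.
toy_words = ['the', 'quick', 'brown', 'fox', 'jumped', 'over', 'the', 'lazy', 'dog']
data, count, dictionary, reversed_dictionary = build_dataset(toy_words, n_words=5)

print(count)                   # [['UNK', 4], ('the', 2), ...] - words outside the top n are folded into 'UNK'
print(data)                    # one integer code per input word, e.g. [1, 2, 3, 4, 0, 0, 1, 0, 0]
print(dictionary['the'])       # the unique integer id assigned to 'the' (1, as the most common real word)
print(reversed_dictionary[0])  # 'UNK' - maps an id back to its word
```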
The code to do this is:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "import random  # needed for random.randint in generate_batch below\n", + "\n", + "data_index = 0\n", + "# generate batch data\n", + "def generate_batch(data, batch_size, num_skips, skip_window):\n", + "    global data_index\n", + "    assert batch_size % num_skips == 0\n", + "    assert num_skips <= 2 * skip_window\n", + "    batch = np.ndarray(shape=(batch_size), dtype=np.int32)\n", + "    context = np.ndarray(shape=(batch_size, 1), dtype=np.int32)\n", + "    span = 2 * skip_window + 1  # [ skip_window input_word skip_window ]\n", + "    buffer = collections.deque(maxlen=span)\n", + "    for _ in range(span):\n", + "        buffer.append(data[data_index])\n", + "        data_index = (data_index + 1) % len(data)\n", + "    for i in range(batch_size // num_skips):\n", + "        target = skip_window  # input word at the center of the buffer\n", + "        targets_to_avoid = [skip_window]\n", + "        for j in range(num_skips):\n", + "            while target in targets_to_avoid:\n", + "                target = random.randint(0, span - 1)\n", + "            targets_to_avoid.append(target)\n", + "            batch[i * num_skips + j] = buffer[skip_window]  # this is the input word\n", + "            context[i * num_skips + j, 0] = buffer[target]  # these are the context words\n", + "        buffer.append(data[data_index])\n", + "        data_index = (data_index + 1) % len(data)\n", + "    # Backtrack a little bit to avoid skipping words in the end of a batch\n", + "    data_index = (data_index + len(data) - span) % len(data)\n", + "    return batch, context" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function will generate mini-batches to use during our training (again, see here for information on mini-batch training). These batches consist of input words (stored in batch) and randomly drawn associated context words from within the gram, which act as the labels to predict (stored in context). For instance, in the 5-gram “the cat sat on the”, the input word will be the center word, i.e. “sat”, and the context words to be predicted will be drawn randomly from the remaining words of the gram: [‘the’, ‘cat’, ‘on’, ‘the’]. In this function, the number of words drawn randomly from the surrounding context is defined by the argument num_skips. The size of the window of context words to draw from around the input word is defined by the argument skip_window – in the example above (“the cat sat on the”), we have a skip window width of 2 around the input word “sat”.\n", + "\n", + "In the function above, first the batch and context outputs are defined as arrays of length batch_size. Then the span size is defined, which is basically the size of the word list that the input word and context samples will be drawn from. In the example sub-sentence above, “the cat sat on the”, the span is 5 = 2 x skip_window + 1. 
After this a buffer is created:" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'buffer = collections.deque(maxlen=span)\\nfor _ in range(span):\\n buffer.append(data[data_index])\\n data_index = (data_index + 1) % len(data)'" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''buffer = collections.deque(maxlen=span)\n", + "for _ in range(span):\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)'''" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This buffer will hold a maximum of span elements and will be a kind of moving window of words that samples are drawn from. Whenever a new word index is added to the buffer, the left most element will drop out of the buffer to allow room for the new word index being added. The position of the buffer in the input text stream is stored in a global variable data_index which is incremented each time a new word is added to the buffer. If it gets to the end of the text stream, the “% len(data)” component of the index update will basically reset the count back to zero.\n", + "\n", + "The code below fills out the batch and context variables: " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'for i in range(batch_size // num_skips):\\n target = skip_window # input word at the center of the buffer\\n targets_to_avoid = [skip_window]\\n for j in range(num_skips):\\n while target in targets_to_avoid:\\n target = random.randint(0, span - 1)\\n targets_to_avoid.append(target)\\n batch[i * num_skips + j] = buffer[skip_window] # this is the input word\\n context[i * num_skips + j, 0] = buffer[target] # these are the context words\\n buffer.append(data[data_index])\\n data_index = (data_index + 1) % len(data)'" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''for i in range(batch_size // num_skips):\n", + " target = skip_window # input word at the center of the buffer\n", + " targets_to_avoid = [skip_window]\n", + " for j in range(num_skips):\n", + " while target in targets_to_avoid:\n", + " target = random.randint(0, span - 1)\n", + " targets_to_avoid.append(target)\n", + " batch[i * num_skips + j] = buffer[skip_window] # this is the input word\n", + " context[i * num_skips + j, 0] = buffer[target] # these are the context words\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)'''" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first “target” word selected is the word at the center of the span of words and is therefore the input word. Then other words are randomly selected from the span of words, making sure that the input word is not selected as part of the context, and each context word is unique. The batch variable will feature repeated input words (buffer[skip_window]) which are matched with each context word in context.\n", + "\n", + "The batch and context variables are then returned – and now we have a means of drawing batches of data from the data set. We are now in a position to create our Word2Vec training code in TensorFlow. However, before we get to that, we’ll first create a validation data-set that we can use to test how our model is doing. 
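\n", + "\n", + "As a quick aside (this check is not in the original tutorial, but it is a handy way to convince yourself that the batching works): once data has been built, we could draw one small batch and decode it with reverse_dictionary, confirming that each input word is paired with words from its surrounding window:\n", + "\n", + "```python\n", + "# assumes data, reverse_dictionary and generate_batch() from above are already defined\n", + "batch, context = generate_batch(data, batch_size=8, num_skips=2, skip_window=1)\n", + "for inp, ctx in zip(batch, context[:, 0]):\n", + "    print(reverse_dictionary[inp], '->', reverse_dictionary[ctx])\n", + "```\n", + "\n", + "Returning to the validation data-set: we will use it to check how the model is doing as it trains. 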
We do that by measuring which vectors lie closest together in vector-space and making sure, using our knowledge of English, that these words are indeed similar. This will be discussed more in the next section. However, for now, the code below shows how to grab some random validation words from the most common words in our vocabulary:" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "# We pick a random validation set to sample nearest neighbors. Here we limit the\n", + "# validation samples to the words that have a low numeric ID, which by\n", + "# construction are also the most frequent.\n", + "valid_size = 16 # Random set of words to evaluate similarity on.\n", + "valid_window = 100 # Only pick dev samples in the head of the distribution.\n", + "valid_examples = np.random.choice(valid_window, valid_size, replace=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code above randomly chooses 16 integers from the range 0–99 – these correspond to the integer indexes of the 100 most common words in our text data. These will be the words we examine to assess how our learning is progressing in associating related words together in the vector-space. Now, onto creating the TensorFlow model." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating the TensorFlow model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For a refresher on TensorFlow, check out this tutorial. Below I will step through the process of creating our Word2Vec word embeddings in TensorFlow. What does this involve? Simply, we need to set up the neural network which I previously presented, with a word embedding matrix acting as the hidden layer and an output softmax layer in TensorFlow. 
By training this model, we’ll be learning the best word embedding matrix and therefore we’ll be learning a reduced, context maintaining, mapping of words to vectors.\n", + "\n", + "The first thing to do is set-up some variables which we’ll use later on in the code – the purposes of these variables will become clear as we progress:" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'collect_data' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mvocabulary_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m10000\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mdata\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcount\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdictionary\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mreverse_dictionary\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcollect_data\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mbatch_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m128\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0membedding_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m128\u001b[0m \u001b[0;31m# Dimension of the embedding vector.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mskip_window\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1\u001b[0m \u001b[0;31m# How many words to consider left and right.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'collect_data' is not defined" + ] + } + ], + "source": [ + "vocabulary_size = 10000\n", + "data, count, dictionary, reverse_dictionary = collect_data(vocabulary_size=vocabulary_size)\n", + "batch_size = 128\n", + "embedding_size = 128 # Dimension of the embedding vector.\n", + "skip_window = 1 # How many words to consider left and right.\n", + "num_skips = 2 # How many times to reuse an input to generate a context." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found and verified text8.zip\n", + "['anarchism', 'originated', 'as', 'a', 'term', 'of', 'abuse']\n", + "Initialized\n", + "Average loss at step 0 : 9.24670124054\n", + "Nearest to seven: garcia, baba, numeral, distinguished, campaigns, pride, notable, frisian,\n", + "Nearest to are: sea, associate, tons, absorbed, derived, val, mccarthy, mixture,\n", + "Nearest to up: coronation, equilibrium, fm, championships, latvian, routine, bring, lowest,\n", + "Nearest to one: ancestry, torah, mc, happen, re, especially, mercy, album,\n", + "Nearest to a: joke, principal, desert, dramatic, gradual, dimensional, particularly, gaza,\n", + "Nearest to will: prosecution, eye, kentucky, dangers, ontario, graham, immediate, feet,\n", + "Nearest to two: economics, coast, sing, occupation, adding, harris, context, centuries,\n", + "Nearest to often: emerge, puerto, continue, angeles, proclamation, aa, discipline, nerve,\n", + "Nearest to four: convert, web, khmer, hispanic, constraints, items, definite, napoleonic,\n", + "Nearest to five: ward, proponents, hypoglycemia, life, cavity, worker, developments, oak,\n", + "Nearest to it: pull, paint, tie, fu, piercing, gin, harvard, studies,\n", + "Nearest to have: edwin, avoid, involuntary, alpha, absolute, motors, venus, trail,\n", + "Nearest to at: veteran, possible, paths, korea, khazar, thirty, homosexual, duties,\n", + "Nearest to also: sympathetic, outbreak, shock, factories, shortened, metro, casino, independence,\n", + "Nearest to than: adrian, caucasus, contested, borne, flora, runs, crossing, countryside,\n", + "Nearest to however: complaints, substantial, ties, race, ask, good, notorious, offices,\n", + "Softmax method took 53.639996 minutes to run 100 iterations\n", + "Initialized\n", + "Average loss at step 0 : 9.35233211517\n", + "Nearest to seven: epic, macau, formations, theatre, jason, thereby, mason, goal,\n", + "Nearest to are: principles, washington, cast, walking, holy, strange, wrote, sunday,\n", + "Nearest to up: manuel, differ, we, longest, perl, morris, effective, roger,\n", + "Nearest to one: openly, wayne, spoke, isaiah, http, importantly, regulations, remote,\n", + "Nearest to a: friends, what, broadcasting, less, bigfoot, city, conquered, poem,\n", + "Nearest to will: great, organizations, remote, binomial, released, possesses, christmas, UNK,\n", + "Nearest to two: involuntary, nashville, insane, energy, concludes, reference, enterprises, remote,\n", + "Nearest to often: sensory, juan, amended, electronics, resembles, mandatory, edge, lit,\n", + "Nearest to four: surprising, singapore, constellations, shoulder, bounded, specially, rogers, eggs,\n", + "Nearest to five: springer, creed, salvation, header, critics, tr, case, presence,\n", + "Nearest to it: essence, cannot, jets, seemed, baptists, cryonics, hunt, graves,\n", + "Nearest to have: pre, theoretical, xiv, popularized, cook, seal, phrases, sur,\n", + "Nearest to at: contribution, leslie, probable, derived, pradesh, neither, philosopher, different,\n", + "Nearest to also: intentionally, render, servants, correctly, muslim, modules, connecticut, invoked,\n", + "Nearest to than: suited, coastal, color, agave, coaches, hezbollah, indirectly, heroin,\n", + "Nearest to however: estate, richard, footage, oak, material, districts, nicknamed, solve,\n", + "Average loss at step 2000 : 9.33152878189\n", + "Average loss at step 
4000 : 9.32041128635\n", + "Average loss at step 6000 : 9.31568084288\n", + "Average loss at step 8000 : 9.3075892849\n", + "Average loss at step 10000 : 9.30183052254\n", + "Nearest to seven: zero, vs, epic, theatre, in, thereby, mason, risk,\n", + "Nearest to are: gb, vs, and, is, principles, washington, holy, quarters,\n", + "Nearest to up: differ, vs, we, longest, manuel, effective, if, sigma,\n", + "Nearest to one: zero, UNK, vs, and, the, nine, phi, in,\n", + "Nearest to a: the, UNK, vs, and, one, fao, zero, phi,\n", + "Nearest to will: great, remote, he, binomial, in, organizations, trial, changing,\n", + "Nearest to two: vs, UNK, one, nine, and, the, in, zero,\n", + "Nearest to often: sensory, juan, vowels, amended, resembles, edge, close, arizona,\n", + "Nearest to four: zero, one, surprising, constellations, UNK, psi, agave, volume,\n", + "Nearest to five: UNK, zero, vs, critics, and, one, in, jpg,\n", + "Nearest to it: vs, cannot, UNK, a, essence, jets, seemed, he,\n", + "Nearest to have: pre, xiv, theoretical, seal, cook, teams, ibid, sur,\n", + "Nearest to at: in, probable, contribution, leslie, and, derived, one, philosopher,\n", + "Nearest to also: intentionally, launch, muslim, vs, UNK, deceased, correctly, usually,\n", + "Nearest to than: agave, and, one, coaches, coastal, color, suited, indirectly,\n", + "Nearest to however: UNK, estate, richard, hugh, districts, nicknamed, brother, footage,\n" + ] + } + ], + "source": [ + "import urllib.request\n", + "import collections\n", + "import math\n", + "import os\n", + "import random\n", + "import zipfile\n", + "import datetime as dt\n", + "\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "\n", + "\n", + "def maybe_download(filename, url, expected_bytes):\n", + " \"\"\"Download a file if not present, and make sure it's the right size.\"\"\"\n", + " if not os.path.exists(filename):\n", + " filename, _ = urllib.request.urlretrieve(url + filename, filename)\n", + " statinfo = os.stat(filename)\n", + " if statinfo.st_size == expected_bytes:\n", + " print('Found and verified', filename)\n", + " else:\n", + " print(statinfo.st_size)\n", + " raise Exception(\n", + " 'Failed to verify ' + filename + '. 
Can you get to it with a browser?')\n", + " return filename\n", + "\n", + "\n", + "# Read the data into a list of strings.\n", + "def read_data(filename):\n", + " \"\"\"Extract the first file enclosed in a zip file as a list of words.\"\"\"\n", + " with zipfile.ZipFile(filename) as f:\n", + " data = tf.compat.as_str(f.read(f.namelist()[0])).split()\n", + " return data\n", + "\n", + "def build_dataset(words, n_words):\n", + " \"\"\"Process raw inputs into a dataset.\"\"\"\n", + " count = [['UNK', -1]]\n", + " count.extend(collections.Counter(words).most_common(n_words - 1))\n", + " dictionary = dict()\n", + " for word, _ in count:\n", + " dictionary[word] = len(dictionary)\n", + " data = list()\n", + " unk_count = 0\n", + " for word in words:\n", + " if word in dictionary:\n", + " index = dictionary[word]\n", + " else:\n", + " index = 0 # dictionary['UNK']\n", + " unk_count += 1\n", + " data.append(index)\n", + " count[0][1] = unk_count\n", + " reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n", + " return data, count, dictionary, reversed_dictionary\n", + "\n", + "\n", + "def collect_data(vocabulary_size=10000):\n", + " url = 'http://mattmahoney.net/dc/'\n", + " filename = maybe_download('text8.zip', url, 31344016)\n", + " vocabulary = read_data(filename)\n", + " print(vocabulary[:7])\n", + " data, count, dictionary, reverse_dictionary = build_dataset(vocabulary,\n", + " vocabulary_size)\n", + " del vocabulary # Hint to reduce memory.\n", + " return data, count, dictionary, reverse_dictionary\n", + "\n", + "data_index = 0\n", + "# generate batch data\n", + "def generate_batch(data, batch_size, num_skips, skip_window):\n", + " global data_index\n", + " assert batch_size % num_skips == 0\n", + " assert num_skips <= 2 * skip_window\n", + " batch = np.ndarray(shape=(batch_size), dtype=np.int32)\n", + " context = np.ndarray(shape=(batch_size, 1), dtype=np.int32)\n", + " span = 2 * skip_window + 1 # [ skip_window input_word skip_window ]\n", + " buffer = collections.deque(maxlen=span)\n", + " for _ in range(span):\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " for i in range(batch_size // num_skips):\n", + " target = skip_window # input word at the center of the buffer\n", + " targets_to_avoid = [skip_window]\n", + " for j in range(num_skips):\n", + " while target in targets_to_avoid:\n", + " target = random.randint(0, span - 1)\n", + " targets_to_avoid.append(target)\n", + " batch[i * num_skips + j] = buffer[skip_window] # this is the input word\n", + " context[i * num_skips + j, 0] = buffer[target] # these are the context words\n", + " buffer.append(data[data_index])\n", + " data_index = (data_index + 1) % len(data)\n", + " # Backtrack a little bit to avoid skipping words in the end of a batch\n", + " data_index = (data_index + len(data) - span) % len(data)\n", + " return batch, context\n", + "\n", + "vocabulary_size = 10000\n", + "data, count, dictionary, reverse_dictionary = collect_data(vocabulary_size=vocabulary_size)\n", + "\n", + "batch_size = 128\n", + "embedding_size = 300 # Dimension of the embedding vector.\n", + "skip_window = 2 # How many words to consider left and right.\n", + "num_skips = 2 # How many times to reuse an input to generate a label.\n", + "\n", + "# We pick a random validation set to sample nearest neighbors. 
Here we limit the\n", + "# validation samples to the words that have a low numeric ID, which by\n", + "# construction are also the most frequent.\n", + "valid_size = 16 # Random set of words to evaluate similarity on.\n", + "valid_window = 100 # Only pick dev samples in the head of the distribution.\n", + "valid_examples = np.random.choice(valid_window, valid_size, replace=False)\n", + "num_sampled = 64 # Number of negative examples to sample.\n", + "\n", + "graph = tf.Graph()\n", + "\n", + "with graph.as_default():\n", + "\n", + " # Input data.\n", + " train_inputs = tf.placeholder(tf.int32, shape=[batch_size])\n", + " train_context = tf.placeholder(tf.int32, shape=[batch_size, 1])\n", + " valid_dataset = tf.constant(valid_examples, dtype=tf.int32)\n", + "\n", + " # Look up embeddings for inputs.\n", + " embeddings = tf.Variable(\n", + " tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n", + " embed = tf.nn.embedding_lookup(embeddings, train_inputs)\n", + "\n", + " # Construct the variables for the softmax\n", + " weights = tf.Variable(\n", + " tf.truncated_normal([embedding_size, vocabulary_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + " biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + " hidden_out = tf.transpose(tf.matmul(tf.transpose(weights), tf.transpose(embed))) + biases\n", + "\n", + " # convert train_context to a one-hot format\n", + " train_one_hot = tf.one_hot(train_context, vocabulary_size)\n", + "\n", + " cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, labels=train_one_hot))\n", + "\n", + " # Construct the SGD optimizer using a learning rate of 1.0.\n", + " optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(cross_entropy)\n", + "\n", + " # Compute the cosine similarity between minibatch examples and all embeddings.\n", + " norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))\n", + " normalized_embeddings = embeddings / norm\n", + " valid_embeddings = tf.nn.embedding_lookup(\n", + " normalized_embeddings, valid_dataset)\n", + " similarity = tf.matmul(\n", + " valid_embeddings, normalized_embeddings, transpose_b=True)\n", + "\n", + " # Add variable initializer.\n", + " init = tf.global_variables_initializer()\n", + "\n", + "\n", + "def run(graph, num_steps):\n", + " with tf.Session(graph=graph) as session:\n", + " # We must initialize all variables before we use them.\n", + " init.run()\n", + " print('Initialized')\n", + "\n", + " average_loss = 0\n", + " for step in range(num_steps):\n", + " batch_inputs, batch_context = generate_batch(data,\n", + " batch_size, num_skips, skip_window)\n", + " feed_dict = {train_inputs: batch_inputs, train_context: batch_context}\n", + "\n", + " # We perform one update step by evaluating the optimizer op (including it\n", + " # in the list of returned values for session.run()\n", + " _, loss_val = session.run([optimizer, cross_entropy], feed_dict=feed_dict)\n", + " average_loss += loss_val\n", + "\n", + " if step % 2000 == 0:\n", + " if step > 0:\n", + " average_loss /= 2000\n", + " # The average loss is an estimate of the loss over the last 2000 batches.\n", + " print('Average loss at step ', step, ': ', average_loss)\n", + " average_loss = 0\n", + "\n", + " # Note that this is expensive (~20% slowdown if computed every 500 steps)\n", + " if step % 10000 == 0:\n", + " sim = similarity.eval()\n", + " for i in range(valid_size):\n", + " valid_word = reverse_dictionary[valid_examples[i]]\n", + " top_k = 8 # number of nearest neighbors\n", + 
" nearest = (-sim[i, :]).argsort()[1:top_k + 1]\n", + " log_str = 'Nearest to %s:' % valid_word\n", + " for k in range(top_k):\n", + " close_word = reverse_dictionary[nearest[k]]\n", + " log_str = '%s %s,' % (log_str, close_word)\n", + " print(log_str)\n", + " final_embeddings = normalized_embeddings.eval()\n", + "\n", + "num_steps = 100\n", + "softmax_start_time = dt.datetime.now()\n", + "run(graph, num_steps=num_steps)\n", + "softmax_end_time = dt.datetime.now()\n", + "print(\"Softmax method took {} minutes to run 100 iterations\".format((softmax_end_time-softmax_start_time).total_seconds()))\n", + "\n", + "with graph.as_default():\n", + "\n", + " # Construct the variables for the NCE loss\n", + " nce_weights = tf.Variable(\n", + " tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + " nce_biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "\n", + " nce_loss = tf.reduce_mean(\n", + " tf.nn.nce_loss(weights=nce_weights,\n", + " biases=nce_biases,\n", + " labels=train_context,\n", + " inputs=embed,\n", + " num_sampled=num_sampled,\n", + " num_classes=vocabulary_size))\n", + "\n", + " optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(nce_loss)\n", + "\n", + " # Add variable initializer.\n", + " init = tf.global_variables_initializer()\n", + "\n", + "num_steps = 50000\n", + "nce_start_time = dt.datetime.now()\n", + "run(graph, num_steps)\n", + "nce_end_time = dt.datetime.now()\n", + "print(\"NCE method took {} minutes to run 100 iterations\".format((nce_end_time-nce_start_time).total_seconds()))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next we setup some TensorFlow placeholders that will hold our input words (their integer indexes) and context words which we are trying to predict. 
We also need to create a constant to hold our validation set indexes in TensorFlow:" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "train_inputs = tf.placeholder(tf.int32, shape=[batch_size])\n", + "train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])\n", + "valid_dataset = tf.constant(valid_examples, dtype=tf.int32)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to setup the embedding matrix variable / tensor – this is straight-forward using the TensorFlow embedding_lookup() function, which I’ll explain shortly:" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Look up embeddings for inputs.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m embeddings = tf.Variable(\n\u001b[0;32m----> 3\u001b[0;31m tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0membed\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0membedding_lookup\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membeddings\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Look up embeddings for inputs.\n", + "embeddings = tf.Variable(\n", + " tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))\n", + "embed = tf.nn.embedding_lookup(embeddings, train_inputs)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first step in the code above is to create the embeddings variable, which is effectively the weights of the connections to the linear hidden layer. We initialize the variable with a random uniform distribution between -1.0 to 1.0. The size of this variable is (vocabulary_size, embedding_size) – the vocabulary_size is the 10,000 words that we have used to setup our data in the previous section. This is basically our one-hot vector input, where the only element with a value of “1” is the current input word, all the other values are set to “0”. The second dimension, embedding_size, is our hidden layer size, and is the length of our new, smaller, representation of our words. We can also think of this tensor as a big lookup table – the rows are each word in our vocabulary, and the columns are our new vector representation of each of these words. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As can be observed, “anarchism” (which would actually be represented by a unique integer or one-hot vector) is now expressed as [0.5, 0.1, -0.1]. We can “look up” anarchism by finding its integer index and searching the rows of embeddings to find the embedding vector: [0.5, 0.1, -0.1].\n", + "\n", + "The next line in the code involves the tf.nn.embedding_lookup() function, which is a useful helper function in TensorFlow for this type of task. 
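\n", + "\n", + "Conceptually, this lookup is nothing more than selecting rows of the embedding matrix by their integer indexes. A plain-NumPy sketch of the same idea (the numbers are made up, not trained values):\n", + "\n", + "```python\n", + "import numpy as np\n", + "\n", + "# a made-up 4-word vocabulary with 3-dimensional embeddings\n", + "embeddings = np.array([[ 0.1,  0.0,  0.3],   # row 0, e.g. 'UNK'\n", + "                       [ 0.5,  0.1, -0.1],   # row 1, e.g. 'anarchism'\n", + "                       [-0.2,  0.4,  0.2],\n", + "                       [ 0.3, -0.3,  0.1]])\n", + "train_inputs = np.array([1, 3])   # integer indexes of the input words in a batch\n", + "embed = embeddings[train_inputs]  # selects rows 1 and 3 -> shape (2, 3)\n", + "```\n", + "\n", + "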
Here’s how it works – it takes an input vector of integer indexes – in this case our train_input tensor of training input words, and “looks up” these indexes in the supplied embeddings tensor. Therefore, this command will return the current embedding vector for each of the supplied input words in the training batch. The full embedding tensor will be optimized during the training process.\n", + "\n", + "Next we have to create some weights and bias values to connect the output softmax layer, and perform the appropriate multiplication and addition. This looks like:" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Construct the variables for the softmax\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],\n\u001b[0m\u001b[1;32m 3\u001b[0m stddev=1.0 / math.sqrt(embedding_size)))\n\u001b[1;32m 4\u001b[0m \u001b[0mbiases\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mhidden_out\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmatmul\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membed\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtranspose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mweights\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mbiases\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Construct the variables for the softmax\n", + "weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + "biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "hidden_out = tf.matmul(embed, tf.transpose(weights)) + biases" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The weight variable, as it is connecting the hidden layer and the output layer, is of size (out_layer_size, hidden_layer_size) = (vocabulary_size, embedding_size). The biases, as usual, will only be single dimensional and the size of the output layer. We then multiply the embedded variable (embed) by the weights and add the bias. Now we are ready to create a softmax operation and we will use cross entropy loss to optimize the weights, biases and embeddings of the model. To do this easily, we will use the TensorFlow function softmax_cross_entropy_with_logits(). However, to use this function we first have to convert the context words / integer indices into one-hot vectors. 
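\n", + "\n", + "As a reminder of what that conversion produces, a one-hot vector is simply a row of zeros with a single 1 at the word's integer index. An illustrative NumPy one-liner (not the tutorial's code; the graph uses tf.one_hot instead):\n", + "\n", + "```python\n", + "import numpy as np\n", + "\n", + "tiny_vocabulary_size = 5\n", + "label = 2                                      # integer index of a context word\n", + "one_hot = np.eye(tiny_vocabulary_size)[label]  # -> array([0., 0., 1., 0., 0.])\n", + "```\n", + "\n", + "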
The code below performs both of these steps, and also adds a gradient descent optimization operation:" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'train_context' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# convert train_context to a one-hot format\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mtrain_one_hot\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mone_hot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_context\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvocabulary_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, \n\u001b[1;32m 4\u001b[0m labels=train_one_hot))\n\u001b[1;32m 5\u001b[0m \u001b[0;31m# Construct the SGD optimizer using a learning rate of 1.0.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'train_context' is not defined" + ] + } + ], + "source": [ + "# convert train_context to a one-hot format\n", + "train_one_hot = tf.one_hot(train_context, vocabulary_size)\n", + "cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=hidden_out, \n", + " labels=train_one_hot))\n", + "# Construct the SGD optimizer using a learning rate of 1.0.\n", + "optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(cross_entropy)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we need to perform our similarity assessments to check on how the model is performing as it trains. To determine which words are similar to each other, we need to perform some sort of operation that measures the “distances” between the various word embedding vectors for the different words. In this case, we will use the cosine similarity measure of distance between vectors." 
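, + "\n", + "\n", + "For two vectors, cosine similarity is their dot product divided by the product of their L2 norms, so once every embedding has been scaled to unit length it reduces to a plain dot product (which is what the matrix multiplication further below exploits). A quick NumPy illustration with made-up vectors, separate from the tutorial code:\n", + "\n", + "```python\n", + "import numpy as np\n", + "\n", + "a = np.array([0.5, 0.1, -0.1])   # hypothetical embedding of one word\n", + "b = np.array([0.4, 0.2,  0.0])   # hypothetical embedding of another word\n", + "cos_sim = a.dot(b) / (np.linalg.norm(a) * np.linalg.norm(b))  # ~0.95, i.e. similar directions\n", + "```"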
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First, we calculate the L2 norm of each vector using the tf.square(), tf.reduce_sum() and tf.sqrt() functions to calculate the square, summation and square root of the norm, respectively:" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Compute the cosine similarity between minibatch examples and all embeddings.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mnorm\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msqrt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreduce_sum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msquare\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membeddings\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkeep_dims\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mnormalized_embeddings\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0membeddings\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mnorm\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'embeddings' is not defined" + ] + } + ], + "source": [ + "# Compute the cosine similarity between minibatch examples and all embeddings.\n", + "norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))\n", + "normalized_embeddings = embeddings / norm" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we can look up our validation words / vectors using the tf.nn.embedding_lookup() that we discussed earlier\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'normalized_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m valid_embeddings = tf.nn.embedding_lookup(\n\u001b[0;32m----> 2\u001b[0;31m normalized_embeddings, valid_dataset)\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'normalized_embeddings' is not defined" + ] + } + ], + "source": [ + "valid_embeddings = tf.nn.embedding_lookup(\n", + " normalized_embeddings, valid_dataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As before, we are supplying a list of integers (that correspond to our validation vocabulary words) to the embedding_lookup() function, which looks up these rows in the normalized_embeddings tensor, and returns the subset of validation normalized embeddings. 
Now that we have the normalized validation tensor, valid_embeddings, we can multiply this by the full normalized vocabulary (normalized_embedding) to finalize our similarity calculation:" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'valid_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m similarity = tf.matmul(\n\u001b[0;32m----> 2\u001b[0;31m valid_embeddings, normalized_embeddings, transpose_b=True)\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'valid_embeddings' is not defined" + ] + } + ], + "source": [ + "similarity = tf.matmul(\n", + " valid_embeddings, normalized_embeddings, transpose_b=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This operation will return a (validation_size, vocabulary_size) sized tensor, where each row refers to one of our validation words and the columns refer to the similarity between the validation word and all the other words in the vocabulary." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Running the TensorFlow model" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The code below initializes the variables and feeds in each data batch to the training loop, printing the average loss every 2000 iterations. If this code doesn’t make sense to you, check out my TensorFlow tutorial." + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'graph' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mwith\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSession\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0;31m# We must initialize all variables before we use them.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0minit\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Initialized'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'graph' is not defined" + ] + } + ], + "source": [ + "with tf.Session(graph=graph) as session:\n", + " # We must initialize all variables before we use them.\n", + " init.run()\n", + " print('Initialized')\n", + "\n", + " average_loss = 0\n", + " for step in range(num_steps):\n", + " batch_inputs, batch_context = generate_batch(data,\n", + " batch_size, num_skips, skip_window)\n", + " feed_dict = {train_inputs: batch_inputs, train_context: batch_context}\n", + "\n", + " 
# We perform one update step by evaluating the optimizer op (including it\n", + " # in the list of returned values for session.run()\n", + " _, loss_val = session.run([optimizer, cross_entropy], feed_dict=feed_dict)\n", + " average_loss += loss_val\n", + "\n", + " if step % 2000 == 0:\n", + " if step > 0:\n", + " average_loss /= 2000\n", + " # The average loss is an estimate of the loss over the last 2000 batches.\n", + " print('Average loss at step ', step, ': ', average_loss)\n", + " average_loss = 0" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we want to print out the words which are most similar to our validation words – we do this by calling the similarity operation we defined above and sorting the results (note, this is only performed every 10,000 iterations as it is computationally expensive):" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'step' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Note that this is expensive (~20% slowdown if computed every 500 steps)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0;32mif\u001b[0m \u001b[0mstep\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0;36m10000\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0msim\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msimilarity\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0meval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalid_size\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mvalid_word\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreverse_dictionary\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvalid_examples\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'step' is not defined" + ] + } + ], + "source": [ + "# Note that this is expensive (~20% slowdown if computed every 500 steps)\n", + "if step % 10000 == 0:\n", + " sim = similarity.eval()\n", + " for i in range(valid_size):\n", + " valid_word = reverse_dictionary[valid_examples[i]]\n", + " top_k = 8 # number of nearest neighbors\n", + " nearest = (-sim[i, :]).argsort()[1:top_k + 1]\n", + " log_str = 'Nearest to %s:' % valid_word\n", + " for k in range(top_k):\n", + " close_word = reverse_dictionary[nearest[k]]\n", + " log_str = '%s %s,' % (log_str, close_word)\n", + " print(log_str)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This function first evaluates the similarity operation, which returns an array of cosine similarity values for each of the validation words. Then we iterate through each of the validation words, taking the top 8 closest words by using argsort() on the negative of the similarity to arrange the values in descending order. 
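\n", + "\n", + "A tiny illustration of that argsort() trick, with made-up similarity values (an aside, not part of the tutorial code):\n", + "\n", + "```python\n", + "import numpy as np\n", + "\n", + "sim_row = np.array([0.2, 0.9, 0.5, 0.7])  # similarities of one validation word to four words\n", + "(-sim_row).argsort()                      # -> array([1, 3, 2, 0]): indexes from most to least similar\n", + "```\n", + "\n", + "Note that the slice [1:top_k + 1] in the tutorial code skips the very first entry, because the word most similar to any validation word is the validation word itself. 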
The code then prints out these 8 closest words so we can monitor how the embedding process is performing.\n", + "\n", + "Finally, after all the training iterations are finished, we can assign the final embeddings to a separate tensor for use later (most likely in some sort of other deep learning or machine learning process):" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'normalized_embeddings' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mfinal_embeddings\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnormalized_embeddings\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0meval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mNameError\u001b[0m: name 'normalized_embeddings' is not defined" + ] + } + ], + "source": [ + "final_embeddings = normalized_embeddings.eval()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So now we’re done – or are we? The code for this softmax method of Word2Vec is on this site’s Github repository – you could try running it, but I wouldn’t recommend it. Why? Because it is seriously slow." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Speeding things up – the “true” Word2Vec method" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The fact is, performing softmax evaluations and updating the weights over a 10,000 word output/vocabulary is really slow. Why’s that?" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In the context of what we are working on, the softmax function will predict what words have the highest probability of being in the context of the input word. To determine that probability however, the denominator of the softmax function has to evaluate all the possible context words in the vocabulary. Therefore, we need 300 x 10,000 = 3M weights, all of which need to be trained for the softmax output. This slows things down.\n", + "\n", + "There is an alternative, faster scheme called Noise Contrastive Estimation (NCE). Instead of taking the probability of the context word compared to all of the possible context words in the vocabulary, this method randomly samples 2-20 possible context words and evaluates the probability only from these. I won’t go into the nitty gritty details here, but suffice to say that this method has been shown to perform well and drastically speeds up the training process.\n", + "\n", + "TensorFlow has helped us out here, and has supplied an NCE loss function that we can use called tf.nn.nce_loss() which we can supply weight and bias variables to. Using this function, the time to perform 100 training iterations reduced from 25 seconds with the softmax method to less than 1 second using the NCE method. An awesome improvement! 
We replace the softmax lines with the following in our code:" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'vocabulary_size' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# Construct the variables for the NCE loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m nce_weights = tf.Variable(\n\u001b[0;32m----> 3\u001b[0;31m tf.truncated_normal([vocabulary_size, embedding_size],\n\u001b[0m\u001b[1;32m 4\u001b[0m stddev=1.0 / math.sqrt(embedding_size)))\n\u001b[1;32m 5\u001b[0m \u001b[0mnce_biases\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mVariable\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mvocabulary_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'vocabulary_size' is not defined" + ] + } + ], + "source": [ + "# Construct the variables for the NCE loss\n", + "nce_weights = tf.Variable(\n", + " tf.truncated_normal([vocabulary_size, embedding_size],\n", + " stddev=1.0 / math.sqrt(embedding_size)))\n", + "nce_biases = tf.Variable(tf.zeros([vocabulary_size]))\n", + "\n", + "nce_loss = tf.reduce_mean(\n", + " tf.nn.nce_loss(weights=nce_weights,\n", + " biases=nce_biases,\n", + " labels=train_context,\n", + " inputs=embed,\n", + " num_sampled=num_sampled,\n", + " num_classes=vocabulary_size))\n", + "\n", + "optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(nce_loss)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we are good to run the code. You can get the full code here. As discussed, every 10,000 iterations the code outputs the validation words and the words that the Word2Vec system deems are similar. Below, you can see the improvement for some selected validation words between the random initialization and at the 50,000 iteration mark:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}