# -*- coding: utf-8 -*-
import numpy as np
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random input and output data
x = np.random.randn(N, D_in)
y = np.random.randn(N, D_out)
# Randomly initialize weights
w1 = np.random.randn(D_in, H)
w2 = np.random.randn(H, D_out)
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y
    h = x.dot(w1)
    h_relu = np.maximum(h, 0)
    y_pred = h_relu.dot(w2)
    # Compute and print loss
    loss = np.square(y_pred - y).sum()
    print(t, loss)
    # Backprop to compute gradients of w1 and w2 with respect to loss
    grad_y_pred = 2.0 * (y_pred - y)
    grad_w2 = h_relu.T.dot(grad_y_pred)
    grad_h_relu = grad_y_pred.dot(w2.T)
    grad_h = grad_h_relu.copy()
    grad_h[h < 0] = 0
    grad_w1 = x.T.dot(grad_h)
    # Update weights
    w1 -= learning_rate * grad_w1
    w2 -= learning_rate * grad_w2
0 29261998.9383 1 23584624.4749 2 21318274.0133 ... 498 2.73931894319e-06 499 2.63045198276e-06
Clearly, modern deep neural networks need more than what our beloved numpy can offer.
Here we introduce the most fundamental PyTorch concept: the Tensor. A PyTorch Tensor is conceptually identical to a numpy array: a Tensor is an n-dimensional array, and PyTorch provides many functions for operating on these Tensors. Like numpy arrays, PyTorch Tensors do not know anything about deep learning or computational graphs or gradients; they are a generic tool for scientific computing.
However unlike numpy, PyTorch Tensors can utilize GPUs to accelerate their numeric computations. To run a PyTorch Tensor on GPU, you simply need to cast it to a new datatype.
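For example (a minimal sketch, assuming a CUDA-capable GPU and the same legacy Tensor API used in the code below), the cast is all it takes:
import torch
dtype = torch.cuda.FloatTensor     # GPU datatype; torch.FloatTensor stays on the CPU
a = torch.randn(3, 4).type(dtype)  # cast a freshly created CPU Tensor onto the GPU
b = torch.randn(4, 5).type(dtype)
c = a.mm(b)                        # this matrix multiply now runs on the GPU
print(c.type())                    # torch.cuda.FloatTensor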
Here we use PyTorch Tensors to fit a two-layer network to random data. Like the numpy example above we need to manually implement the forward and backward passes through the network:
import torch
dtype = torch.FloatTensor
# dtype = torch.cuda.FloatTensor # Uncomment this to run on GPU
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random input and output data
x = torch.randn(N, D_in).type(dtype)
y = torch.randn(N, D_out).type(dtype)
# Randomly initialize weights
w1 = torch.randn(D_in, H).type(dtype)
w2 = torch.randn(H, D_out).type(dtype)
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y
    h = x.mm(w1)
    h_relu = h.clamp(min=0)
    y_pred = h_relu.mm(w2)
    # Compute and print loss
    loss = (y_pred - y).pow(2).sum()
    print(t, loss)
    # Backprop to compute gradients of w1 and w2 with respect to loss
    grad_y_pred = 2.0 * (y_pred - y)
    grad_w2 = h_relu.t().mm(grad_y_pred)
    grad_h_relu = grad_y_pred.mm(w2.t())
    grad_h = grad_h_relu.clone()
    grad_h[h < 0] = 0
    grad_w1 = x.t().mm(grad_h)
    # Update weights using gradient descent
    w1 -= learning_rate * grad_w1
    w2 -= learning_rate * grad_w2
0 28214897.691271067 1 25380405.792548403 2 26288556.067442656 ... 498 4.256601870983312e-05 499 4.199306232773037e-05
PyTorch Variables and autograd. The autograd package provides this functionality: the forward pass of your network defines a computational graph, where nodes in the graph are Tensors and edges are functions that produce output Tensors from input Tensors. Backpropagating through this graph then allows us to easily compute gradients.
Here we wrap each PyTorch Tensor in a Variable object; a Variable represents a node in the computational graph. If x is a Variable, then x.data is a Tensor and x.grad is another Variable holding the gradient of x with respect to some scalar value.
PyTorch Variables have the same API as PyTorch Tensors: almost any operation you can perform on a Tensor also works on a Variable; the difference is that operations on Variables define a computational graph, allowing us to automatically compute gradients.
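As a tiny illustration (a minimal sketch using the same legacy Variable API as the examples below), wrapping a Tensor in a Variable and calling backward() on a scalar output populates .grad:
import torch
from torch.autograd import Variable
x = Variable(torch.ones(2, 2), requires_grad=True)
out = (x * 3).sum()   # out is a Variable holding a single scalar
out.backward()        # backprop from the scalar output through the graph
print(x.data)         # the underlying Tensor wrapped by x
print(x.grad)         # a Variable holding d(out)/dx, here a 2x2 Tensor of 3s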
# Use of Variables and autograd in a two-layer network, with no need to manually implement backprop!
import torch
from torch.autograd import Variable
dtype = torch.FloatTensor
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random Tensors to hold input and outputs and wrap them in Variables.
x = Variable(torch.randn(N, D_in).type(dtype), requires_grad=False) # requires_grad=False means no need to compute gradients
y = Variable(torch.randn(N, D_out).type(dtype), requires_grad=False)
# Create random Tensors to hold weights and wrap them in Variables.
# Setting requires_grad=True indicates that we want to compute gradients with respect to these Variables during the backward pass.
w1 = Variable(torch.randn(D_in, H).type(dtype), requires_grad=True)
w2 = Variable(torch.randn(H, D_out).type(dtype), requires_grad=True)
learning_rate = 1e-6
for t in range(500):
    # Forward pass: compute predicted y using operations on Variables; these
    # are exactly the same operations we used to compute the forward pass using
    # Tensors, but we do not need to keep references to intermediate values since
    # we are not implementing the backward pass by hand.
    y_pred = x.mm(w1).clamp(min=0).mm(w2)
    # Compute and print loss using operations on Variables.
    # Now loss is a Variable of shape (1,) and loss.data is a Tensor of shape
    # (1,); loss.data[0] is a scalar value holding the loss.
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.data[0])
    # Use autograd to compute the backward pass. This call will compute the
    # gradient of loss with respect to all Variables with requires_grad=True.
    # After this call w1.grad and w2.grad will be Variables holding the gradient
    # of the loss with respect to w1 and w2 respectively.
    loss.backward()
    # Update weights using gradient descent; w1.data and w2.data are Tensors,
    # w1.grad and w2.grad are Variables and w1.grad.data and w2.grad.data are
    # Tensors.
    w1.data -= learning_rate * w1.grad.data
    w2.data -= learning_rate * w2.grad.data
    # Manually zero the gradients after updating weights
    w1.grad.data.zero_()
    w2.grad.data.zero_()
0 35878500.0 1 33502642.0 2 31638146.0 ... 498 0.0004182616830803454 499 0.0004095847543794662
Under the hood, each primitive autograd operator is really two functions that operate on Tensors. The forward function computes output Tensors from input Tensors. The backward function receives the gradient of the output Tensors with respect to some scalar value, and computes the gradient of the input Tensors with respect to that same scalar value.
In PyTorch we can easily define our own autograd operator by defining a subclass of torch.autograd.Function and implementing the forward and backward functions. We can then use our new autograd operator by constructing an instance and calling it like a function, passing Variables containing input data.
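To make the pattern concrete before the full ReLU example, here is a minimal sketch of a hypothetical custom operator (MyExp is our own name, not part of PyTorch) written in the same legacy instance-style Function API used below; it computes exp(x) and reuses the saved output in its backward pass:
import torch
from torch.autograd import Variable
class MyExp(torch.autograd.Function):
    def forward(self, input):
        output = input.exp()
        self.save_for_backward(output)  # cache the output Tensor for the backward pass
        return output
    def backward(self, grad_output):
        output, = self.saved_tensors
        return grad_output * output     # d/dx exp(x) = exp(x)
x = Variable(torch.randn(5), requires_grad=True)
y = MyExp()(x).sum()   # construct an instance and call it like a function
y.backward()
print(x.grad)          # matches exp(x)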
In this example we define our own custom autograd function for performing the ReLU nonlinearity, and use it to implement our two-layer network:
# -*- coding: utf-8 -*-
import torch
from torch.autograd import Variable
class MyReLU(torch.autograd.Function):
    """
    We can implement our own custom autograd Functions by subclassing
    torch.autograd.Function and implementing the forward and backward passes
    which operate on Tensors.
    """
    def forward(self, input):
        """
        In the forward pass we receive a Tensor containing the input and return a
        Tensor containing the output. You can cache arbitrary Tensors for use in the
        backward pass using the save_for_backward method.
        """
        self.save_for_backward(input)
        return input.clamp(min=0)

    def backward(self, grad_output):
        """
        In the backward pass we receive a Tensor containing the gradient of the loss
        with respect to the output, and we need to compute the gradient of the loss
        with respect to the input.
        """
        input, = self.saved_tensors
        grad_input = grad_output.clone()
        grad_input[input < 0] = 0
        return grad_input
dtype = torch.FloatTensor
# dtype = torch.cuda.FloatTensor # Uncomment this to run on GPU
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random Tensors to hold input and outputs, and wrap them in Variables.
x = Variable(torch.randn(N, D_in).type(dtype), requires_grad=False)
y = Variable(torch.randn(N, D_out).type(dtype), requires_grad=False)
# Create random Tensors for weights, and wrap them in Variables.
w1 = Variable(torch.randn(D_in, H).type(dtype), requires_grad=True)
w2 = Variable(torch.randn(H, D_out).type(dtype), requires_grad=True)
learning_rate = 1e-6
for t in range(500):
    # Construct an instance of our MyReLU class to use in our network
    relu = MyReLU()
    # Forward pass: compute predicted y using operations on Variables; we compute
    # ReLU using our custom autograd operation.
    y_pred = relu(x.mm(w1)).mm(w2)
    # Compute and print loss
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.data[0])
    # Use autograd to compute the backward pass.
    loss.backward()
    # Update weights using gradient descent
    w1.data -= learning_rate * w1.grad.data
    w2.data -= learning_rate * w2.grad.data
    # Manually zero the gradients after updating weights
    w1.grad.data.zero_()
    w2.grad.data.zero_()
0 37267740.0 1 35764716.0 2 35199480.0 ... 498 0.00170668784994632 499 0.0016636578366160393
When building neural networks we frequently think of arranging the computation into layers, some of which have learnable parameters which will be optimized during learning.
In TensorFlow, packages like Keras, TensorFlow-Slim, and TFLearn provide higher-level abstractions over raw computational graphs that are useful for building neural networks.
In PyTorch, the nn package serves this same purpose. The nn package defines a set of Modules, which are roughly equivalent to neural network layers. A Module receives input Variables and computes output Variables, but may also hold internal state such as Variables containing learnable parameters. The nn package also defines a set of useful loss functions that are commonly used when training neural networks.
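For instance (a minimal sketch using the same Variable-era API as the code below), a single Linear Module already holds its own learnable weight and bias and can be called like a function:
import torch
from torch.autograd import Variable
linear = torch.nn.Linear(1000, 100)    # a Module holding internal weight and bias parameters
x = Variable(torch.randn(64, 1000))
h = linear(x)                          # calling the Module produces an output Variable
print(h.size())                        # torch.Size([64, 100])
print(linear.weight.size(), linear.bias.size())  # torch.Size([100, 1000]) torch.Size([100])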
In this example we use the nn package to implement our two-layer network:
# -*- coding: utf-8 -*-
import torch
from torch.autograd import Variable
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random Tensors to hold inputs and outputs, and wrap them in Variables.
x = Variable(torch.randn(N, D_in))
y = Variable(torch.randn(N, D_out), requires_grad=False)
# Use the nn package to define our model as a sequence of layers. nn.Sequential
# is a Module which contains other Modules, and applies them in sequence to
# produce its output. Each Linear Module computes output from input using a
# linear function, and holds internal Variables for its weight and bias.
model = torch.nn.Sequential(
    torch.nn.Linear(D_in, H),
    torch.nn.ReLU(),
    torch.nn.Linear(H, D_out),
)
# The nn package also contains definitions of popular loss functions; in this
# case we will use Mean Squared Error (MSE) as our loss function.
loss_fn = torch.nn.MSELoss(size_average=False)
learning_rate = 1e-4
for t in range(500):
    # Forward pass: compute predicted y by passing x to the model. Module objects
    # override the __call__ operator so you can call them like functions. When
    # doing so you pass a Variable of input data to the Module and it produces
    # a Variable of output data.
    y_pred = model(x)
    # Compute and print loss. We pass Variables containing the predicted and true
    # values of y, and the loss function returns a Variable containing the
    # loss.
    loss = loss_fn(y_pred, y)
    print(t, loss.data[0])
    # Zero the gradients before running the backward pass.
    model.zero_grad()
    # Backward pass: compute gradient of the loss with respect to all the learnable
    # parameters of the model. Internally, the parameters of each Module are stored
    # in Variables with requires_grad=True, so this call will compute gradients for
    # all learnable parameters in the model.
    loss.backward()
    # Update the weights using gradient descent. Each parameter is a Variable, so
    # we can access its data and gradients like we did before.
    for param in model.parameters():
        param.data -= learning_rate * param.grad.data
0 680.08154296875 1 628.8499755859375 2 584.1482543945312 ... 438 3.237837881897576e-05 439 3.1431503884959966e-05 ...
3.0511764634866267e-05 441 2.961848076665774e-05 442 2.875354402931407e-05 443 2.7911590223084204e-05 444 2.7094889446743764e-05 445 2.6300884201191366e-05 446 2.553403828642331e-05 447 2.4784965717117302e-05 448 2.4059936549747363e-05 449 2.3357155441772193e-05 450 2.267470335937105e-05 451 2.2012040062691085e-05 452 2.1369320165831596e-05 453 2.0743538698297925e-05 454 2.013810990320053e-05 455 1.9549743228708394e-05 456 1.8979713786393404e-05 457 1.842488200054504e-05 458 1.78881709871348e-05 459 1.736673220875673e-05 460 1.6858253729878925e-05 461 1.6368272554245777e-05 462 1.588999839441385e-05 463 1.542721474834252e-05 464 1.4977055798226502e-05 465 1.4539912626787554e-05 466 1.4116209058556706e-05 467 1.3703524928132538e-05 468 1.330458871962037e-05 469 1.2917284038849175e-05 470 1.254109520232305e-05 471 1.2174677976872772e-05 472 1.182032883662032e-05 473 1.14756403490901e-05 474 1.1141963113914244e-05 475 1.081790560419904e-05 476 1.0501042197574861e-05 477 1.0197520168730989e-05 478 9.900572877086233e-06 479 9.611635505279992e-06 480 9.332347872259561e-06 481 9.060167940333486e-06 482 8.796627298579551e-06 483 8.540252565580886e-06 484 8.29136752145132e-06 485 8.050311407714617e-06 486 7.816828656359576e-06 487 7.589314918732271e-06 488 7.368240403593518e-06 489 7.1535123424837366e-06 490 6.947231213416671e-06 491 6.7452187977323774e-06 492 6.549066711158957e-06 493 6.358453902066685e-06 494 6.174030204419978e-06 495 5.9948902162432205e-06 496 5.820296792080626e-06 497 5.651726041833172e-06 498 5.487122507474851e-06 499 5.328512997948565e-06
With a learning rate of $1e-4$, we can also hand the weight updates off to an optimizer from the torch.optim package (here Adam), instead of updating param.data by hand:
import torch
from torch.autograd import Variable
N, D_in, H, D_out = 64, 1000, 100, 10
x = Variable(torch.randn(N, D_in))
y = Variable(torch.randn(N, D_out), requires_grad=False)
# Define the model as a sequence of layers: Linear -> ReLU -> Linear
model = torch.nn.Sequential(
    torch.nn.Linear(D_in, H),
    torch.nn.ReLU(),
    torch.nn.Linear(H, D_out)
)
loss_fxn = torch.nn.MSELoss(size_average=False)
learning_rate = 1e-4
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
# Training loop: forward pass, compute the loss, backprop, then let the optimizer update the weights
for i in range(500):
    y_pred = model(x)
    loss = loss_fxn(y_pred, y)
    print(i, loss.data[0])
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
499 5.173239514988381e-06
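Because all of the update logic lives in the optimizer object, switching optimization algorithms is a one-line change. As a minimal illustrative sketch (reusing model, loss_fxn, x, and y from above; the choice of SGD with momentum and the momentum value are examples, not part of the original run), the same loop works unchanged:
# Illustrative variant: same training loop as above, but with SGD + momentum
# instead of Adam. Only the optimizer construction changes.
optimizer = torch.optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)
for i in range(500):
    y_pred = model(x)
    loss = loss_fxn(y_pred, y)
    print(i, loss.data[0])
    optimizer.zero_grad()  # clear gradients from the previous iteration
    loss.backward()        # compute gradients of the loss w.r.t. all parameters
    optimizer.step()       # apply the SGD-with-momentum update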
For more complex computations, you can define your own module by subclassing nn.Module:
import torch
from torch.autograd import Variable
class DoubleLayerNet(torch.nn.Module):
    def __init__(self, D_in, H, D_out):
        # Initialize two instances of nn.Linear as submodules
        super(DoubleLayerNet, self).__init__()
        self.linear1 = torch.nn.Linear(D_in, H)
        self.linear2 = torch.nn.Linear(H, D_out)
    def forward(self, x):
        # In this function we accept a Variable of input data and
        # return a Variable of output data.
        h_relu = self.linear1(x).clamp(min=0)
        y_pred = self.linear2(h_relu)
        return y_pred
# Next, again as usual, define the batch size, input dimension, hidden dimension and output dimension
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random tensors to hold the input and output
x = Variable(torch.randn(N, D_in))
y = Variable(torch.randn(N, D_out), requires_grad=False)
# Build the model by instantiating the class defined above
my_model = DoubleLayerNet(D_in, H, D_out)
# Build the loss function and the optimizer
criterion = torch.nn.MSELoss(size_average=False)
optimizer = torch.optim.SGD(my_model.parameters(), lr=1e-4)
# and then we loop
for i in range(500):
    # Forward pass: compute predicted y by passing x to the model
    y_pred = my_model(x)
    # Compute and print the loss
    loss = criterion(y_pred, y)
    print(i, loss.data[0])
    # Zero the gradients, perform a backward pass, and update the weights
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
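Subclassing nn.Module also lets the forward pass use ordinary Python control flow, because the graph is rebuilt on every call. The sketch below is only illustrative (the class name, layer sizes, and the random reuse count are assumptions, not part of the example above): it reuses a single hidden layer a random number of times, which would be awkward to express with nn.Sequential.
import random
import torch
from torch.autograd import Variable
class DynamicNet(torch.nn.Module):
    def __init__(self, D_in, H, D_out):
        super(DynamicNet, self).__init__()
        self.input_linear = torch.nn.Linear(D_in, H)
        self.middle_linear = torch.nn.Linear(H, H)
        self.output_linear = torch.nn.Linear(H, D_out)
    def forward(self, x):
        h_relu = self.input_linear(x).clamp(min=0)
        # Reuse the same middle layer 0, 1, or 2 times; plain Python control
        # flow is fine because the graph is rebuilt on every forward pass.
        for _ in range(random.randint(0, 2)):
            h_relu = self.middle_linear(h_relu).clamp(min=0)
        return self.output_linear(h_relu)
An instance such as DynamicNet(D_in, H, D_out) could then be trained with exactly the same loop used for DoubleLayerNet above.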