Unverified Commit 450f9ca5 authored by Kiryuu Sakuya

Finish 03

parent ab24e504
ID,crim,zn,indus,chas,nox,rm,age,dis,rad,tax,ptratio,black,lstat
3,0.02729,0,7.07,0,0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03
6,0.02985,0,2.18,0,0.458,6.43,58.7,6.0622,3,222,18.7,394.12,5.21
8,0.14455,12.5,7.87,0,0.524,6.172,96.1,5.9505,5,311,15.2,396.9,19.15
9,0.21124,12.5,7.87,0,0.524,5.631,100,6.0821,5,311,15.2,386.63,29.93
10,0.17004,12.5,7.87,0,0.524,6.004,85.9,6.5921,5,311,15.2,386.71,17.1
18,0.7842,0,8.14,0,0.538,5.99,81.7,4.2579,4,307,21,386.75,14.67
20,0.7258,0,8.14,0,0.538,5.727,69.5,3.7965,4,307,21,390.95,11.28
25,0.75026,0,8.14,0,0.538,5.924,94.1,4.3996,4,307,21,394.33,16.3
26,0.84054,0,8.14,0,0.538,5.599,85.7,4.4546,4,307,21,303.42,16.51
27,0.67191,0,8.14,0,0.538,5.813,90.3,4.682,4,307,21,376.88,14.81
29,0.77299,0,8.14,0,0.538,6.495,94.4,4.4547,4,307,21,387.94,12.8
30,1.00245,0,8.14,0,0.538,6.674,87.3,4.239,4,307,21,380.23,11.98
33,1.38799,0,8.14,0,0.538,5.95,82,3.99,4,307,21,232.6,27.71
34,1.15172,0,8.14,0,0.538,5.701,95,3.7872,4,307,21,358.77,18.35
36,0.06417,0,5.96,0,0.499,5.933,68.2,3.3603,5,279,19.2,396.9,9.68
37,0.09744,0,5.96,0,0.499,5.841,61.4,3.3779,5,279,19.2,377.56,11.41
38,0.08014,0,5.96,0,0.499,5.85,41.5,3.9342,5,279,19.2,396.9,8.77
42,0.12744,0,6.91,0,0.448,6.77,2.9,5.7209,3,233,17.9,385.41,4.84
49,0.25387,0,6.91,0,0.448,5.399,95.3,5.87,3,233,17.9,396.9,30.81
53,0.0536,21,5.64,0,0.439,6.511,21.1,6.8147,4,243,16.8,396.9,5.28
60,0.10328,25,5.13,0,0.453,5.927,47.2,6.932,8,284,19.7,396.9,9.22
63,0.11027,25,5.13,0,0.453,6.456,67.8,7.2255,8,284,19.7,396.9,6.73
70,0.12816,12.5,6.07,0,0.409,5.885,33,6.498,4,345,18.9,396.9,8.79
72,0.15876,0,10.81,0,0.413,5.961,17.5,5.2873,4,305,19.2,376.94,9.88
79,0.05646,0,12.83,0,0.437,6.232,53.7,5.0141,5,398,18.7,386.4,12.34
80,0.08387,0,12.83,0,0.437,5.874,36.6,4.5026,5,398,18.7,396.06,9.1
83,0.03659,25,4.86,0,0.426,6.302,32.2,5.4007,4,281,19,396.9,6.72
92,0.03932,0,3.41,0,0.489,6.405,73.9,3.0921,2,270,17.8,393.55,8.2
93,0.04203,28,15.04,0,0.464,6.442,53.6,3.6659,4,270,18.2,395.01,8.16
96,0.12204,0,2.89,0,0.445,6.625,57.8,3.4952,2,276,18,357.98,6.65
98,0.12083,0,2.89,0,0.445,8.069,76,3.4952,2,276,18,396.9,4.21
99,0.08187,0,2.89,0,0.445,7.82,36.9,3.4952,2,276,18,393.53,3.57
100,0.0686,0,2.89,0,0.445,7.416,62.5,3.4952,2,276,18,396.9,6.19
105,0.1396,0,8.56,0,0.52,6.167,90,2.421,5,384,20.9,392.69,12.33
106,0.13262,0,8.56,0,0.52,5.851,96.7,2.1069,5,384,20.9,394.05,16.47
111,0.10793,0,8.56,0,0.52,6.195,54.4,2.7778,5,384,20.9,393.49,13
113,0.12329,0,10.01,0,0.547,5.913,92.9,2.3534,6,432,17.8,394.95,16.21
114,0.22212,0,10.01,0,0.547,6.092,95.4,2.548,6,432,17.8,396.9,17.09
116,0.17134,0,10.01,0,0.547,5.928,88.2,2.4631,6,432,17.8,344.91,15.76
126,0.16902,0,25.65,0,0.581,5.986,88.4,1.9929,2,188,19.1,385.02,14.81
130,0.88125,0,21.89,0,0.624,5.637,94.7,1.9799,4,437,21.2,396.9,18.34
131,0.34006,0,21.89,0,0.624,6.458,98.9,2.1185,4,437,21.2,395.04,12.6
133,0.59005,0,21.89,0,0.624,6.372,97.9,2.3274,4,437,21.2,385.76,11.12
136,0.55778,0,21.89,0,0.624,6.335,98.2,2.1107,4,437,21.2,394.67,16.96
141,0.2909,0,21.89,0,0.624,6.174,93.6,1.6119,4,437,21.2,388.08,24.16
144,4.0974,0,19.58,0,0.871,5.468,100,1.4118,5,403,14.7,396.9,26.42
145,2.77974,0,19.58,0,0.871,4.903,97.8,1.3459,5,403,14.7,396.9,29.29
147,2.15505,0,19.58,0,0.871,5.628,100,1.5166,5,403,14.7,169.27,16.65
152,1.49632,0,19.58,0,0.871,5.404,100,1.5916,5,403,14.7,341.6,13.28
153,1.12658,0,19.58,1,0.871,5.012,88,1.6102,5,403,14.7,343.28,12.12
156,3.53501,0,19.58,1,0.871,6.152,82.6,1.7455,5,403,14.7,88.01,15.02
158,1.22358,0,19.58,0,0.605,6.943,97.4,1.8773,5,403,14.7,363.43,4.59
163,1.83377,0,19.58,1,0.605,7.802,98.2,2.0407,5,403,14.7,389.61,1.92
169,2.3004,0,19.58,0,0.605,6.319,96.1,2.1,5,403,14.7,297.09,11.1
185,0.08308,0,2.46,0,0.488,5.604,89.8,2.9879,3,193,17.8,391,13.98
186,0.06047,0,2.46,0,0.488,6.153,68.8,3.2797,3,193,17.8,387.11,13.15
189,0.12579,45,3.44,0,0.437,6.556,29.1,4.5667,5,398,15.2,382.84,4.56
196,0.01381,80,0.46,0,0.422,7.875,32,5.6484,4,255,14.4,394.23,2.97
197,0.04011,80,1.52,0,0.404,7.287,34.1,7.309,2,329,12.6,396.9,4.08
199,0.03768,80,1.52,0,0.404,7.274,38.3,7.309,2,329,12.6,392.2,6.62
200,0.0315,95,1.47,0,0.403,6.975,15.3,7.6534,3,402,17,396.9,4.56
203,0.02177,82.5,2.03,0,0.415,7.61,15.7,6.27,2,348,14.7,395.38,3.11
208,0.25199,0,10.59,0,0.489,5.783,72.7,4.3549,4,277,18.6,389.43,18.06
210,0.43571,0,10.59,1,0.489,5.344,100,3.875,4,277,18.6,396.9,23.09
211,0.17446,0,10.59,1,0.489,5.96,92.1,3.8771,4,277,18.6,393.25,17.27
213,0.21719,0,10.59,1,0.489,5.807,53.8,3.6526,4,277,18.6,390.94,16.03
216,0.19802,0,10.59,0,0.489,6.182,42.4,3.9454,4,277,18.6,393.63,9.47
218,0.07013,0,13.89,0,0.55,6.642,85.1,3.4211,5,276,16.4,392.78,9.69
219,0.11069,0,13.89,1,0.55,5.951,93.8,2.8893,5,276,16.4,396.9,17.92
220,0.11425,0,13.89,1,0.55,6.373,92.4,3.3633,5,276,16.4,393.74,10.5
221,0.35809,0,6.2,1,0.507,6.951,88.5,2.8617,8,307,17.4,391.7,9.71
229,0.29819,0,6.2,0,0.504,7.686,17,3.3751,8,307,17.4,377.51,3.92
232,0.46296,0,6.2,0,0.504,7.412,76.9,3.6715,8,307,17.4,376.14,5.25
238,0.51183,0,6.2,0,0.507,7.358,71.6,4.148,8,307,17.4,390.07,4.73
239,0.08244,30,4.93,0,0.428,6.481,18.5,6.1899,6,300,16.6,379.41,6.36
240,0.09252,30,4.93,0,0.428,6.606,42.2,6.1899,6,300,16.6,383.78,7.37
242,0.10612,30,4.93,0,0.428,6.095,65.1,6.3361,6,300,16.6,394.62,12.4
246,0.19133,22,5.86,0,0.431,5.605,70.2,7.9549,7,330,19.1,389.13,18.46
248,0.19657,22,5.86,0,0.431,6.226,79.2,8.0555,7,330,19.1,376.14,10.15
253,0.08221,22,5.86,0,0.431,6.957,6.8,8.9067,7,330,19.1,386.09,3.53
255,0.04819,80,3.64,0,0.392,6.108,32,9.2203,1,315,16.4,392.89,6.57
256,0.03548,80,3.64,0,0.392,5.876,19.1,9.2203,1,315,16.4,395.18,9.25
257,0.01538,90,3.75,0,0.394,7.454,34.2,6.3361,3,244,15.9,386.34,3.11
258,0.61154,20,3.97,0,0.647,8.704,86.9,1.801,5,264,13,389.7,5.12
259,0.66351,20,3.97,0,0.647,7.333,100,1.8946,5,264,13,383.29,7.79
260,0.65665,20,3.97,0,0.647,6.842,100,2.0107,5,264,13,391.93,6.9
268,0.57834,20,3.97,0,0.575,8.297,67,2.4216,5,264,13,384.54,7.44
270,0.09065,20,6.96,1,0.464,5.92,61.5,3.9175,3,223,18.6,391.34,13.65
271,0.29916,20,6.96,0,0.464,5.856,42.1,4.429,3,223,18.6,388.65,13
276,0.09604,40,6.41,0,0.447,6.854,42.8,4.2673,4,254,17.6,396.9,2.98
277,0.10469,40,6.41,1,0.447,7.267,49,4.7872,4,254,17.6,389.25,6.05
278,0.06127,40,6.41,1,0.447,6.826,27.6,4.8628,4,254,17.6,393.45,4.16
279,0.07978,40,6.41,0,0.447,6.482,32.1,4.1403,4,254,17.6,396.9,7.19
281,0.03578,20,3.33,0,0.4429,7.82,64.5,4.6947,5,216,14.9,387.31,3.76
288,0.03871,52.5,5.32,0,0.405,6.209,31.3,7.3172,6,293,16.6,396.9,7.14
290,0.04297,52.5,5.32,0,0.405,6.565,22.9,7.3172,6,293,16.6,371.72,9.51
292,0.07886,80,4.95,0,0.411,7.148,27.7,5.1167,4,245,19.2,396.9,3.56
295,0.08199,0,13.92,0,0.437,6.009,42.3,5.5027,4,289,16,396.9,10.4
296,0.12932,0,13.92,0,0.437,6.678,31.1,5.9604,4,289,16,396.9,6.27
299,0.06466,70,2.24,0,0.4,6.345,20.1,7.8278,5,358,14.8,368.24,4.97
300,0.05561,70,2.24,0,0.4,7.041,10,7.8278,5,358,14.8,371.58,4.74
301,0.04417,70,2.24,0,0.4,6.871,47.4,7.8278,5,358,14.8,390.86,6.07
308,0.04932,33,2.18,0,0.472,6.849,70.3,3.1827,7,222,18.4,396.9,7.53
314,0.26938,0,9.9,0,0.544,6.266,82.8,3.2628,4,304,18.4,393.39,7.9
315,0.3692,0,9.9,0,0.544,6.567,87.3,3.6023,4,304,18.4,395.69,9.28
320,0.47547,0,9.9,0,0.544,6.113,58.8,4.0019,4,304,18.4,396.23,12.73
322,0.18159,0,7.38,0,0.493,6.376,54.3,4.5404,5,287,19.6,396.9,6.87
323,0.35114,0,7.38,0,0.493,6.041,49.9,4.7211,5,287,19.6,396.9,7.7
324,0.28392,0,7.38,0,0.493,5.708,74.3,4.7211,5,287,19.6,391.13,11.74
327,0.30347,0,7.38,0,0.493,6.312,28.9,5.4159,5,287,19.6,396.9,6.15
330,0.06724,0,3.24,0,0.46,6.333,17.2,5.2146,4,430,16.9,375.21,7.34
332,0.05023,35,6.06,0,0.4379,5.706,28.4,6.6407,1,304,16.9,394.02,12.43
333,0.03466,35,6.06,0,0.4379,6.031,23.3,6.6407,1,304,16.9,362.25,7.83
336,0.03961,0,5.19,0,0.515,6.037,34.5,5.9853,5,224,20.2,396.9,8.01
338,0.03041,0,5.19,0,0.515,5.895,59.6,5.615,5,224,20.2,394.81,10.56
346,0.03113,0,4.39,0,0.442,6.014,48.5,8.0136,3,352,18.8,385.64,10.53
347,0.06162,0,4.39,0,0.442,5.898,52.3,8.0136,3,352,18.8,364.61,12.67
351,0.06211,40,1.25,0,0.429,6.49,44.4,8.7921,1,335,19.7,396.9,5.98
352,0.0795,60,1.69,0,0.411,6.579,35.9,10.7103,4,411,18.3,370.78,5.49
354,0.01709,90,2.02,0,0.41,6.728,36.1,12.1265,5,187,17,384.46,4.5
355,0.04301,80,1.91,0,0.413,5.663,21.9,10.5857,4,334,22,382.8,8.05
356,0.10659,80,1.91,0,0.413,5.936,19.5,10.5857,4,334,22,376.04,5.57
362,3.83684,0,18.1,0,0.77,6.251,91.1,2.2955,24,666,20.2,350.65,14.19
364,4.22239,0,18.1,1,0.77,5.803,89,1.9047,24,666,20.2,353.04,14.64
365,3.47428,0,18.1,1,0.718,8.78,82.9,1.9047,24,666,20.2,354.55,5.29
370,5.66998,0,18.1,1,0.631,6.683,96.8,1.3567,24,666,20.2,375.33,3.73
376,19.6091,0,18.1,0,0.671,7.313,97.9,1.3163,24,666,20.2,396.9,13.44
379,23.6482,0,18.1,0,0.671,6.38,96.2,1.3861,24,666,20.2,396.9,23.69
380,17.8667,0,18.1,0,0.671,6.223,100,1.3861,24,666,20.2,393.74,21.78
381,88.9762,0,18.1,0,0.671,6.968,91.9,1.4165,24,666,20.2,396.9,17.21
382,15.8744,0,18.1,0,0.671,6.545,99.1,1.5192,24,666,20.2,396.9,21.08
386,16.8118,0,18.1,0,0.7,5.277,98.1,1.4261,24,666,20.2,396.9,30.81
389,14.3337,0,18.1,0,0.7,4.88,100,1.5895,24,666,20.2,372.92,30.62
391,6.96215,0,18.1,0,0.7,5.713,97,1.9265,24,666,20.2,394.43,17.11
394,8.64476,0,18.1,0,0.693,6.193,92.6,1.7912,24,666,20.2,396.9,15.17
396,8.71675,0,18.1,0,0.693,6.471,98.8,1.7257,24,666,20.2,391.98,17.12
398,7.67202,0,18.1,0,0.693,5.747,98.9,1.6334,24,666,20.2,393.1,19.92
400,9.91655,0,18.1,0,0.693,5.852,77.8,1.5004,24,666,20.2,338.16,29.97
403,9.59571,0,18.1,0,0.693,6.404,100,1.639,24,666,20.2,376.11,20.31
405,41.5292,0,18.1,0,0.693,5.531,85.4,1.6074,24,666,20.2,329.46,27.38
406,67.9208,0,18.1,0,0.693,5.683,100,1.4254,24,666,20.2,384.97,22.98
407,20.7162,0,18.1,0,0.659,4.138,100,1.1781,24,666,20.2,370.22,23.34
410,14.4383,0,18.1,0,0.597,6.852,100,1.4655,24,666,20.2,179.36,19.78
411,51.1358,0,18.1,0,0.597,5.757,100,1.413,24,666,20.2,2.6,10.11
412,14.0507,0,18.1,0,0.597,6.657,100,1.5275,24,666,20.2,35.05,21.22
413,18.811,0,18.1,0,0.597,4.628,100,1.5539,24,666,20.2,28.79,34.37
417,10.8342,0,18.1,0,0.679,6.782,90.8,1.8195,24,666,20.2,21.57,25.79
421,11.0874,0,18.1,0,0.718,6.411,100,1.8589,24,666,20.2,318.75,15.02
422,7.02259,0,18.1,0,0.718,6.006,95.3,1.8746,24,666,20.2,319.98,15.7
423,12.0482,0,18.1,0,0.614,5.648,87.6,1.9512,24,666,20.2,291.55,14.1
424,7.05042,0,18.1,0,0.614,6.103,85.1,2.0218,24,666,20.2,2.52,23.29
427,12.2472,0,18.1,0,0.584,5.837,59.7,1.9976,24,666,20.2,24.65,15.69
431,8.49213,0,18.1,0,0.584,6.348,86.1,2.0527,24,666,20.2,83.45,17.64
436,11.1604,0,18.1,0,0.74,6.629,94.6,2.1247,24,666,20.2,109.85,23.27
437,14.4208,0,18.1,0,0.74,6.461,93.3,2.0026,24,666,20.2,27.49,18.05
439,13.6781,0,18.1,0,0.74,5.935,87.9,1.8206,24,666,20.2,68.95,34.02
447,6.28807,0,18.1,0,0.74,6.341,96.4,2.072,24,666,20.2,318.01,17.79
450,7.52601,0,18.1,0,0.713,6.417,98.3,2.185,24,666,20.2,304.21,19.31
451,6.71772,0,18.1,0,0.713,6.749,92.6,2.3236,24,666,20.2,0.32,17.44
455,9.51363,0,18.1,0,0.713,6.728,94.1,2.4961,24,666,20.2,6.68,18.71
457,4.66883,0,18.1,0,0.713,5.976,87.9,2.5806,24,666,20.2,10.48,19.01
471,4.34879,0,18.1,0,0.58,6.167,84,3.0334,24,666,20.2,396.9,16.29
474,4.64689,0,18.1,0,0.614,6.98,67.6,2.5329,24,666,20.2,374.68,11.66
476,6.39312,0,18.1,0,0.584,6.162,97.4,2.206,24,666,20.2,302.76,24.1
483,5.73116,0,18.1,0,0.532,7.061,77,3.4106,24,666,20.2,395.28,7.01
486,3.67367,0,18.1,0,0.583,6.312,51.9,3.9917,24,666,20.2,388.62,10.58
490,0.18337,0,27.74,0,0.609,5.414,98.3,1.7554,4,711,20.1,344.05,23.97
495,0.27957,0,9.69,0,0.585,5.926,42.6,2.3817,6,391,19.2,396.9,13.59
496,0.17899,0,9.69,0,0.585,5.67,28.8,2.7986,6,391,19.2,393.29,17.6
497,0.2896,0,9.69,0,0.585,5.39,72.9,2.7986,6,391,19.2,396.9,21.14
499,0.23912,0,9.69,0,0.585,6.019,65.3,2.4091,6,391,19.2,396.9,12.92
501,0.22438,0,9.69,0,0.585,6.027,79.7,2.4982,6,391,19.2,396.9,14.33
505,0.10959,0,11.93,0,0.573,6.794,89.3,2.3889,1,273,21,393.45,6.48
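
The listing above has 14 columns (ID plus 13 Boston-housing features) and no medv target, so it appears to be the test split that the script below reads. A minimal sanity check, assuming that path:

import pandas as pd

# Hypothetical path, matching what the script below uses.
df = pd.read_csv('../input/boston_test.csv')
print(df.shape)               # (rows, 14): ID plus 13 feature columns
print('medv' in df.columns)   # False: no target column in the test split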
# Tensorflow v1 example
# https://www.kaggle.com/ratnesh88/predict-house-price-using-tensorflow
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
import tensorflow as tf
import os
print(os.listdir("../input"))
# Any results you write to the current directory are saved as output.
train = pd.read_csv('../input/boston_train.csv')
train.head()
train.info()
test = pd.read_csv('../input/boston_test.csv')
test.head()
cols = train.columns
print(cols)
train.isna().sum()
# One float default per CSV column, consumed by tf.decode_csv below.
default_types = [[0.0]] * len(cols)
print(default_types)
y_name = 'medv'
batch_size = 128
num_epochs = 400
buffer = 1000
split = 0.7
def parse_line(line):
    # Parse one CSV line into a {column: value} dict and split off the label.
    columns = tf.decode_csv(line, default_types)
    features = dict(zip(cols, columns))
    label = features.pop(y_name)
    return features, label
data = tf.data.TextLineDataset('../input/boston_train.csv').skip(1)
# Divide data into train and validation data: hash each raw CSV line into
# one of num_buckets buckets; roughly `split` (70%) of lines land in training.
def in_training_set(line):
    num_buckets = 1000000
    bucket_id = tf.string_to_hash_bucket_fast(line, num_buckets)
    return bucket_id < int(split * num_buckets)

def in_test_set(line):
    return ~in_training_set(line)
train = (data.filter(in_training_set).map(parse_line))
validation = (data.filter(in_test_set).map(parse_line))
def X():
    # Training input_fn: repeat and shuffle, then batch.
    return train.repeat().shuffle(buffer).batch(batch_size).make_one_shot_iterator().get_next()

def Y():
    # Validation input_fn: a single shuffled pass.
    return validation.shuffle(buffer).batch(batch_size).make_one_shot_iterator().get_next()
sess = tf.Session()
#sess.run(validation)
# Define model.. train on X
feature_columns = []
for col in cols[1:-1]:
    feature_columns.append(tf.feature_column.numeric_column(col))
model = tf.estimator.DNNRegressor(feature_columns=feature_columns, hidden_units=[10, 10])
model.train(input_fn=X, steps=500)
eval_result = model.evaluate(input_fn=Y)
for key in sorted(eval_result):
    print('%s: %s' % (key, eval_result[key]))
test.head()
test_in = tf.estimator.inputs.pandas_input_fn(test, shuffle=False)
print(test_in)
pred_iter = model.predict(input_fn=test_in)
predC = []
for i, pred in enumerate(pred_iter):
    print(test['ID'][i], pred['predictions'][0])
    predC.append(pred['predictions'][0])
out_df = pd.DataFrame({"ID":test['ID'], "medv":predC})
file = out_df.to_csv("submission.csv", index=False)
print(os.listdir('../working'))
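
For comparison, a minimal TF 2.x Keras sketch of the same regressor (the DNNRegressor above uses hidden_units=[10, 10]; the adam optimizer and validation_split here are swapped-in choices, and the column layout of boston_train.csv is assumed):

import pandas as pd
import tensorflow as tf

train = pd.read_csv('../input/boston_train.csv')
X = train.drop(columns=['ID', 'medv']).astype('float32')  # same features as cols[1:-1]
y = train['medv'].astype('float32')

model = tf.keras.Sequential([
    tf.keras.layers.Dense(10, activation='relu'),
    tf.keras.layers.Dense(10, activation='relu'),
    tf.keras.layers.Dense(1),          # single regression output
])
model.compile(optimizer='adam', loss='mse')
model.fit(X, y, epochs=50, batch_size=128, validation_split=0.3)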
# -*- coding: utf-8 -*
# %matplotlib notebook
import tensorflow as tf
import matplotlib.pyplot as pyplot
import numpy
import pandas as panda
from sklearn.utils import shuffle

# v1-style graph code: when running under TF 2.x, eager execution has to be
# disabled before the graph below can be fed through a Session.
tf.compat.v1.disable_eager_execution()

with tf.compat.v1.Session() as sess:
    # Read the data file
    read_data = panda.read_csv("03. predicting-boston-house-price/data/boston.csv", header = 0)
    # Show summary statistics of the data
    # print(read_data.describe())
    # Take the values from the data file
    read_data = read_data.values
    # A 2-D array: 13 columns, 506 rows
    # print(read_data)
    # Convert to numpy array format
    read_data = numpy.array(read_data)
    # Scale the feature columns (0 to 11) by their range (max - min)
    for i in range(12):
        read_data[:, i] = read_data[:, i] / (read_data[:, i].max() - read_data[:, i].min())
    # x_data holds the first 12 columns, the features
    # the first slice takes all rows, the second takes columns 0 - 11
    x_data = read_data[:, :12]
    # y_data holds the last column, the label
    y_data = read_data[:, 12]
    # A 2-D array: 506 rows, 12 columns
    # print(x_data, "\n shape = ", x_data.shape)
    # A 1-D array with 506 elements
    # print(y_data, "\n shape = ", y_data.shape)
    # Define placeholders for the feature and label data
    # 12 features, so the shape has to match the actual feature data
    # x = tf.placeholder(tf.float32, [None, 12], name = "X")
    # any number of rows, 12 columns
    x = tf.keras.Input(name = "X", shape = (12,), dtype = tf.dtypes.float32)
    # 1 label
    # y = tf.placeholder(tf.float32, [None, 1], name = "Y")
    # 1 column
    y = tf.keras.Input(name = "Y", shape = (1,), dtype = tf.dtypes.float32)
    # Define a name scope
    with tf.name_scope("Model"):
        # w is a 12 x 1 column vector: w1, w2, w3...
        # initialized to random values of shape (12, 1) with stddev 0.01
        w = tf.Variable(tf.random.normal([12, 1], stddev = 0.01), name = "w")
        # b is initialized to 1.0
        b = tf.Variable(1.0, name = "b")
        # w and x are multiplied as matrices, so use matmul, not multiply or *
        # x will be a row vector!
        # the result is the predicted label y
        # y = x1 * w1 + ... + x12 * w12 + b
        def model(x, w, b):
            return tf.matmul(x, w) + b
        # The prediction op, i.e. the forward-pass node
        predict = model(x, w, b)
    # Model training
    # Set the training hyperparameters
    # number of epochs
    train_epochs = 50
    # learning rate
    learning_rate = 0.0251
    # Define the mean-squared-error loss function
    with tf.name_scope("LossFunction"):
        # mean squared error
        loss_function = tf.reduce_mean(tf.pow(y - predict, 2))
    # Create the optimizer
    # optimizer = tf.train.GradientDescentOptimizer(learn_rate).minimize(loss_function)
    # Use something like this in v2
    # optimizer = tf.keras.optimizers.SGD(learning_rate).minimize(loss_function, var_list=[w, b])
    # TypeError: 'Tensor' object is not callable
    optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate).minimize(loss_function)
    # Variables must be initialized before the first training step
    sess.run(tf.compat.v1.global_variables_initializer())
    # Model training
    for epoch in range(train_epochs):
        loss_sum = 0.0
        for xs, ys in zip(x_data, y_data):
            # Fed data must match the placeholder shapes
            xs = xs.reshape(1, 12)
            ys = ys.reshape(1, 1)
            _, loss = sess.run([optimizer, loss_function], feed_dict = {x: xs, y: ys})
            loss_sum = loss_sum + loss
        # Shuffle the data order
        x_data, y_data = shuffle(x_data, y_data)
        b0temp = b.eval(session = sess)
        w0temp = w.eval(session = sess)
        loss_average = loss_sum / len(y_data)
        print("epoch = ", epoch + 1, "loss = ", loss_average, "b = ", b0temp, " w = ", w0temp)
# -*- coding: utf-8 -*
# %matplotlib notebook
import tensorflow as tf
import matplotlib.pyplot as pyplot
import numpy
import pandas as panda
from sklearn.utils import shuffle

# v1-style graph code: when running under TF 2.x, eager execution has to be
# disabled before the placeholders below can be created.
tf.compat.v1.disable_eager_execution()

with tf.compat.v1.Session() as sess:
    # Read the data file
    read_data = panda.read_csv("03. predicting-boston-house-price/data/boston.csv", header = 0)
    # Show summary statistics of the data
    # print(read_data.describe())
    # Take the values from the data file
    read_data = read_data.values
    # A 2-D array: 13 columns, 506 rows
    # print(read_data)
    # Convert to numpy array format
    read_data = numpy.array(read_data)
    # Scale the feature columns (0 to 11) by their range (max - min)
    for i in range(12):
        read_data[:, i] = read_data[:, i] / (read_data[:, i].max() - read_data[:, i].min())
    # x_data holds the first 12 columns, the features
    # the first slice takes all rows, the second takes columns 0 - 11
    x_data = read_data[:, :12]
    # y_data holds the last column, the label
    y_data = read_data[:, 12]
    # A 2-D array: 506 rows, 12 columns
    # print(x_data, "\n shape = ", x_data.shape)
    # A 1-D array with 506 elements
    # print(y_data, "\n shape = ", y_data.shape)
    # Define placeholders for the feature and label data
    # 12 features, so the shape has to match the actual feature data
    # any number of rows, 12 columns
    x = tf.compat.v1.placeholder(tf.float32, [None, 12], name = "x")
    # 1 label, i.e. 1 column
    y = tf.compat.v1.placeholder(tf.float32, [None, 1], name = "y")
    # Define a name scope
    with tf.name_scope("Model"):
        # w is a 12 x 1 column vector: w1, w2, w3...
        # initialized to random values of shape (12, 1) with stddev 0.01
        w = tf.Variable(tf.random.normal([12, 1], stddev = 0.01), name = "w")
        # b is initialized to 1.0
        b = tf.Variable(1.0, name = "b")
        # w and x are multiplied as matrices, so use matmul, not multiply or *
        # x will be a row vector!
        # y = x1 * w1 + ... + x12 * w12 + b
        def model(x, w, b):
            return tf.matmul(x, w) + b
        # The prediction op, i.e. the forward-pass node
        predict = model(x, w, b)
    # Model training
    # Set the training hyperparameters
    # number of epochs
    train_epochs = 50
    # learning rate
    learning_rate = 0.0251
    # Define the mean-squared-error loss function
    with tf.name_scope("LossFunction"):
        # mean squared error
        loss_function = tf.reduce_mean(tf.pow(y - predict, 2))
    # Create the optimizer
    optimizer = tf.compat.v1.train.GradientDescentOptimizer(learning_rate).minimize(loss_function)
    # Pass gpu_options into the config, otherwise the memory fraction is ignored
    gpu_options = tf.compat.v1.GPUOptions(per_process_gpu_memory_fraction = 0.7)
    config = tf.compat.v1.ConfigProto(allow_soft_placement = True, gpu_options = gpu_options)
    config.gpu_options.allow_growth = True
    # Create the session
    sess = tf.compat.v1.Session(config = config)
    init = tf.compat.v1.global_variables_initializer()
    # Tensorboard visualization
    logdir = "03. predicting-boston-house-price/log"
    # Create an op that records the loss value
    sum_loss_op = tf.compat.v1.summary.scalar("loss", loss_function)
    # Merge all summaries so they can be written in one pass
    merged = tf.compat.v1.summary.merge_all()
    sess.run(init)
    # Create the Tensorboard writer
    writer = tf.compat.v1.summary.FileWriter(logdir, sess.graph)
    loss_list = []
    # Model training
    for epoch in range(train_epochs):
        loss_sum = 0.0
        for xs, ys in zip(x_data, y_data):
            # Fed data must match the placeholder shapes
            xs = xs.reshape(1, 12)
            ys = ys.reshape(1, 1)
            _, summary_str, loss = sess.run([optimizer, sum_loss_op, loss_function], feed_dict = {x: xs, y: ys})
            writer.add_summary(summary_str, epoch)
            loss_sum += loss
        # Shuffle the data order
        x_data, y_data = shuffle(x_data, y_data)
        b0temp = b.eval(session = sess)
        w0temp = w.eval(session = sess)
        loss_average = loss_sum / len(y_data)
        # Keep the per-epoch average loss so it can be plotted at the end
        loss_list.append(loss_average)
        print("epoch = ", epoch + 1, "loss = ", loss_average, "b = ", b0temp, " w = ", w0temp)
    # Model prediction
    n = numpy.random.randint(len(x_data[:, 0]))
    # print(n)
    x_test = x_data[n]
    x_test = x_test.reshape([1, 12])
    prediction = sess.run(predict, feed_dict={x: x_test})
    print("Predicted value: %f" % prediction[0, 0])
    target = y_data[n]
    print("Actual value: %f" % target)
    # Plot the loss values
    pyplot.plot(loss_list)
    pyplot.show()
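
Usage note: once a run finishes, the summaries written by the FileWriter above can be inspected with TensorBoard, e.g. tensorboard --logdir "03. predicting-boston-house-price/log", then opening the URL it prints.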