[{"data":1,"prerenderedAt":19107},["ShallowReactive",2],{"room-derivatives-en":3,"navigation":18270,"search":18320},{"id":4,"title":5,"author":6,"avatar":7,"body":8,"date":18260,"description":18261,"extension":18262,"meta":18263,"navigation":10756,"ogImage":18264,"path":18265,"seo":18266,"stem":18267,"updateSummary":18268,"updatedAt":18260,"__hash__":18269},"content\u002Fen\u002Frooms\u002Fderivatives.md","Derivatives — The Language of Change","Chau Dara - Founder of TFDevs","\u002Fassets\u002Fimg\u002Favatar.jpg",{"type":9,"value":10,"toc":18226},"minimark",[11,25,28,50,55,60,63,178,181,321,410,561,678,682,982,1415,1480,1507,1509,1513,1524,1527,1631,1801,2056,2060,2178,2440,2447,2651,2942,3052,3054,3058,3127,3165,3406,3409,3413,3419,3692,3695,4371,4425,4476,4638,4640,4644,4648,4908,5272,5278,5282,5875,5877,5881,5888,5892,5993,6222,6227,6845,6849,7097,7356,7360,7667,7873,8060,8064,8128,8414,8420,8558,8751,9043,9050,9054,9844,9846,9849,9851,9855,9859,9964,10138,10288,10497,10686,10865,10869,10872,11196,11264,11560,11596,11598,11602,11845,11849,11856,12082,12496,13048,13052,13093,13942,13953,13974,13976,13980,13983,14261,14267,15075,15166,15667,15699,16133,16139,16477,16900,16903,16905,16909,18156,18167,18169,18173,18180,18222],[12,13,14,15,19,20,24],"p",{},"Every time a neural network learns, it asks one question over and over: ",[16,17,18],"em",{},"\"If I nudge this parameter slightly, does the error go up or down — and by how much?\""," That question is answered by the ",[21,22,23],"strong",{},"derivative",". 
Before we talk about gradients or optimizers, we need to understand derivatives from scratch.",[26,27],"hr",{},[29,30,41,42],"div",{"className":31,"style":40},[32,33,34,35,36,37,38,39],"w-full","max-w-2xl","mx-auto","bg-white","rounded-lg","shadow-md","overflow-hidden","mb-2","height: 300px;","\n  ",[43,44],"img",{"src":45,"alt":46,"className":47,"style":49},"\u002Fassets\u002Fimg\u002Fderivative.gif","Derivative Illustration",[32,48],"h-full","object-fit: contain;",[51,52,54],"h2",{"id":53},"part-1-lines-and-slopes","Part 1 — Lines and Slopes",[56,57,59],"h3",{"id":58},"the-equation-of-a-line","The Equation of a Line",[12,61,62],{},"The simplest relationship between two quantities is a straight line:",[64,65,68],"span",{"className":66},[67],"katex-display",[64,69,72,112],{"className":70},[71],"katex",[64,73,76],{"className":74},[75],"katex-mathml",[77,78,81],"math",{"xmlns":79,"display":80},"http:\u002F\u002Fwww.w3.org\u002F1998\u002FMath\u002FMathML","block",[82,83,84,107],"semantics",{},[85,86,87,91,95,98,101,104],"mrow",{},[88,89,90],"mi",{},"y",[92,93,94],"mo",{},"=",[88,96,97],{},"m",[88,99,100],{},"x",[92,102,103],{},"+",[88,105,106],{},"b",[108,109,111],"annotation",{"encoding":110},"application\u002Fx-tex","y = mx + 
b",[64,113,117,144,168],{"className":114,"ariaHidden":116},[115],"katex-html","true",[64,118,121,126,132,137,141],{"className":119},[120],"base",[64,122],{"className":123,"style":125},[124],"strut","height:0.625em;vertical-align:-0.1944em;",[64,127,90],{"className":128,"style":131},[129,130],"mord","mathnormal","margin-right:0.03588em;",[64,133],{"className":134,"style":136},[135],"mspace","margin-right:0.2778em;",[64,138,94],{"className":139},[140],"mrel",[64,142],{"className":143,"style":136},[135],[64,145,147,151,154,157,161,165],{"className":146},[120],[64,148],{"className":149,"style":150},[124],"height:0.6667em;vertical-align:-0.0833em;",[64,152,97],{"className":153},[129,130],[64,155,100],{"className":156},[129,130],[64,158],{"className":159,"style":160},[135],"margin-right:0.2222em;",[64,162,103],{"className":163},[164],"mbin",[64,166],{"className":167,"style":160},[135],[64,169,171,175],{"className":170},[120],[64,172],{"className":173,"style":174},[124],"height:0.6944em;",[64,176,106],{"className":177},[129,130],[12,179,180],{},"Where:",[182,183,184,220,253,287],"ul",{},[185,186,187,216,217],"li",{},[64,188,190,203],{"className":189},[71],[64,191,193],{"className":192},[75],[77,194,195],{"xmlns":79},[82,196,197,201],{},[85,198,199],{},[88,200,100],{},[108,202,100],{"encoding":110},[64,204,206],{"className":205,"ariaHidden":116},[115],[64,207,209,213],{"className":208},[120],[64,210],{"className":211,"style":212},[124],"height:0.4306em;",[64,214,100],{"className":215},[129,130]," is the 
",[21,218,219],{},"input",[185,221,222,216,250],{},[64,223,225,238],{"className":224},[71],[64,226,228],{"className":227},[75],[77,229,230],{"xmlns":79},[82,231,232,236],{},[85,233,234],{},[88,235,90],{},[108,237,90],{"encoding":110},[64,239,241],{"className":240,"ariaHidden":116},[115],[64,242,244,247],{"className":243},[120],[64,245],{"className":246,"style":125},[124],[64,248,90],{"className":249,"style":131},[129,130],[21,251,252],{},"output",[185,254,255,216,283,286],{},[64,256,258,271],{"className":257},[71],[64,259,261],{"className":260},[75],[77,262,263],{"xmlns":79},[82,264,265,269],{},[85,266,267],{},[88,268,97],{},[108,270,97],{"encoding":110},[64,272,274],{"className":273,"ariaHidden":116},[115],[64,275,277,280],{"className":276},[120],[64,278],{"className":279,"style":212},[124],[64,281,97],{"className":282},[129,130],[21,284,285],{},"slope"," — how steeply the line rises or falls",[185,288,289,216,317,320],{},[64,290,292,305],{"className":291},[71],[64,293,295],{"className":294},[75],[77,296,297],{"xmlns":79},[82,298,299,303],{},[85,300,301],{},[88,302,106],{},[108,304,106],{"encoding":110},[64,306,308],{"className":307,"ariaHidden":116},[115],[64,309,311,314],{"className":310},[120],[64,312],{"className":313,"style":174},[124],[64,315,106],{"className":316},[129,130],[21,318,319],{},"y-intercept"," — where the line crosses the vertical axis",[12,322,323,326,327],{},[21,324,325],{},"Example:"," ",[64,328,330,357],{"className":329},[71],[64,331,333],{"className":332},[75],[77,334,335],{"xmlns":79},[82,336,337,354],{},[85,338,339,341,343,347,349,351],{},[88,340,90],{},[92,342,94],{},[344,345,346],"mn",{},"2",[88,348,100],{},[92,350,103],{},[344,352,353],{},"1",[108,355,356],{"encoding":110},"y = 2x + 
1",[64,358,360,378,400],{"className":359,"ariaHidden":116},[115],[64,361,363,366,369,372,375],{"className":362},[120],[64,364],{"className":365,"style":125},[124],[64,367,90],{"className":368,"style":131},[129,130],[64,370],{"className":371,"style":136},[135],[64,373,94],{"className":374},[140],[64,376],{"className":377,"style":136},[135],[64,379,381,385,388,391,394,397],{"className":380},[120],[64,382],{"className":383,"style":384},[124],"height:0.7278em;vertical-align:-0.0833em;",[64,386,346],{"className":387},[129],[64,389,100],{"className":390},[129,130],[64,392],{"className":393,"style":160},[135],[64,395,103],{"className":396},[164],[64,398],{"className":399,"style":160},[135],[64,401,403,407],{"className":402},[120],[64,404],{"className":405,"style":406},[124],"height:0.6444em;",[64,408,353],{"className":409},[129],[411,412,413,529],"table",{},[414,415,416],"thead",{},[417,418,419,450],"tr",{},[420,421,422],"th",{},[64,423,425,438],{"className":424},[71],[64,426,428],{"className":427},[75],[77,429,430],{"xmlns":79},[82,431,432,436],{},[85,433,434],{},[88,435,100],{},[108,437,100],{"encoding":110},[64,439,441],{"className":440,"ariaHidden":116},[115],[64,442,444,447],{"className":443},[120],[64,445],{"className":446,"style":212},[124],[64,448,100],{"className":449},[129,130],[420,451,452],{},[64,453,455,478],{"className":454},[71],[64,456,458],{"className":457},[75],[77,459,460],{"xmlns":79},[82,461,462,476],{},[85,463,464,466,468,470,472,474],{},[88,465,90],{},[92,467,94],{},[344,469,346],{},[88,471,100],{},[92,473,103],{},[344,475,353],{},[108,477,356],{"encoding":110},[64,479,481,499,520],{"className":480,"ariaHidden":116},[115],[64,482,484,487,490,493,496],{"className":483},[120],[64,485],{"className":486,"style":125},[124],[64,488,90],{"className":489,"style":131},[129,130],[64,491],{"className":492,"style":136},[135],[64,494,94],{"className":495},[140],[64,497],{"className":498,"style":136},[135],[64,500,502,505,508,511,514,517],{"className":501},[120],[
64,503],{"className":504,"style":384},[124],[64,506,346],{"className":507},[129],[64,509,100],{"className":510},[129,130],[64,512],{"className":513,"style":160},[135],[64,515,103],{"className":516},[164],[64,518],{"className":519,"style":160},[135],[64,521,523,526],{"className":522},[120],[64,524],{"className":525,"style":406},[124],[64,527,353],{"className":528},[129],[530,531,532,540,547,554],"tbody",{},[417,533,534,538],{},[535,536,537],"td",{},"0",[535,539,353],{},[417,541,542,544],{},[535,543,353],{},[535,545,546],{},"3",[417,548,549,551],{},[535,550,346],{},[535,552,553],{},"5",[417,555,556,558],{},[535,557,546],{},[535,559,560],{},"7",[12,562,563,564,592,593,621,622,625,626,677],{},"Every time ",[64,565,567,580],{"className":566},[71],[64,568,570],{"className":569},[75],[77,571,572],{"xmlns":79},[82,573,574,578],{},[85,575,576],{},[88,577,100],{},[108,579,100],{"encoding":110},[64,581,583],{"className":582,"ariaHidden":116},[115],[64,584,586,589],{"className":585},[120],[64,587],{"className":588,"style":212},[124],[64,590,100],{"className":591},[129,130]," increases by 1, ",[64,594,596,609],{"className":595},[71],[64,597,599],{"className":598},[75],[77,600,601],{"xmlns":79},[82,602,603,607],{},[85,604,605],{},[88,606,90],{},[108,608,90],{"encoding":110},[64,610,612],{"className":611,"ariaHidden":116},[115],[64,613,615,618],{"className":614},[120],[64,616],{"className":617,"style":125},[124],[64,619,90],{"className":620,"style":131},[129,130]," increases by ",[21,623,624],{},"exactly 2",". 
The slope ",[64,627,629,647],{"className":628},[71],[64,630,632],{"className":631},[75],[77,633,634],{"xmlns":79},[82,635,636,644],{},[85,637,638,640,642],{},[88,639,97],{},[92,641,94],{},[344,643,346],{},[108,645,646],{"encoding":110},"m = 2",[64,648,650,668],{"className":649,"ariaHidden":116},[115],[64,651,653,656,659,662,665],{"className":652},[120],[64,654],{"className":655,"style":212},[124],[64,657,97],{"className":658},[129,130],[64,660],{"className":661,"style":136},[135],[64,663,94],{"className":664},[140],[64,666],{"className":667,"style":136},[135],[64,669,671,674],{"className":670},[120],[64,672],{"className":673,"style":406},[124],[64,675,346],{"className":676},[129]," captures this constant rate.",[56,679,681],{"id":680},"computing-the-slope-between-two-points","Computing the Slope Between Two Points",[12,683,684,685,846,847,981],{},"Given any two points ",[64,686,688,723],{"className":687},[71],[64,689,691],{"className":690},[75],[77,692,693],{"xmlns":79},[82,694,695,720],{},[85,696,697,701,708,711,717],{},[92,698,700],{"stretchy":699},"false","(",[702,703,704,706],"msub",{},[88,705,100],{},[344,707,353],{},[92,709,710],{"separator":116},",",[702,712,713,715],{},[88,714,90],{},[344,716,353],{},[92,718,719],{"stretchy":699},")",[108,721,722],{"encoding":110},"(x_1, 
y_1)",[64,724,726],{"className":725,"ariaHidden":116},[115],[64,727,729,733,737,793,797,801,842],{"className":728},[120],[64,730],{"className":731,"style":732},[124],"height:1em;vertical-align:-0.25em;",[64,734,700],{"className":735},[736],"mopen",[64,738,740,743],{"className":739},[129],[64,741,100],{"className":742},[129,130],[64,744,747],{"className":745},[746],"msupsub",[64,748,752,784],{"className":749},[750,751],"vlist-t","vlist-t2",[64,753,756,779],{"className":754},[755],"vlist-r",[64,757,761],{"className":758,"style":760},[759],"vlist","height:0.3011em;",[64,762,764,769],{"style":763},"top:-2.55em;margin-left:0em;margin-right:0.05em;",[64,765],{"className":766,"style":768},[767],"pstrut","height:2.7em;",[64,770,776],{"className":771},[772,773,774,775],"sizing","reset-size6","size3","mtight",[64,777,353],{"className":778},[129,775],[64,780,783],{"className":781},[782],"vlist-s","​",[64,785,787],{"className":786},[755],[64,788,791],{"className":789,"style":790},[759],"height:0.15em;",[64,792],{},[64,794,710],{"className":795},[796],"mpunct",[64,798],{"className":799,"style":800},[135],"margin-right:0.1667em;",[64,802,804,807],{"className":803},[129],[64,805,90],{"className":806,"style":131},[129,130],[64,808,810],{"className":809},[746],[64,811,813,834],{"className":812},[750,751],[64,814,816,831],{"className":815},[755],[64,817,819],{"className":818,"style":760},[759],[64,820,822,825],{"style":821},"top:-2.55em;margin-left:-0.0359em;margin-right:0.05em;",[64,823],{"className":824,"style":768},[767],[64,826,828],{"className":827},[772,773,774,775],[64,829,353],{"className":830},[129,775],[64,832,783],{"className":833},[782],[64,835,837],{"className":836},[755],[64,838,840],{"className":839,"style":790},[759],[64,841],{},[64,843,719],{"className":844},[845],"mclose"," and 
",[64,848,850,880],{"className":849},[71],[64,851,853],{"className":852},[75],[77,854,855],{"xmlns":79},[82,856,857,877],{},[85,858,859,861,867,869,875],{},[92,860,700],{"stretchy":699},[702,862,863,865],{},[88,864,100],{},[344,866,346],{},[92,868,710],{"separator":116},[702,870,871,873],{},[88,872,90],{},[344,874,346],{},[92,876,719],{"stretchy":699},[108,878,879],{"encoding":110},"(x_2, y_2)",[64,881,883],{"className":882,"ariaHidden":116},[115],[64,884,886,889,892,932,935,938,978],{"className":885},[120],[64,887],{"className":888,"style":732},[124],[64,890,700],{"className":891},[736],[64,893,895,898],{"className":894},[129],[64,896,100],{"className":897},[129,130],[64,899,901],{"className":900},[746],[64,902,904,924],{"className":903},[750,751],[64,905,907,921],{"className":906},[755],[64,908,910],{"className":909,"style":760},[759],[64,911,912,915],{"style":763},[64,913],{"className":914,"style":768},[767],[64,916,918],{"className":917},[772,773,774,775],[64,919,346],{"className":920},[129,775],[64,922,783],{"className":923},[782],[64,925,927],{"className":926},[755],[64,928,930],{"className":929,"style":790},[759],[64,931],{},[64,933,710],{"className":934},[796],[64,936],{"className":937,"style":800},[135],[64,939,941,944],{"className":940},[129],[64,942,90],{"className":943,"style":131},[129,130],[64,945,947],{"className":946},[746],[64,948,950,970],{"className":949},[750,751],[64,951,953,967],{"className":952},[755],[64,954,956],{"className":955,"style":760},[759],[64,957,958,961],{"style":821},[64,959],{"className":960,"style":768},[767],[64,962,964],{"className":963},[772,773,774,775],[64,965,346],{"className":966},[129,775],[64,968,783],{"className":969},[782],[64,971,973],{"className":972},[755],[64,974,976],{"className":975,"style":790},[759],[64,977],{},[64,979,719],{"className":980},[845]," on a line, the slope 
is:",[64,983,985],{"className":984},[67],[64,986,988,1058],{"className":987},[71],[64,989,991],{"className":990},[75],[77,992,993],{"xmlns":79,"display":80},[82,994,995,1055],{},[85,996,997,999,1001,1018,1020],{},[88,998,97],{},[92,1000,94],{},[1002,1003,1004,1012],"mfrac",{},[85,1005,1006,1010],{},[88,1007,1009],{"mathvariant":1008},"normal","Δ",[88,1011,90],{},[85,1013,1014,1016],{},[88,1015,1009],{"mathvariant":1008},[88,1017,100],{},[92,1019,94],{},[1002,1021,1022,1039],{},[85,1023,1024,1030,1033],{},[702,1025,1026,1028],{},[88,1027,90],{},[344,1029,346],{},[92,1031,1032],{},"−",[702,1034,1035,1037],{},[88,1036,90],{},[344,1038,353],{},[85,1040,1041,1047,1049],{},[702,1042,1043,1045],{},[88,1044,100],{},[344,1046,346],{},[92,1048,1032],{},[702,1050,1051,1053],{},[88,1052,100],{},[344,1054,353],{},[108,1056,1057],{"encoding":110},"m = \\frac{\\Delta y}{\\Delta x} = \\frac{y_2 - y_1}{x_2 - x_1}",[64,1059,1061,1079,1172],{"className":1060,"ariaHidden":116},[115],[64,1062,1064,1067,1070,1073,1076],{"className":1063},[120],[64,1065],{"className":1066,"style":212},[124],[64,1068,97],{"className":1069},[129,130],[64,1071],{"className":1072,"style":136},[135],[64,1074,94],{"className":1075},[140],[64,1077],{"className":1078,"style":136},[135],[64,1080,1082,1086,1163,1166,1169],{"className":1081},[120],[64,1083],{"className":1084,"style":1085},[124],"height:2.0463em;vertical-align:-0.686em;",[64,1087,1089,1093,1160],{"className":1088},[129],[64,1090],{"className":1091},[736,1092],"nulldelimiter",[64,1094,1096],{"className":1095},[1002],[64,1097,1099,1151],{"className":1098},[750,751],[64,1100,1102,1148],{"className":1101},[755],[64,1103,1106,1122,1133],{"className":1104,"style":1105},[759],"height:1.3603em;",[64,1107,1109,1113],{"style":1108},"top:-2.314em;",[64,1110],{"className":1111,"style":1112},[767],"height:3em;",[64,1114,1116,1119],{"className":1115},[129],[64,1117,1009],{"className":1118},[129],[64,1120,100],{"className":1121},[129,130],[64,1123,1125,1128],{"styl
e":1124},"top:-3.23em;",[64,1126],{"className":1127,"style":1112},[767],[64,1129],{"className":1130,"style":1132},[1131],"frac-line","border-bottom-width:0.04em;",[64,1134,1136,1139],{"style":1135},"top:-3.677em;",[64,1137],{"className":1138,"style":1112},[767],[64,1140,1142,1145],{"className":1141},[129],[64,1143,1009],{"className":1144},[129],[64,1146,90],{"className":1147,"style":131},[129,130],[64,1149,783],{"className":1150},[782],[64,1152,1154],{"className":1153},[755],[64,1155,1158],{"className":1156,"style":1157},[759],"height:0.686em;",[64,1159],{},[64,1161],{"className":1162},[845,1092],[64,1164],{"className":1165,"style":136},[135],[64,1167,94],{"className":1168},[140],[64,1170],{"className":1171,"style":136},[135],[64,1173,1175,1179],{"className":1174},[120],[64,1176],{"className":1177,"style":1178},[124],"height:2.0963em;vertical-align:-0.836em;",[64,1180,1182,1185,1412],{"className":1181},[129],[64,1183],{"className":1184},[736,1092],[64,1186,1188],{"className":1187},[1002],[64,1189,1191,1403],{"className":1190},[750,751],[64,1192,1194,1400],{"className":1193},[755],[64,1195,1198,1295,1303],{"className":1196,"style":1197},[759],"height:1.2603em;",[64,1199,1200,1203],{"style":1108},[64,1201],{"className":1202,"style":1112},[767],[64,1204,1206,1246,1249,1252,1255],{"className":1205},[129],[64,1207,1209,1212],{"className":1208},[129],[64,1210,100],{"className":1211},[129,130],[64,1213,1215],{"className":1214},[746],[64,1216,1218,1238],{"className":1217},[750,751],[64,1219,1221,1235],{"className":1220},[755],[64,1222,1224],{"className":1223,"style":760},[759],[64,1225,1226,1229],{"style":763},[64,1227],{"className":1228,"style":768},[767],[64,1230,1232],{"className":1231},[772,773,774,775],[64,1233,346],{"className":1234},[129,775],[64,1236,783],{"className":1237},[782],[64,1239,1241],{"className":1240},[755],[64,1242,1244],{"className":1243,"style":790},[759],[64,1245],{},[64,1247],{"className":1248,"style":160},[135],[64,1250,1032],{"className":1251},[16
4],[64,1253],{"className":1254,"style":160},[135],[64,1256,1258,1261],{"className":1257},[129],[64,1259,100],{"className":1260},[129,130],[64,1262,1264],{"className":1263},[746],[64,1265,1267,1287],{"className":1266},[750,751],[64,1268,1270,1284],{"className":1269},[755],[64,1271,1273],{"className":1272,"style":760},[759],[64,1274,1275,1278],{"style":763},[64,1276],{"className":1277,"style":768},[767],[64,1279,1281],{"className":1280},[772,773,774,775],[64,1282,353],{"className":1283},[129,775],[64,1285,783],{"className":1286},[782],[64,1288,1290],{"className":1289},[755],[64,1291,1293],{"className":1292,"style":790},[759],[64,1294],{},[64,1296,1297,1300],{"style":1124},[64,1298],{"className":1299,"style":1112},[767],[64,1301],{"className":1302,"style":1132},[1131],[64,1304,1305,1308],{"style":1135},[64,1306],{"className":1307,"style":1112},[767],[64,1309,1311,1351,1354,1357,1360],{"className":1310},[129],[64,1312,1314,1317],{"className":1313},[129],[64,1315,90],{"className":1316,"style":131},[129,130],[64,1318,1320],{"className":1319},[746],[64,1321,1323,1343],{"className":1322},[750,751],[64,1324,1326,1340],{"className":1325},[755],[64,1327,1329],{"className":1328,"style":760},[759],[64,1330,1331,1334],{"style":821},[64,1332],{"className":1333,"style":768},[767],[64,1335,1337],{"className":1336},[772,773,774,775],[64,1338,346],{"className":1339},[129,775],[64,1341,783],{"className":1342},[782],[64,1344,1346],{"className":1345},[755],[64,1347,1349],{"className":1348,"style":790},[759],[64,1350],{},[64,1352],{"className":1353,"style":160},[135],[64,1355,1032],{"className":1356},[164],[64,1358],{"className":1359,"style":160},[135],[64,1361,1363,1366],{"className":1362},[129],[64,1364,90],{"className":1365,"style":131},[129,130],[64,1367,1369],{"className":1368},[746],[64,1370,1372,1392],{"className":1371},[750,751],[64,1373,1375,1389],{"className":1374},[755],[64,1376,1378],{"className":1377,"style":760},[759],[64,1379,1380,1383],{"style":821},[64,1381],{"className":
1382,"style":768},[767],[64,1384,1386],{"className":1385},[772,773,774,775],[64,1387,353],{"className":1388},[129,775],[64,1390,783],{"className":1391},[782],[64,1393,1395],{"className":1394},[755],[64,1396,1398],{"className":1397,"style":790},[759],[64,1399],{},[64,1401,783],{"className":1402},[782],[64,1404,1406],{"className":1405},[755],[64,1407,1410],{"className":1408,"style":1409},[759],"height:0.836em;",[64,1411],{},[64,1413],{"className":1414},[845,1092],[12,1416,1417,1418,1421,1422,1450,1451,1479],{},"This is the ",[21,1419,1420],{},"rise over run"," formula — how much ",[64,1423,1425,1438],{"className":1424},[71],[64,1426,1428],{"className":1427},[75],[77,1429,1430],{"xmlns":79},[82,1431,1432,1436],{},[85,1433,1434],{},[88,1435,90],{},[108,1437,90],{"encoding":110},[64,1439,1441],{"className":1440,"ariaHidden":116},[115],[64,1442,1444,1447],{"className":1443},[120],[64,1445],{"className":1446,"style":125},[124],[64,1448,90],{"className":1449,"style":131},[129,130]," changes (rise) per unit change in ",[64,1452,1454,1467],{"className":1453},[71],[64,1455,1457],{"className":1456},[75],[77,1458,1459],{"xmlns":79},[82,1460,1461,1465],{},[85,1462,1463],{},[88,1464,100],{},[108,1466,100],{"encoding":110},[64,1468,1470],{"className":1469,"ariaHidden":116},[115],[64,1471,1473,1476],{"className":1472},[120],[64,1474],{"className":1475,"style":212},[124],[64,1477,100],{"className":1478},[129,130]," (run).",[29,1481,41,1490,41,1497],{"className":1482},[1483,1484,1485,1486,1487,1488,1489],"bg-blue-50","dark:bg-blue-900\u002F20","border-l-4","border-blue-400","p-4","rounded-r-lg","my-6",[12,1491,1496],{"className":1492},[1493,1494,1495],"font-semibold","text-blue-800","dark:text-blue-200","Why Does Slope Matter?",[12,1498,1502,1503,1506],{"className":1499},[1500,1501],"text-blue-700","dark:text-blue-300","Slope tells you the ",[21,1504,1505],{},"rate of change",". 
A slope of 2 means \"for every 1 unit step in x, y changes by 2.\" A slope of −3 means y decreases by 3 for every step forward. A slope of 0 means y doesn't change at all — it's flat.",[26,1508],{},[51,1510,1512],{"id":1511},"part-2-when-lines-become-curves","Part 2 — When Lines Become Curves",[12,1514,1515,1516,1519,1520,1523],{},"A line has a ",[21,1517,1518],{},"constant"," slope — it's the same everywhere. But most interesting functions in mathematics (and in machine learning) are ",[21,1521,1522],{},"curves"," whose steepness changes at every point.",[12,1525,1526],{},"Consider the parabola:",[64,1528,1530],{"className":1529},[67],[64,1531,1533,1563],{"className":1532},[71],[64,1534,1536],{"className":1535},[75],[77,1537,1538],{"xmlns":79,"display":80},[82,1539,1540,1560],{},[85,1541,1542,1545,1547,1549,1551,1553],{},[88,1543,1544],{},"f",[92,1546,700],{"stretchy":699},[88,1548,100],{},[92,1550,719],{"stretchy":699},[92,1552,94],{},[1554,1555,1556,1558],"msup",{},[88,1557,100],{},[344,1559,346],{},[108,1561,1562],{"encoding":110},"f(x) = 
x^2",[64,1564,1566,1594],{"className":1565,"ariaHidden":116},[115],[64,1567,1569,1572,1576,1579,1582,1585,1588,1591],{"className":1568},[120],[64,1570],{"className":1571,"style":732},[124],[64,1573,1544],{"className":1574,"style":1575},[129,130],"margin-right:0.10764em;",[64,1577,700],{"className":1578},[736],[64,1580,100],{"className":1581},[129,130],[64,1583,719],{"className":1584},[845],[64,1586],{"className":1587,"style":136},[135],[64,1589,94],{"className":1590},[140],[64,1592],{"className":1593,"style":136},[135],[64,1595,1597,1601],{"className":1596},[120],[64,1598],{"className":1599,"style":1600},[124],"height:0.8641em;",[64,1602,1604,1607],{"className":1603},[129],[64,1605,100],{"className":1606},[129,130],[64,1608,1610],{"className":1609},[746],[64,1611,1613],{"className":1612},[750],[64,1614,1616],{"className":1615},[755],[64,1617,1619],{"className":1618,"style":1600},[759],[64,1620,1622,1625],{"style":1621},"top:-3.113em;margin-right:0.05em;",[64,1623],{"className":1624,"style":768},[767],[64,1626,1628],{"className":1627},[772,773,774,775],[64,1629,346],{"className":1630},[129,775],[411,1632,1633,1766],{},[414,1634,1635],{},[417,1636,1637,1667],{},[420,1638,1639],{},[64,1640,1642,1655],{"className":1641},[71],[64,1643,1645],{"className":1644},[75],[77,1646,1647],{"xmlns":79},[82,1648,1649,1653],{},[85,1650,1651],{},[88,1652,100],{},[108,1654,100],{"encoding":110},[64,1656,1658],{"className":1657,"ariaHidden":116},[115],[64,1659,1661,1664],{"className":1660},[120],[64,1662],{"className":1663,"style":212},[124],[64,1665,100],{"className":1666},[129,130],[420,1668,1669],{},[64,1670,1672,1699],{"className":1671},[71],[64,1673,1675],{"className":1674},[75],[77,1676,1677],{"xmlns":79},[82,1678,1679,1697],{},[85,1680,1681,1683,1685,1687,1689,1691],{},[88,1682,1544],{},[92,1684,700],{"stretchy":699},[88,1686,100],{},[92,1688,719],{"stretchy":699},[92,1690,94],{},[1554,1692,1693,1695],{},[88,1694,100],{},[344,1696,346],{},[108,1698,1562],{"encoding":110},[64,1700
,1702,1729],{"className":1701,"ariaHidden":116},[115],[64,1703,1705,1708,1711,1714,1717,1720,1723,1726],{"className":1704},[120],[64,1706],{"className":1707,"style":732},[124],[64,1709,1544],{"className":1710,"style":1575},[129,130],[64,1712,700],{"className":1713},[736],[64,1715,100],{"className":1716},[129,130],[64,1718,719],{"className":1719},[845],[64,1721],{"className":1722,"style":136},[135],[64,1724,94],{"className":1725},[140],[64,1727],{"className":1728,"style":136},[135],[64,1730,1732,1736],{"className":1731},[120],[64,1733],{"className":1734,"style":1735},[124],"height:0.8141em;",[64,1737,1739,1742],{"className":1738},[129],[64,1740,100],{"className":1741},[129,130],[64,1743,1745],{"className":1744},[746],[64,1746,1748],{"className":1747},[750],[64,1749,1751],{"className":1750},[755],[64,1752,1754],{"className":1753,"style":1735},[759],[64,1755,1757,1760],{"style":1756},"top:-3.063em;margin-right:0.05em;",[64,1758],{"className":1759,"style":768},[767],[64,1761,1763],{"className":1762},[772,773,774,775],[64,1764,346],{"className":1765},[129,775],[530,1767,1768,1776,1783,1789,1795],{},[417,1769,1770,1773],{},[535,1771,1772],{},"−3",[535,1774,1775],{},"9",[417,1777,1778,1781],{},[535,1779,1780],{},"−1",[535,1782,353],{},[417,1784,1785,1787],{},[535,1786,537],{},[535,1788,537],{},[417,1790,1791,1793],{},[535,1792,353],{},[535,1794,353],{},[417,1796,1797,1799],{},[535,1798,546],{},[535,1800,1775],{},[12,1802,1803,1804,1855,1856,1907,1908,1911,1912,2051,2052,2055],{},"Near ",[64,1805,1807,1825],{"className":1806},[71],[64,1808,1810],{"className":1809},[75],[77,1811,1812],{"xmlns":79},[82,1813,1814,1822],{},[85,1815,1816,1818,1820],{},[88,1817,100],{},[92,1819,94],{},[344,1821,537],{},[108,1823,1824],{"encoding":110},"x = 
0",[64,1826,1828,1846],{"className":1827,"ariaHidden":116},[115],[64,1829,1831,1834,1837,1840,1843],{"className":1830},[120],[64,1832],{"className":1833,"style":212},[124],[64,1835,100],{"className":1836},[129,130],[64,1838],{"className":1839,"style":136},[135],[64,1841,94],{"className":1842},[140],[64,1844],{"className":1845,"style":136},[135],[64,1847,1849,1852],{"className":1848},[120],[64,1850],{"className":1851,"style":406},[124],[64,1853,537],{"className":1854},[129]," the curve is nearly flat. Near ",[64,1857,1859,1877],{"className":1858},[71],[64,1860,1862],{"className":1861},[75],[77,1863,1864],{"xmlns":79},[82,1865,1866,1874],{},[85,1867,1868,1870,1872],{},[88,1869,100],{},[92,1871,94],{},[344,1873,546],{},[108,1875,1876],{"encoding":110},"x = 3",[64,1878,1880,1898],{"className":1879,"ariaHidden":116},[115],[64,1881,1883,1886,1889,1892,1895],{"className":1882},[120],[64,1884],{"className":1885,"style":212},[124],[64,1887,100],{"className":1888},[129,130],[64,1890],{"className":1891,"style":136},[135],[64,1893,94],{"className":1894},[140],[64,1896],{"className":1897,"style":136},[135],[64,1899,1901,1904],{"className":1900},[120],[64,1902],{"className":1903,"style":406},[124],[64,1905,546],{"className":1906},[129]," it rises steeply. 
The slope is ",[21,1909,1910],{},"different at every point"," — which means the single formula ",[64,1913,1915,1945],{"className":1914},[71],[64,1916,1918],{"className":1917},[75],[77,1919,1920],{"xmlns":79},[82,1921,1922,1942],{},[85,1923,1924,1926,1928],{},[88,1925,97],{},[92,1927,94],{},[1002,1929,1930,1936],{},[85,1931,1932,1934],{},[88,1933,1009],{"mathvariant":1008},[88,1935,90],{},[85,1937,1938,1940],{},[88,1939,1009],{"mathvariant":1008},[88,1941,100],{},[108,1943,1944],{"encoding":110},"m = \\frac{\\Delta y}{\\Delta x}",[64,1946,1948,1966],{"className":1947,"ariaHidden":116},[115],[64,1949,1951,1954,1957,1960,1963],{"className":1950},[120],[64,1952],{"className":1953,"style":212},[124],[64,1955,97],{"className":1956},[129,130],[64,1958],{"className":1959,"style":136},[135],[64,1961,94],{"className":1962},[140],[64,1964],{"className":1965,"style":136},[135],[64,1967,1969,1973],{"className":1968},[120],[64,1970],{"className":1971,"style":1972},[124],"height:1.2694em;vertical-align:-0.345em;",[64,1974,1976,1979,2048],{"className":1975},[129],[64,1977],{"className":1978},[736,1092],[64,1980,1982],{"className":1981},[1002],[64,1983,1985,2039],{"className":1984},[750,751],[64,1986,1988,2036],{"className":1987},[755],[64,1989,1992,2010,2018],{"className":1990,"style":1991},[759],"height:0.9244em;",[64,1993,1995,1998],{"style":1994},"top:-2.655em;",[64,1996],{"className":1997,"style":1112},[767],[64,1999,2001],{"className":2000},[772,773,774,775],[64,2002,2004,2007],{"className":2003},[129,775],[64,2005,1009],{"className":2006},[129,775],[64,2008,100],{"className":2009},[129,130,775],[64,2011,2012,2015],{"style":1124},[64,2013],{"className":2014,"style":1112},[767],[64,2016],{"className":2017,"style":1132},[1131],[64,2019,2021,2024],{"style":2020},"top:-3.4461em;",[64,2022],{"className":2023,"style":1112},[767],[64,2025,2027],{"className":2026},[772,773,774,775],[64,2028,2030,2033],{"className":2029},[129,775],[64,2031,1009],{"className":2032},[129,775],[64,2034,90
],{"className":2035,"style":131},[129,130,775],[64,2037,783],{"className":2038},[782],[64,2040,2042],{"className":2041},[755],[64,2043,2046],{"className":2044,"style":2045},[759],"height:0.345em;",[64,2047],{},[64,2049],{"className":2050},[845,1092]," between two distant points only gives us an ",[16,2053,2054],{},"average",".",[56,2057,2059],{"id":2058},"average-rate-of-change","Average Rate of Change",[12,2061,2062,2063,846,2091,2143,2144,2173,2174,2177],{},"For two points ",[64,2064,2066,2079],{"className":2065},[71],[64,2067,2069],{"className":2068},[75],[77,2070,2071],{"xmlns":79},[82,2072,2073,2077],{},[85,2074,2075],{},[88,2076,100],{},[108,2078,100],{"encoding":110},[64,2080,2082],{"className":2081,"ariaHidden":116},[115],[64,2083,2085,2088],{"className":2084},[120],[64,2086],{"className":2087,"style":212},[124],[64,2089,100],{"className":2090},[129,130],[64,2092,2094,2113],{"className":2093},[71],[64,2095,2097],{"className":2096},[75],[77,2098,2099],{"xmlns":79},[82,2100,2101,2110],{},[85,2102,2103,2105,2107],{},[88,2104,100],{},[92,2106,103],{},[88,2108,2109],{},"h",[108,2111,2112],{"encoding":110},"x + h",[64,2114,2116,2134],{"className":2115,"ariaHidden":116},[115],[64,2117,2119,2122,2125,2128,2131],{"className":2118},[120],[64,2120],{"className":2121,"style":150},[124],[64,2123,100],{"className":2124},[129,130],[64,2126],{"className":2127,"style":160},[135],[64,2129,103],{"className":2130},[164],[64,2132],{"className":2133,"style":160},[135],[64,2135,2137,2140],{"className":2136},[120],[64,2138],{"className":2139,"style":174},[124],[64,2141,2109],{"className":2142},[129,130]," on a curve 
",[64,2145,2147,2160],{"className":2146},[71],[64,2148,2150],{"className":2149},[75],[77,2151,2152],{"xmlns":79},[82,2153,2154,2158],{},[85,2155,2156],{},[88,2157,1544],{},[108,2159,1544],{"encoding":110},[64,2161,2163],{"className":2162,"ariaHidden":116},[115],[64,2164,2166,2170],{"className":2165},[120],[64,2167],{"className":2168,"style":2169},[124],"height:0.8889em;vertical-align:-0.1944em;",[64,2171,1544],{"className":2172,"style":1575},[129,130],", the ",[21,2175,2176],{},"average rate of change"," over that interval is:",[64,2179,2181],{"className":2180},[67],[64,2182,2184,2240],{"className":2183},[71],[64,2185,2187],{"className":2186},[75],[77,2188,2189],{"xmlns":79,"display":80},[82,2190,2191,2237],{},[85,2192,2193,2207,2209],{},[1002,2194,2195,2201],{},[85,2196,2197,2199],{},[88,2198,1009],{"mathvariant":1008},[88,2200,1544],{},[85,2202,2203,2205],{},[88,2204,1009],{"mathvariant":1008},[88,2206,100],{},[92,2208,94],{},[1002,2210,2211,2235],{},[85,2212,2213,2215,2217,2219,2221,2223,2225,2227,2229,2231,2233],{},[88,2214,1544],{},[92,2216,700],{"stretchy":699},[88,2218,100],{},[92,2220,103],{},[88,2222,2109],{},[92,2224,719],{"stretchy":699},[92,2226,1032],{},[88,2228,1544],{},[92,2230,700],{"stretchy":699},[88,2232,100],{},[92,2234,719],{"stretchy":699},[88,2236,2109],{},[108,2238,2239],{"encoding":110},"\\frac{\\Delta f}{\\Delta x} = \\frac{f(x + h) - 
f(x)}{h}",[64,2241,2243,2328],{"className":2242,"ariaHidden":116},[115],[64,2244,2246,2250,2319,2322,2325],{"className":2245},[120],[64,2247],{"className":2248,"style":2249},[124],"height:2.0574em;vertical-align:-0.686em;",[64,2251,2253,2256,2316],{"className":2252},[129],[64,2254],{"className":2255},[736,1092],[64,2257,2259],{"className":2258},[1002],[64,2260,2262,2308],{"className":2261},[750,751],[64,2263,2265,2305],{"className":2264},[755],[64,2266,2269,2283,2291],{"className":2267,"style":2268},[759],"height:1.3714em;",[64,2270,2271,2274],{"style":1108},[64,2272],{"className":2273,"style":1112},[767],[64,2275,2277,2280],{"className":2276},[129],[64,2278,1009],{"className":2279},[129],[64,2281,100],{"className":2282},[129,130],[64,2284,2285,2288],{"style":1124},[64,2286],{"className":2287,"style":1112},[767],[64,2289],{"className":2290,"style":1132},[1131],[64,2292,2293,2296],{"style":1135},[64,2294],{"className":2295,"style":1112},[767],[64,2297,2299,2302],{"className":2298},[129],[64,2300,1009],{"className":2301},[129],[64,2303,1544],{"className":2304,"style":1575},[129,130],[64,2306,783],{"className":2307},[782],[64,2309,2311],{"className":2310},[755],[64,2312,2314],{"className":2313,"style":1157},[759],[64,2315],{},[64,2317],{"className":2318},[845,1092],[64,2320],{"className":2321,"style":136},[135],[64,2323,94],{"className":2324},[140],[64,2326],{"className":2327,"style":136},[135],[64,2329,2331,2335],{"className":2330},[120],[64,2332],{"className":2333,"style":2334},[124],"height:2.113em;vertical-align:-0.686em;",[64,2336,2338,2341,2437],{"className":2337},[129],[64,2339],{"className":2340},[736,1092],[64,2342,2344],{"className":2343},[1002],[64,2345,2347,2429],{"className":2346},[750,751],[64,2348,2350,2426],{"className":2349},[755],[64,2351,2354,2365,2373],{"className":2352,"style":2353},[759],"height:1.427em;",[64,2355,2356,2359],{"style":1108},[64,2357],{"className":2358,"style":1112},[767],[64,2360,2362],{"className":2361},[129],[64,2363,2109],{"clas
sName":2364},[129,130],[64,2366,2367,2370],{"style":1124},[64,2368],{"className":2369,"style":1112},[767],[64,2371],{"className":2372,"style":1132},[1131],[64,2374,2375,2378],{"style":1135},[64,2376],{"className":2377,"style":1112},[767],[64,2379,2381,2384,2387,2390,2393,2396,2399,2402,2405,2408,2411,2414,2417,2420,2423],{"className":2380},[129],[64,2382,1544],{"className":2383,"style":1575},[129,130],[64,2385,700],{"className":2386},[736],[64,2388,100],{"className":2389},[129,130],[64,2391],{"className":2392,"style":160},[135],[64,2394,103],{"className":2395},[164],[64,2397],{"className":2398,"style":160},[135],[64,2400,2109],{"className":2401},[129,130],[64,2403,719],{"className":2404},[845],[64,2406],{"className":2407,"style":160},[135],[64,2409,1032],{"className":2410},[164],[64,2412],{"className":2413,"style":160},[135],[64,2415,1544],{"className":2416,"style":1575},[129,130],[64,2418,700],{"className":2419},[736],[64,2421,100],{"className":2422},[129,130],[64,2424,719],{"className":2425},[845],[64,2427,783],{"className":2428},[782],[64,2430,2432],{"className":2431},[755],[64,2433,2435],{"className":2434,"style":1157},[759],[64,2436],{},[64,2438],{"className":2439},[845,1092],[12,2441,2442,2443,2446],{},"This is the slope of the ",[21,2444,2445],{},"secant line"," — the straight line connecting the two points on the curve.",[12,2448,2449,2452,2453,2548,2549,846,2600,2650],{},[21,2450,2451],{},"Example"," on 
",[64,2454,2456,2483],{"className":2455},[71],[64,2457,2459],{"className":2458},[75],[77,2460,2461],{"xmlns":79},[82,2462,2463,2481],{},[85,2464,2465,2467,2469,2471,2473,2475],{},[88,2466,1544],{},[92,2468,700],{"stretchy":699},[88,2470,100],{},[92,2472,719],{"stretchy":699},[92,2474,94],{},[1554,2476,2477,2479],{},[88,2478,100],{},[344,2480,346],{},[108,2482,1562],{"encoding":110},[64,2484,2486,2513],{"className":2485,"ariaHidden":116},[115],[64,2487,2489,2492,2495,2498,2501,2504,2507,2510],{"className":2488},[120],[64,2490],{"className":2491,"style":732},[124],[64,2493,1544],{"className":2494,"style":1575},[129,130],[64,2496,700],{"className":2497},[736],[64,2499,100],{"className":2500},[129,130],[64,2502,719],{"className":2503},[845],[64,2505],{"className":2506,"style":136},[135],[64,2508,94],{"className":2509},[140],[64,2511],{"className":2512,"style":136},[135],[64,2514,2516,2519],{"className":2515},[120],[64,2517],{"className":2518,"style":1735},[124],[64,2520,2522,2525],{"className":2521},[129],[64,2523,100],{"className":2524},[129,130],[64,2526,2528],{"className":2527},[746],[64,2529,2531],{"className":2530},[750],[64,2532,2534],{"className":2533},[755],[64,2535,2537],{"className":2536,"style":1735},[759],[64,2538,2539,2542],{"style":1756},[64,2540],{"className":2541,"style":768},[767],[64,2543,2545],{"className":2544},[772,773,774,775],[64,2546,346],{"className":2547},[129,775]," between ",[64,2550,2552,2570],{"className":2551},[71],[64,2553,2555],{"className":2554},[75],[77,2556,2557],{"xmlns":79},[82,2558,2559,2567],{},[85,2560,2561,2563,2565],{},[88,2562,100],{},[92,2564,94],{},[344,2566,353],{},[108,2568,2569],{"encoding":110},"x = 
1",[64,2571,2573,2591],{"className":2572,"ariaHidden":116},[115],[64,2574,2576,2579,2582,2585,2588],{"className":2575},[120],[64,2577],{"className":2578,"style":212},[124],[64,2580,100],{"className":2581},[129,130],[64,2583],{"className":2584,"style":136},[135],[64,2586,94],{"className":2587},[140],[64,2589],{"className":2590,"style":136},[135],[64,2592,2594,2597],{"className":2593},[120],[64,2595],{"className":2596,"style":406},[124],[64,2598,353],{"className":2599},[129],[64,2601,2603,2620],{"className":2602},[71],[64,2604,2606],{"className":2605},[75],[77,2607,2608],{"xmlns":79},[82,2609,2610,2618],{},[85,2611,2612,2614,2616],{},[88,2613,100],{},[92,2615,94],{},[344,2617,546],{},[108,2619,1876],{"encoding":110},[64,2621,2623,2641],{"className":2622,"ariaHidden":116},[115],[64,2624,2626,2629,2632,2635,2638],{"className":2625},[120],[64,2627],{"className":2628,"style":212},[124],[64,2630,100],{"className":2631},[129,130],[64,2633],{"className":2634,"style":136},[135],[64,2636,94],{"className":2637},[140],[64,2639],{"className":2640,"style":136},[135],[64,2642,2644,2647],{"className":2643},[120],[64,2645],{"className":2646,"style":406},[124],[64,2648,546],{"className":2649},[129],":",[64,2652,2654],{"className":2653},[67],[64,2655,2657,2718],{"className":2656},[71],[64,2658,2660],{"className":2659},[75],[77,2661,2662],{"xmlns":79,"display":80},[82,2663,2664,2715],{},[85,2665,2666,2696,2698,2710,2712],{},[1002,2667,2668,2688],{},[85,2669,2670,2672,2674,2676,2678,2680,2682,2684,2686],{},[88,2671,1544],{},[92,2673,700],{"stretchy":699},[344,2675,546],{},[92,2677,719],{"stretchy":699},[92,2679,1032],{},[88,2681,1544],{},[92,2683,700],{"stretchy":699},[344,2685,353],{},[92,2687,719],{"stretchy":699},[85,2689,2690,2692,2694],{},[344,2691,546],{},[92,2693,1032],{},[344,2695,353],{},[92,2697,94],{},[1002,2699,2700,2708],{},[85,2701,2702,2704,2706],{},[344,2703,1775],{},[92,2705,1032],{},[344,2707,353],{},[344,2709,346],{},[92,2711,94],{},[344,2713,2714],{},"4",[108,2716,271
7],{"encoding":110},"\\frac{f(3) - f(1)}{3 - 1} = \\frac{9 - 1}{2} = 4",[64,2719,2721,2842,2933],{"className":2720,"ariaHidden":116},[115],[64,2722,2724,2728,2833,2836,2839],{"className":2723},[120],[64,2725],{"className":2726,"style":2727},[124],"height:2.1963em;vertical-align:-0.7693em;",[64,2729,2731,2734,2830],{"className":2730},[129],[64,2732],{"className":2733},[736,1092],[64,2735,2737],{"className":2736},[1002],[64,2738,2740,2821],{"className":2739},[750,751],[64,2741,2743,2818],{"className":2742},[755],[64,2744,2746,2769,2777],{"className":2745,"style":2353},[759],[64,2747,2748,2751],{"style":1108},[64,2749],{"className":2750,"style":1112},[767],[64,2752,2754,2757,2760,2763,2766],{"className":2753},[129],[64,2755,546],{"className":2756},[129],[64,2758],{"className":2759,"style":160},[135],[64,2761,1032],{"className":2762},[164],[64,2764],{"className":2765,"style":160},[135],[64,2767,353],{"className":2768},[129],[64,2770,2771,2774],{"style":1124},[64,2772],{"className":2773,"style":1112},[767],[64,2775],{"className":2776,"style":1132},[1131],[64,2778,2779,2782],{"style":1135},[64,2780],{"className":2781,"style":1112},[767],[64,2783,2785,2788,2791,2794,2797,2800,2803,2806,2809,2812,2815],{"className":2784},[129],[64,2786,1544],{"className":2787,"style":1575},[129,130],[64,2789,700],{"className":2790},[736],[64,2792,546],{"className":2793},[129],[64,2795,719],{"className":2796},[845],[64,2798],{"className":2799,"style":160},[135],[64,2801,1032],{"className":2802},[164],[64,2804],{"className":2805,"style":160},[135],[64,2807,1544],{"className":2808,"style":1575},[129,130],[64,2810,700],{"className":2811},[736],[64,2813,353],{"className":2814},[129],[64,2816,719],{"className":2817},[845],[64,2819,783],{"className":2820},[782],[64,2822,2824],{"className":2823},[755],[64,2825,2828],{"className":2826,"style":2827},[759],"height:0.7693em;",[64,2829],{},[64,2831],{"className":2832},[845,1092],[64,2834],{"className":2835,"style":136},[135],[64,2837,94],{"className":28
38},[140],[64,2840],{"className":2841,"style":136},[135],[64,2843,2845,2849,2924,2927,2930],{"className":2844},[120],[64,2846],{"className":2847,"style":2848},[124],"height:2.0074em;vertical-align:-0.686em;",[64,2850,2852,2855,2921],{"className":2851},[129],[64,2853],{"className":2854},[736,1092],[64,2856,2858],{"className":2857},[1002],[64,2859,2861,2913],{"className":2860},[750,751],[64,2862,2864,2910],{"className":2863},[755],[64,2865,2868,2879,2887],{"className":2866,"style":2867},[759],"height:1.3214em;",[64,2869,2870,2873],{"style":1108},[64,2871],{"className":2872,"style":1112},[767],[64,2874,2876],{"className":2875},[129],[64,2877,346],{"className":2878},[129],[64,2880,2881,2884],{"style":1124},[64,2882],{"className":2883,"style":1112},[767],[64,2885],{"className":2886,"style":1132},[1131],[64,2888,2889,2892],{"style":1135},[64,2890],{"className":2891,"style":1112},[767],[64,2893,2895,2898,2901,2904,2907],{"className":2894},[129],[64,2896,1775],{"className":2897},[129],[64,2899],{"className":2900,"style":160},[135],[64,2902,1032],{"className":2903},[164],[64,2905],{"className":2906,"style":160},[135],[64,2908,353],{"className":2909},[129],[64,2911,783],{"className":2912},[782],[64,2914,2916],{"className":2915},[755],[64,2917,2919],{"className":2918,"style":1157},[759],[64,2920],{},[64,2922],{"className":2923},[845,1092],[64,2925],{"className":2926,"style":136},[135],[64,2928,94],{"className":2929},[140],[64,2931],{"className":2932,"style":136},[135],[64,2934,2936,2939],{"className":2935},[120],[64,2937],{"className":2938,"style":406},[124],[64,2940,2714],{"className":2941},[129],[12,2943,2944,2945,846,2996,3047,3048,3051],{},"That is the average steepness between 
",[64,2946,2948,2966],{"className":2947},[71],[64,2949,2951],{"className":2950},[75],[77,2952,2953],{"xmlns":79},[82,2954,2955,2963],{},[85,2956,2957,2959,2961],{},[88,2958,100],{},[92,2960,94],{},[344,2962,353],{},[108,2964,2965],{"encoding":110},"x=1",[64,2967,2969,2987],{"className":2968,"ariaHidden":116},[115],[64,2970,2972,2975,2978,2981,2984],{"className":2971},[120],[64,2973],{"className":2974,"style":212},[124],[64,2976,100],{"className":2977},[129,130],[64,2979],{"className":2980,"style":136},[135],[64,2982,94],{"className":2983},[140],[64,2985],{"className":2986,"style":136},[135],[64,2988,2990,2993],{"className":2989},[120],[64,2991],{"className":2992,"style":406},[124],[64,2994,353],{"className":2995},[129],[64,2997,2999,3017],{"className":2998},[71],[64,3000,3002],{"className":3001},[75],[77,3003,3004],{"xmlns":79},[82,3005,3006,3014],{},[85,3007,3008,3010,3012],{},[88,3009,100],{},[92,3011,94],{},[344,3013,546],{},[108,3015,3016],{"encoding":110},"x=3",[64,3018,3020,3038],{"className":3019,"ariaHidden":116},[115],[64,3021,3023,3026,3029,3032,3035],{"className":3022},[120],[64,3024],{"className":3025,"style":212},[124],[64,3027,100],{"className":3028},[129,130],[64,3030],{"className":3031,"style":136},[135],[64,3033,94],{"className":3034},[140],[64,3036],{"className":3037,"style":136},[135],[64,3039,3041,3044],{"className":3040},[120],[64,3042],{"className":3043,"style":406},[124],[64,3045,546],{"className":3046},[129],", but it doesn't tell us what the slope is ",[16,3049,3050],{},"at"," a specific point.",[26,3053],{},[51,3055,3057],{"id":3056},"part-3-the-limit-zooming-in-to-a-single-point","Part 3 — The Limit: Zooming In to a Single Point",[12,3059,3060,3061,3064,3065,3093,3094,3122,3123,3126],{},"To find the slope ",[21,3062,3063],{},"at one exact point",", we shrink the interval 
",[64,3066,3068,3081],{"className":3067},[71],[64,3069,3071],{"className":3070},[75],[77,3072,3073],{"xmlns":79},[82,3074,3075,3079],{},[85,3076,3077],{},[88,3078,2109],{},[108,3080,2109],{"encoding":110},[64,3082,3084],{"className":3083,"ariaHidden":116},[115],[64,3085,3087,3090],{"className":3086},[120],[64,3088],{"className":3089,"style":174},[124],[64,3091,2109],{"className":3092},[129,130]," down toward zero. As ",[64,3095,3097,3110],{"className":3096},[71],[64,3098,3100],{"className":3099},[75],[77,3101,3102],{"xmlns":79},[82,3103,3104,3108],{},[85,3105,3106],{},[88,3107,2109],{},[108,3109,2109],{"encoding":110},[64,3111,3113],{"className":3112,"ariaHidden":116},[115],[64,3114,3116,3119],{"className":3115},[120],[64,3117],{"className":3118,"style":174},[124],[64,3120,2109],{"className":3121},[129,130]," gets smaller and smaller, the secant line rotates until it becomes the ",[21,3124,3125],{},"tangent line"," — touching the curve at exactly one point and matching its steepness there.",[12,3128,3129,3130,3133,3134,216,3162,2650],{},"Formally, the ",[21,3131,3132],{},"instantaneous rate of change"," at 
",[64,3135,3137,3150],{"className":3136},[71],[64,3138,3140],{"className":3139},[75],[77,3141,3142],{"xmlns":79},[82,3143,3144,3148],{},[85,3145,3146],{},[88,3147,100],{},[108,3149,100],{"encoding":110},[64,3151,3153],{"className":3152,"ariaHidden":116},[115],[64,3154,3156,3159],{"className":3155},[120],[64,3157],{"className":3158,"style":212},[124],[64,3160,100],{"className":3161},[129,130],[21,3163,3164],{},"limit",[64,3166,3168],{"className":3167},[67],[64,3169,3171,3231],{"className":3170},[71],[64,3172,3174],{"className":3173},[75],[77,3175,3176],{"xmlns":79,"display":80},[82,3177,3178,3228],{},[85,3179,3180,3200],{},[3181,3182,3183,3191],"munder",{},[85,3184,3185,3188],{},[88,3186,3187],{},"lim",[92,3189,3190],{},"⁡",[85,3192,3193,3195,3198],{},[88,3194,2109],{},[92,3196,3197],{},"→",[344,3199,537],{},[1002,3201,3202,3226],{},[85,3203,3204,3206,3208,3210,3212,3214,3216,3218,3220,3222,3224],{},[88,3205,1544],{},[92,3207,700],{"stretchy":699},[88,3209,100],{},[92,3211,103],{},[88,3213,2109],{},[92,3215,719],{"stretchy":699},[92,3217,1032],{},[88,3219,1544],{},[92,3221,700],{"stretchy":699},[88,3223,100],{},[92,3225,719],{"stretchy":699},[88,3227,2109],{},[108,3229,3230],{"encoding":110},"\\lim_{h \\to 0} \\frac{f(x + h) - 
f(x)}{h}",[64,3232,3234],{"className":3233,"ariaHidden":116},[115],[64,3235,3237,3241,3299,3302],{"className":3236},[120],[64,3238],{"className":3239,"style":3240},[124],"height:2.1791em;vertical-align:-0.7521em;",[64,3242,3246],{"className":3243},[3244,3245],"mop","op-limits",[64,3247,3249,3290],{"className":3248},[750,751],[64,3250,3252,3287],{"className":3251},[755],[64,3253,3255,3276],{"className":3254,"style":174},[759],[64,3256,3258,3261],{"style":3257},"top:-2.3479em;margin-left:0em;",[64,3259],{"className":3260,"style":1112},[767],[64,3262,3264],{"className":3263},[772,773,774,775],[64,3265,3267,3270,3273],{"className":3266},[129,775],[64,3268,2109],{"className":3269},[129,130,775],[64,3271,3197],{"className":3272},[140,775],[64,3274,537],{"className":3275},[129,775],[64,3277,3279,3282],{"style":3278},"top:-3em;",[64,3280],{"className":3281,"style":1112},[767],[64,3283,3284],{},[64,3285,3187],{"className":3286},[3244],[64,3288,783],{"className":3289},[782],[64,3291,3293],{"className":3292},[755],[64,3294,3297],{"className":3295,"style":3296},[759],"height:0.7521em;",[64,3298],{},[64,3300],{"className":3301,"style":800},[135],[64,3303,3305,3308,3403],{"className":3304},[129],[64,3306],{"className":3307},[736,1092],[64,3309,3311],{"className":3310},[1002],[64,3312,3314,3395],{"className":3313},[750,751],[64,3315,3317,3392],{"className":3316},[755],[64,3318,3320,3331,3339],{"className":3319,"style":2353},[759],[64,3321,3322,3325],{"style":1108},[64,3323],{"className":3324,"style":1112},[767],[64,3326,3328],{"className":3327},[129],[64,3329,2109],{"className":3330},[129,130],[64,3332,3333,3336],{"style":1124},[64,3334],{"className":3335,"style":1112},[767],[64,3337],{"className":3338,"style":1132},[1131],[64,3340,3341,3344],{"style":1135},[64,3342],{"className":3343,"style":1112},[767],[64,3345,3347,3350,3353,3356,3359,3362,3365,3368,3371,3374,3377,3380,3383,3386,3389],{"className":3346},[129],[64,3348,1544],{"className":3349,"style":1575},[129,130],[64,3351,700
],{"className":3352},[736],[64,3354,100],{"className":3355},[129,130],[64,3357],{"className":3358,"style":160},[135],[64,3360,103],{"className":3361},[164],[64,3363],{"className":3364,"style":160},[135],[64,3366,2109],{"className":3367},[129,130],[64,3369,719],{"className":3370},[845],[64,3372],{"className":3373,"style":160},[135],[64,3375,1032],{"className":3376},[164],[64,3378],{"className":3379,"style":160},[135],[64,3381,1544],{"className":3382,"style":1575},[129,130],[64,3384,700],{"className":3385},[736],[64,3387,100],{"className":3388},[129,130],[64,3390,719],{"className":3391},[845],[64,3393,783],{"className":3394},[782],[64,3396,3398],{"className":3397},[755],[64,3399,3401],{"className":3400,"style":1157},[759],[64,3402],{},[64,3404],{"className":3405},[845,1092],[12,3407,3408],{},"This is the core idea of a derivative.",[56,3410,3412],{"id":3411},"limits-intuitively","Limits Intuitively",[12,3414,3415,3416],{},"A limit asks: ",[16,3417,3418],{},"\"What value does an expression approach as a variable gets closer and closer to some number — even if it never arrives?\"",[64,3420,3422],{"className":3421},[67],[64,3423,3425,3481],{"className":3424},[71],[64,3426,3428],{"className":3427},[75],[77,3429,3430],{"xmlns":79,"display":80},[82,3431,3432,3478],{},[85,3433,3434,3450],{},[3181,3435,3436,3442],{},[85,3437,3438,3440],{},[88,3439,3187],{},[92,3441,3190],{},[85,3443,3444,3446,3448],{},[88,3445,2109],{},[92,3447,3197],{},[344,3449,537],{},[1002,3451,3452,3476],{},[85,3453,3454,3456,3458,3460,3462,3468,3470],{},[92,3455,700],{"stretchy":699},[88,3457,100],{},[92,3459,103],{},[88,3461,2109],{},[1554,3463,3464,3466],{},[92,3465,719],{"stretchy":699},[344,3467,346],{},[92,3469,1032],{},[1554,3471,3472,3474],{},[88,3473,100],{},[344,3475,346],{},[88,3477,2109],{},[108,3479,3480],{"encoding":110},"\\lim_{h \\to 0} \\frac{(x+h)^2 - 
x^2}{h}",[64,3482,3484],{"className":3483,"ariaHidden":116},[115],[64,3485,3487,3491,3544,3547],{"className":3486},[120],[64,3488],{"className":3489,"style":3490},[124],"height:2.2432em;vertical-align:-0.7521em;",[64,3492,3494],{"className":3493},[3244,3245],[64,3495,3497,3536],{"className":3496},[750,751],[64,3498,3500,3533],{"className":3499},[755],[64,3501,3503,3523],{"className":3502,"style":174},[759],[64,3504,3505,3508],{"style":3257},[64,3506],{"className":3507,"style":1112},[767],[64,3509,3511],{"className":3510},[772,773,774,775],[64,3512,3514,3517,3520],{"className":3513},[129,775],[64,3515,2109],{"className":3516},[129,130,775],[64,3518,3197],{"className":3519},[140,775],[64,3521,537],{"className":3522},[129,775],[64,3524,3525,3528],{"style":3278},[64,3526],{"className":3527,"style":1112},[767],[64,3529,3530],{},[64,3531,3187],{"className":3532},[3244],[64,3534,783],{"className":3535},[782],[64,3537,3539],{"className":3538},[755],[64,3540,3542],{"className":3541,"style":3296},[759],[64,3543],{},[64,3545],{"className":3546,"style":800},[135],[64,3548,3550,3553,3689],{"className":3549},[129],[64,3551],{"className":3552},[736,1092],[64,3554,3556],{"className":3555},[1002],[64,3557,3559,3681],{"className":3558},[750,751],[64,3560,3562,3678],{"className":3561},[755],[64,3563,3566,3577,3585],{"className":3564,"style":3565},[759],"height:1.4911em;",[64,3567,3568,3571],{"style":1108},[64,3569],{"className":3570,"style":1112},[767],[64,3572,3574],{"className":3573},[129],[64,3575,2109],{"className":3576},[129,130],[64,3578,3579,3582],{"style":1124},[64,3580],{"className":3581,"style":1112},[767],[64,3583],{"className":3584,"style":1132},[1131],[64,3586,3587,3590],{"style":1135},[64,3588],{"className":3589,"style":1112},[767],[64,3591,3593,3596,3599,3602,3605,3608,3611,3640,3643,3646,3649],{"className":3592},[129],[64,3594,700],{"className":3595},[736],[64,3597,100],{"className":3598},[129,130],[64,3600],{"className":3601,"style":160},[135],[64,3603,103],{"classNam
e":3604},[164],[64,3606],{"className":3607,"style":160},[135],[64,3609,2109],{"className":3610},[129,130],[64,3612,3614,3617],{"className":3613},[845],[64,3615,719],{"className":3616},[845],[64,3618,3620],{"className":3619},[746],[64,3621,3623],{"className":3622},[750],[64,3624,3626],{"className":3625},[755],[64,3627,3629],{"className":3628,"style":1735},[759],[64,3630,3631,3634],{"style":1756},[64,3632],{"className":3633,"style":768},[767],[64,3635,3637],{"className":3636},[772,773,774,775],[64,3638,346],{"className":3639},[129,775],[64,3641],{"className":3642,"style":160},[135],[64,3644,1032],{"className":3645},[164],[64,3647],{"className":3648,"style":160},[135],[64,3650,3652,3655],{"className":3651},[129],[64,3653,100],{"className":3654},[129,130],[64,3656,3658],{"className":3657},[746],[64,3659,3661],{"className":3660},[750],[64,3662,3664],{"className":3663},[755],[64,3665,3667],{"className":3666,"style":1735},[759],[64,3668,3669,3672],{"style":1756},[64,3670],{"className":3671,"style":768},[767],[64,3673,3675],{"className":3674},[772,773,774,775],[64,3676,346],{"className":3677},[129,775],[64,3679,783],{"className":3680},[782],[64,3682,3684],{"className":3683},[755],[64,3685,3687],{"className":3686,"style":1157},[759],[64,3688],{},[64,3690],{"className":3691},[845,1092],[12,3693,3694],{},"Expand the 
numerator:",[64,3696,3698],{"className":3697},[67],[64,3699,3701,3835],{"className":3700},[71],[64,3702,3704],{"className":3703},[75],[77,3705,3706],{"xmlns":79,"display":80},[82,3707,3708,3832],{},[85,3709,3710,3712,3728,3764,3766,3782,3802,3804,3820,3822,3824,3826,3828,3830],{},[92,3711,94],{},[3181,3713,3714,3720],{},[85,3715,3716,3718],{},[88,3717,3187],{},[92,3719,3190],{},[85,3721,3722,3724,3726],{},[88,3723,2109],{},[92,3725,3197],{},[344,3727,537],{},[1002,3729,3730,3762],{},[85,3731,3732,3738,3740,3742,3744,3746,3748,3754,3756],{},[1554,3733,3734,3736],{},[88,3735,100],{},[344,3737,346],{},[92,3739,103],{},[344,3741,346],{},[88,3743,100],{},[88,3745,2109],{},[92,3747,103],{},[1554,3749,3750,3752],{},[88,3751,2109],{},[344,3753,346],{},[92,3755,1032],{},[1554,3757,3758,3760],{},[88,3759,100],{},[344,3761,346],{},[88,3763,2109],{},[92,3765,94],{},[3181,3767,3768,3774],{},[85,3769,3770,3772],{},[88,3771,3187],{},[92,3773,3190],{},[85,3775,3776,3778,3780],{},[88,3777,2109],{},[92,3779,3197],{},[344,3781,537],{},[1002,3783,3784,3800],{},[85,3785,3786,3788,3790,3792,3794],{},[344,3787,346],{},[88,3789,100],{},[88,3791,2109],{},[92,3793,103],{},[1554,3795,3796,3798],{},[88,3797,2109],{},[344,3799,346],{},[88,3801,2109],{},[92,3803,94],{},[3181,3805,3806,3812],{},[85,3807,3808,3810],{},[88,3809,3187],{},[92,3811,3190],{},[85,3813,3814,3816,3818],{},[88,3815,2109],{},[92,3817,3197],{},[344,3819,537],{},[92,3821,700],{"stretchy":699},[344,3823,346],{},[88,3825,100],{},[92,3827,103],{},[88,3829,2109],{},[92,3831,719],{"stretchy":699},[108,3833,3834],{"encoding":110},"= \\lim_{h \\to 0} \\frac{x^2 + 2xh + h^2 - x^2}{h} = \\lim_{h \\to 0} \\frac{2xh + h^2}{h} = \\lim_{h \\to 0} (2x + 
h)",[64,3836,3838,3851,4104,4281,4359],{"className":3837,"ariaHidden":116},[115],[64,3839,3841,3845,3848],{"className":3840},[120],[64,3842],{"className":3843,"style":3844},[124],"height:0.3669em;",[64,3846,94],{"className":3847},[140],[64,3849],{"className":3850,"style":136},[135],[64,3852,3854,3857,3910,3913,4095,4098,4101],{"className":3853},[120],[64,3855],{"className":3856,"style":3490},[124],[64,3858,3860],{"className":3859},[3244,3245],[64,3861,3863,3902],{"className":3862},[750,751],[64,3864,3866,3899],{"className":3865},[755],[64,3867,3869,3889],{"className":3868,"style":174},[759],[64,3870,3871,3874],{"style":3257},[64,3872],{"className":3873,"style":1112},[767],[64,3875,3877],{"className":3876},[772,773,774,775],[64,3878,3880,3883,3886],{"className":3879},[129,775],[64,3881,2109],{"className":3882},[129,130,775],[64,3884,3197],{"className":3885},[140,775],[64,3887,537],{"className":3888},[129,775],[64,3890,3891,3894],{"style":3278},[64,3892],{"className":3893,"style":1112},[767],[64,3895,3896],{},[64,3897,3187],{"className":3898},[3244],[64,3900,783],{"className":3901},[782],[64,3903,3905],{"className":3904},[755],[64,3906,3908],{"className":3907,"style":3296},[759],[64,3909],{},[64,3911],{"className":3912,"style":800},[135],[64,3914,3916,3919,4092],{"className":3915},[129],[64,3917],{"className":3918},[736,1092],[64,3920,3922],{"className":3921},[1002],[64,3923,3925,4084],{"className":3924},[750,751],[64,3926,3928,4081],{"className":3927},[755],[64,3929,3931,3942,3950],{"className":3930,"style":3565},[759],[64,3932,3933,3936],{"style":1108},[64,3934],{"className":3935,"style":1112},[767],[64,3937,3939],{"className":3938},[129],[64,3940,2109],{"className":3941},[129,130],[64,3943,3944,3947],{"style":1124},[64,3945],{"className":3946,"style":1112},[767],[64,3948],{"className":3949,"style":1132},[1131],[64,3951,3952,3955],{"style":1135},[64,3953],{"className":3954,"style":1112},[767],[64,3956,3958,3987,3990,3993,3996,3999,4002,4005,4008,4011,4014,4043,4046,
4049,4052],{"className":3957},[129],[64,3959,3961,3964],{"className":3960},[129],[64,3962,100],{"className":3963},[129,130],[64,3965,3967],{"className":3966},[746],[64,3968,3970],{"className":3969},[750],[64,3971,3973],{"className":3972},[755],[64,3974,3976],{"className":3975,"style":1735},[759],[64,3977,3978,3981],{"style":1756},[64,3979],{"className":3980,"style":768},[767],[64,3982,3984],{"className":3983},[772,773,774,775],[64,3985,346],{"className":3986},[129,775],[64,3988],{"className":3989,"style":160},[135],[64,3991,103],{"className":3992},[164],[64,3994],{"className":3995,"style":160},[135],[64,3997,346],{"className":3998},[129],[64,4000,100],{"className":4001},[129,130],[64,4003,2109],{"className":4004},[129,130],[64,4006],{"className":4007,"style":160},[135],[64,4009,103],{"className":4010},[164],[64,4012],{"className":4013,"style":160},[135],[64,4015,4017,4020],{"className":4016},[129],[64,4018,2109],{"className":4019},[129,130],[64,4021,4023],{"className":4022},[746],[64,4024,4026],{"className":4025},[750],[64,4027,4029],{"className":4028},[755],[64,4030,4032],{"className":4031,"style":1735},[759],[64,4033,4034,4037],{"style":1756},[64,4035],{"className":4036,"style":768},[767],[64,4038,4040],{"className":4039},[772,773,774,775],[64,4041,346],{"className":4042},[129,775],[64,4044],{"className":4045,"style":160},[135],[64,4047,1032],{"className":4048},[164],[64,4050],{"className":4051,"style":160},[135],[64,4053,4055,4058],{"className":4054},[129],[64,4056,100],{"className":4057},[129,130],[64,4059,4061],{"className":4060},[746],[64,4062,4064],{"className":4063},[750],[64,4065,4067],{"className":4066},[755],[64,4068,4070],{"className":4069,"style":1735},[759],[64,4071,4072,4075],{"style":1756},[64,4073],{"className":4074,"style":768},[767],[64,4076,4078],{"className":4077},[772,773,774,775],[64,4079,346],{"className":4080},[129,775],[64,4082,783],{"className":4083},[782],[64,4085,4087],{"className":4086},[755],[64,4088,4090],{"className":4089,"style":115
7},[759],[64,4091],{},[64,4093],{"className":4094},[845,1092],[64,4096],{"className":4097,"style":136},[135],[64,4099,94],{"className":4100},[140],[64,4102],{"className":4103,"style":136},[135],[64,4105,4107,4110,4163,4166,4272,4275,4278],{"className":4106},[120],[64,4108],{"className":4109,"style":3490},[124],[64,4111,4113],{"className":4112},[3244,3245],[64,4114,4116,4155],{"className":4115},[750,751],[64,4117,4119,4152],{"className":4118},[755],[64,4120,4122,4142],{"className":4121,"style":174},[759],[64,4123,4124,4127],{"style":3257},[64,4125],{"className":4126,"style":1112},[767],[64,4128,4130],{"className":4129},[772,773,774,775],[64,4131,4133,4136,4139],{"className":4132},[129,775],[64,4134,2109],{"className":4135},[129,130,775],[64,4137,3197],{"className":4138},[140,775],[64,4140,537],{"className":4141},[129,775],[64,4143,4144,4147],{"style":3278},[64,4145],{"className":4146,"style":1112},[767],[64,4148,4149],{},[64,4150,3187],{"className":4151},[3244],[64,4153,783],{"className":4154},[782],[64,4156,4158],{"className":4157},[755],[64,4159,4161],{"className":4160,"style":3296},[759],[64,4162],{},[64,4164],{"className":4165,"style":800},[135],[64,4167,4169,4172,4269],{"className":4168},[129],[64,4170],{"className":4171},[736,1092],[64,4173,4175],{"className":4174},[1002],[64,4176,4178,4261],{"className":4177},[750,751],[64,4179,4181,4258],{"className":4180},[755],[64,4182,4184,4195,4203],{"className":4183,"style":3565},[759],[64,4185,4186,4189],{"style":1108},[64,4187],{"className":4188,"style":1112},[767],[64,4190,4192],{"className":4191},[129],[64,4193,2109],{"className":4194},[129,130],[64,4196,4197,4200],{"style":1124},[64,4198],{"className":4199,"style":1112},[767],[64,4201],{"className":4202,"style":1132},[1131],[64,4204,4205,4208],{"style":1135},[64,4206],{"className":4207,"style":1112},[767],[64,4209,4211,4214,4217,4220,4223,4226,4229],{"className":4210},[129],[64,4212,346],{"className":4213},[129],[64,4215,100],{"className":4216},[129,130],[64,4218,21
09],{"className":4219},[129,130],[64,4221],{"className":4222,"style":160},[135],[64,4224,103],{"className":4225},[164],[64,4227],{"className":4228,"style":160},[135],[64,4230,4232,4235],{"className":4231},[129],[64,4233,2109],{"className":4234},[129,130],[64,4236,4238],{"className":4237},[746],[64,4239,4241],{"className":4240},[750],[64,4242,4244],{"className":4243},[755],[64,4245,4247],{"className":4246,"style":1735},[759],[64,4248,4249,4252],{"style":1756},[64,4250],{"className":4251,"style":768},[767],[64,4253,4255],{"className":4254},[772,773,774,775],[64,4256,346],{"className":4257},[129,775],[64,4259,783],{"className":4260},[782],[64,4262,4264],{"className":4263},[755],[64,4265,4267],{"className":4266,"style":1157},[759],[64,4268],{},[64,4270],{"className":4271},[845,1092],[64,4273],{"className":4274,"style":136},[135],[64,4276,94],{"className":4277},[140],[64,4279],{"className":4280,"style":136},[135],[64,4282,4284,4288,4341,4344,4347,4350,4353,4356],{"className":4283},[120],[64,4285],{"className":4286,"style":4287},[124],"height:1.5021em;vertical-align:-0.7521em;",[64,4289,4291],{"className":4290},[3244,3245],[64,4292,4294,4333],{"className":4293},[750,751],[64,4295,4297,4330],{"className":4296},[755],[64,4298,4300,4320],{"className":4299,"style":174},[759],[64,4301,4302,4305],{"style":3257},[64,4303],{"className":4304,"style":1112},[767],[64,4306,4308],{"className":4307},[772,773,774,775],[64,4309,4311,4314,4317],{"className":4310},[129,775],[64,4312,2109],{"className":4313},[129,130,775],[64,4315,3197],{"className":4316},[140,775],[64,4318,537],{"className":4319},[129,775],[64,4321,4322,4325],{"style":3278},[64,4323],{"className":4324,"style":1112},[767],[64,4326,4327],{},[64,4328,3187],{"className":4329},[3244],[64,4331,783],{"className":4332},[782],[64,4334,4336],{"className":4335},[755],[64,4337,4339],{"className":4338,"style":3296},[759],[64,4340],{},[64,4342,700],{"className":4343},[736],[64,4345,346],{"className":4346},[129],[64,4348,100],{"className
":4349},[129,130],[64,4351],{"className":4352,"style":160},[135],[64,4354,103],{"className":4355},[164],[64,4357],{"className":4358,"style":160},[135],[64,4360,4362,4365,4368],{"className":4361},[120],[64,4363],{"className":4364,"style":732},[124],[64,4366,2109],{"className":4367},[129,130],[64,4369,719],{"className":4370},[845],[12,4372,4373,4374,2650],{},"As ",[64,4375,4377,4395],{"className":4376},[71],[64,4378,4380],{"className":4379},[75],[77,4381,4382],{"xmlns":79},[82,4383,4384,4392],{},[85,4385,4386,4388,4390],{},[88,4387,2109],{},[92,4389,3197],{},[344,4391,537],{},[108,4393,4394],{"encoding":110},"h \\to 0",[64,4396,4398,4416],{"className":4397,"ariaHidden":116},[115],[64,4399,4401,4404,4407,4410,4413],{"className":4400},[120],[64,4402],{"className":4403,"style":174},[124],[64,4405,2109],{"className":4406},[129,130],[64,4408],{"className":4409,"style":136},[135],[64,4411,3197],{"className":4412},[140],[64,4414],{"className":4415,"style":136},[135],[64,4417,4419,4422],{"className":4418},[120],[64,4420],{"className":4421,"style":406},[124],[64,4423,537],{"className":4424},[129],[64,4426,4428],{"className":4427},[67],[64,4429,4431,4449],{"className":4430},[71],[64,4432,4434],{"className":4433},[75],[77,4435,4436],{"xmlns":79,"display":80},[82,4437,4438,4446],{},[85,4439,4440,4442,4444],{},[92,4441,94],{},[344,4443,346],{},[88,4445,100],{},[108,4447,4448],{"encoding":110},"= 2x",[64,4450,4452,4464],{"className":4451,"ariaHidden":116},[115],[64,4453,4455,4458,4461],{"className":4454},[120],[64,4456],{"className":4457,"style":3844},[124],[64,4459,94],{"className":4460},[140],[64,4462],{"className":4463,"style":136},[135],[64,4465,4467,4470,4473],{"className":4466},[120],[64,4468],{"className":4469,"style":406},[124],[64,4471,346],{"className":4472},[129],[64,4474,100],{"className":4475},[129,130],[12,4477,4478,4479,4574,4575,4603,4604,2055],{},"The slope of 
",[64,4480,4482,4509],{"className":4481},[71],[64,4483,4485],{"className":4484},[75],[77,4486,4487],{"xmlns":79},[82,4488,4489,4507],{},[85,4490,4491,4493,4495,4497,4499,4501],{},[88,4492,1544],{},[92,4494,700],{"stretchy":699},[88,4496,100],{},[92,4498,719],{"stretchy":699},[92,4500,94],{},[1554,4502,4503,4505],{},[88,4504,100],{},[344,4506,346],{},[108,4508,1562],{"encoding":110},[64,4510,4512,4539],{"className":4511,"ariaHidden":116},[115],[64,4513,4515,4518,4521,4524,4527,4530,4533,4536],{"className":4514},[120],[64,4516],{"className":4517,"style":732},[124],[64,4519,1544],{"className":4520,"style":1575},[129,130],[64,4522,700],{"className":4523},[736],[64,4525,100],{"className":4526},[129,130],[64,4528,719],{"className":4529},[845],[64,4531],{"className":4532,"style":136},[135],[64,4534,94],{"className":4535},[140],[64,4537],{"className":4538,"style":136},[135],[64,4540,4542,4545],{"className":4541},[120],[64,4543],{"className":4544,"style":1735},[124],[64,4546,4548,4551],{"className":4547},[129],[64,4549,100],{"className":4550},[129,130],[64,4552,4554],{"className":4553},[746],[64,4555,4557],{"className":4556},[750],[64,4558,4560],{"className":4559},[755],[64,4561,4563],{"className":4562,"style":1735},[759],[64,4564,4565,4568],{"style":1756},[64,4566],{"className":4567,"style":768},[767],[64,4569,4571],{"className":4570},[772,773,774,775],[64,4572,346],{"className":4573},[129,775]," at any point ",[64,4576,4578,4591],{"className":4577},[71],[64,4579,4581],{"className":4580},[75],[77,4582,4583],{"xmlns":79},[82,4584,4585,4589],{},[85,4586,4587],{},[88,4588,100],{},[108,4590,100],{"encoding":110},[64,4592,4594],{"className":4593,"ariaHidden":116},[115],[64,4595,4597,4600],{"className":4596},[120],[64,4598],{"className":4599,"style":212},[124],[64,4601,100],{"className":4602},[129,130]," is exactly 
",[64,4605,4607,4623],{"className":4606},[71],[64,4608,4610],{"className":4609},[75],[77,4611,4612],{"xmlns":79},[82,4613,4614,4620],{},[85,4615,4616,4618],{},[344,4617,346],{},[88,4619,100],{},[108,4621,4622],{"encoding":110},"2x",[64,4624,4626],{"className":4625,"ariaHidden":116},[115],[64,4627,4629,4632,4635],{"className":4628},[120],[64,4630],{"className":4631,"style":406},[124],[64,4633,346],{"className":4634},[129],[64,4636,100],{"className":4637},[129,130],[26,4639],{},[51,4641,4643],{"id":4642},"part-4-the-derivative","Part 4 — The Derivative",[56,4645,4647],{"id":4646},"definition","Definition",[12,4649,4650,4651,4653,4654,4682,4683,4711,4712,4793,4794,4907],{},"The ",[21,4652,23],{}," of a function ",[64,4655,4657,4670],{"className":4656},[71],[64,4658,4660],{"className":4659},[75],[77,4661,4662],{"xmlns":79},[82,4663,4664,4668],{},[85,4665,4666],{},[88,4667,1544],{},[108,4669,1544],{"encoding":110},[64,4671,4673],{"className":4672,"ariaHidden":116},[115],[64,4674,4676,4679],{"className":4675},[120],[64,4677],{"className":4678,"style":2169},[124],[64,4680,1544],{"className":4681,"style":1575},[129,130]," at point ",[64,4684,4686,4699],{"className":4685},[71],[64,4687,4689],{"className":4688},[75],[77,4690,4691],{"xmlns":79},[82,4692,4693,4697],{},[85,4694,4695],{},[88,4696,100],{},[108,4698,100],{"encoding":110},[64,4700,4702],{"className":4701,"ariaHidden":116},[115],[64,4703,4705,4708],{"className":4704},[120],[64,4706],{"className":4707,"style":212},[124],[64,4709,100],{"className":4710},[129,130],", written 
",[64,4713,4715,4741],{"className":4714},[71],[64,4716,4718],{"className":4717},[75],[77,4719,4720],{"xmlns":79},[82,4721,4722,4738],{},[85,4723,4724,4732,4734,4736],{},[1554,4725,4726,4728],{},[88,4727,1544],{},[92,4729,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},"0em","′",[92,4733,700],{"stretchy":699},[88,4735,100],{},[92,4737,719],{"stretchy":699},[108,4739,4740],{"encoding":110},"f'(x)",[64,4742,4744],{"className":4743,"ariaHidden":116},[115],[64,4745,4747,4751,4784,4787,4790],{"className":4746},[120],[64,4748],{"className":4749,"style":4750},[124],"height:1.0019em;vertical-align:-0.25em;",[64,4752,4754,4757],{"className":4753},[129],[64,4755,1544],{"className":4756,"style":1575},[129,130],[64,4758,4760],{"className":4759},[746],[64,4761,4763],{"className":4762},[750],[64,4764,4766],{"className":4765},[755],[64,4767,4770],{"className":4768,"style":4769},[759],"height:0.7519em;",[64,4771,4772,4775],{"style":1756},[64,4773],{"className":4774,"style":768},[767],[64,4776,4778],{"className":4777},[772,773,774,775],[64,4779,4781],{"className":4780},[129,775],[64,4782,4731],{"className":4783},[129,775],[64,4785,700],{"className":4786},[736],[64,4788,100],{"className":4789},[129,130],[64,4791,719],{"className":4792},[845]," or 
",[64,4795,4797,4824],{"className":4796},[71],[64,4798,4800],{"className":4799},[75],[77,4801,4802],{"xmlns":79},[82,4803,4804,4821],{},[85,4805,4806],{},[1002,4807,4808,4815],{},[85,4809,4810,4813],{},[88,4811,4812],{},"d",[88,4814,1544],{},[85,4816,4817,4819],{},[88,4818,4812],{},[88,4820,100],{},[108,4822,4823],{"encoding":110},"\\frac{df}{dx}",[64,4825,4827],{"className":4826,"ariaHidden":116},[115],[64,4828,4830,4834],{"className":4829},[120],[64,4831],{"className":4832,"style":4833},[124],"height:1.2772em;vertical-align:-0.345em;",[64,4835,4837,4840,4904],{"className":4836},[129],[64,4838],{"className":4839},[736,1092],[64,4841,4843],{"className":4842},[1002],[64,4844,4846,4896],{"className":4845},[750,751],[64,4847,4849,4893],{"className":4848},[755],[64,4850,4853,4870,4878],{"className":4851,"style":4852},[759],"height:0.9322em;",[64,4854,4855,4858],{"style":1994},[64,4856],{"className":4857,"style":1112},[767],[64,4859,4861],{"className":4860},[772,773,774,775],[64,4862,4864,4867],{"className":4863},[129,775],[64,4865,4812],{"className":4866},[129,130,775],[64,4868,100],{"className":4869},[129,130,775],[64,4871,4872,4875],{"style":1124},[64,4873],{"className":4874,"style":1112},[767],[64,4876],{"className":4877,"style":1132},[1131],[64,4879,4880,4883],{"style":2020},[64,4881],{"className":4882,"style":1112},[767],[64,4884,4886],{"className":4885},[772,773,774,775],[64,4887,4889],{"className":4888},[129,775],[64,4890,4892],{"className":4891,"style":1575},[129,130,775],"df",[64,4894,783],{"className":4895},[782],[64,4897,4899],{"className":4898},[755],[64,4900,4902],{"className":4901,"style":2045},[759],[64,4903],{},[64,4905],{"className":4906},[845,1092],", 
is:",[64,4909,4911],{"className":4910},[67],[64,4912,4914,4997],{"className":4913},[71],[64,4915,4917],{"className":4916},[75],[77,4918,4919],{"xmlns":79,"display":80},[82,4920,4921,4994],{},[85,4922,4923],{},[4924,4925,4927],"menclose",{"notation":4926},"box",[4928,4929,4930],"mstyle",{"scriptlevel":537,"displaystyle":699},[4928,4931,4932],{"scriptlevel":537,"displaystyle":699},[4928,4933,4934],{"scriptlevel":537,"displaystyle":116},[85,4935,4936,4942,4944,4946,4948,4950,4966],{},[1554,4937,4938,4940],{},[88,4939,1544],{},[92,4941,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,4943,700],{"stretchy":699},[88,4945,100],{},[92,4947,719],{"stretchy":699},[92,4949,94],{},[3181,4951,4952,4958],{},[85,4953,4954,4956],{},[88,4955,3187],{},[92,4957,3190],{},[85,4959,4960,4962,4964],{},[88,4961,2109],{},[92,4963,3197],{},[344,4965,537],{},[1002,4967,4968,4992],{},[85,4969,4970,4972,4974,4976,4978,4980,4982,4984,4986,4988,4990],{},[88,4971,1544],{},[92,4973,700],{"stretchy":699},[88,4975,100],{},[92,4977,103],{},[88,4979,2109],{},[92,4981,719],{"stretchy":699},[92,4983,1032],{},[88,4985,1544],{},[92,4987,700],{"stretchy":699},[88,4989,100],{},[92,4991,719],{"stretchy":699},[88,4993,2109],{},[108,4995,4996],{"encoding":110},"\\boxed{f'(x) = \\lim_{h \\to 0} \\frac{f(x + h) - 
f(x)}{h}}",[64,4998,5000],{"className":4999,"ariaHidden":116},[115],[64,5001,5003,5007],{"className":5002},[120],[64,5004],{"className":5005,"style":5006},[124],"height:2.8591em;vertical-align:-1.0921em;",[64,5008,5010],{"className":5009},[129],[64,5011,5013,5263],{"className":5012},[750,751],[64,5014,5016,5260],{"className":5015},[755],[64,5017,5020,5248],{"className":5018,"style":5019},[759],"height:1.767em;",[64,5021,5023,5027],{"style":5022},"top:-4.8591em;",[64,5024],{"className":5025,"style":5026},[767],"height:4.8591em;",[64,5028,5031],{"className":5029},[5030],"boxpad",[64,5032,5034],{"className":5033},[129],[64,5035,5037,5070,5073,5076,5079,5082,5085,5088,5141,5144],{"className":5036},[129],[64,5038,5040,5043],{"className":5039},[129],[64,5041,1544],{"className":5042,"style":1575},[129,130],[64,5044,5046],{"className":5045},[746],[64,5047,5049],{"className":5048},[750],[64,5050,5052],{"className":5051},[755],[64,5053,5056],{"className":5054,"style":5055},[759],"height:0.8019em;",[64,5057,5058,5061],{"style":1621},[64,5059],{"className":5060,"style":768},[767],[64,5062,5064],{"className":5063},[772,773,774,775],[64,5065,5067],{"className":5066},[129,775],[64,5068,4731],{"className":5069},[129,775],[64,5071,700],{"className":5072},[736],[64,5074,100],{"className":5075},[129,130],[64,5077,719],{"className":5078},[845],[64,5080],{"className":5081,"style":136},[135],[64,5083,94],{"className":5084},[140],[64,5086],{"className":5087,"style":136},[135],[64,5089,5091],{"className":5090},[3244,3245],[64,5092,5094,5133],{"className":5093},[750,751],[64,5095,5097,5130],{"className":5096},[755],[64,5098,5100,5120],{"className":5099,"style":174},[759],[64,5101,5102,5105],{"style":3257},[64,5103],{"className":5104,"style":1112},[767],[64,5106,5108],{"className":5107},[772,773,774,775],[64,5109,5111,5114,5117],{"className":5110},[129,775],[64,5112,2109],{"className":5113},[129,130,775],[64,5115,3197],{"className":5116},[140,775],[64,5118,537],{"className":5119},[129,775],[
64,5121,5122,5125],{"style":3278},[64,5123],{"className":5124,"style":1112},[767],[64,5126,5127],{},[64,5128,3187],{"className":5129},[3244],[64,5131,783],{"className":5132},[782],[64,5134,5136],{"className":5135},[755],[64,5137,5139],{"className":5138,"style":3296},[759],[64,5140],{},[64,5142],{"className":5143,"style":800},[135],[64,5145,5147,5150,5245],{"className":5146},[129],[64,5148],{"className":5149},[736,1092],[64,5151,5153],{"className":5152},[1002],[64,5154,5156,5237],{"className":5155},[750,751],[64,5157,5159,5234],{"className":5158},[755],[64,5160,5162,5173,5181],{"className":5161,"style":2353},[759],[64,5163,5164,5167],{"style":1108},[64,5165],{"className":5166,"style":1112},[767],[64,5168,5170],{"className":5169},[129],[64,5171,2109],{"className":5172},[129,130],[64,5174,5175,5178],{"style":1124},[64,5176],{"className":5177,"style":1112},[767],[64,5179],{"className":5180,"style":1132},[1131],[64,5182,5183,5186],{"style":1135},[64,5184],{"className":5185,"style":1112},[767],[64,5187,5189,5192,5195,5198,5201,5204,5207,5210,5213,5216,5219,5222,5225,5228,5231],{"className":5188},[129],[64,5190,1544],{"className":5191,"style":1575},[129,130],[64,5193,700],{"className":5194},[736],[64,5196,100],{"className":5197},[129,130],[64,5199],{"className":5200,"style":160},[135],[64,5202,103],{"className":5203},[164],[64,5205],{"className":5206,"style":160},[135],[64,5208,2109],{"className":5209},[129,130],[64,5211,719],{"className":5212},[845],[64,5214],{"className":5215,"style":160},[135],[64,5217,1032],{"className":5218},[164],[64,5220],{"className":5221,"style":160},[135],[64,5223,1544],{"className":5224,"style":1575},[129,130],[64,5226,700],{"className":5227},[736],[64,5229,100],{"className":5230},[129,130],[64,5232,719],{"className":5233},[845],[64,5235,783],{"className":5236},[782],[64,5238,5240],{"className":5239},[755],[64,5241,5243],{"className":5242,"style":1157},[759],[64,5244],{},[64,5246],{"className":5247},[845,1092],[64,5249,5251,5254],{"style":5250},
"top:-3.767em;",[64,5252],{"className":5253,"style":5026},[767],[64,5255],{"className":5256,"style":5259},[5257,5258],"stretchy","fbox","height:2.8591em;border-style:solid;border-width:0.04em;",[64,5261,783],{"className":5262},[782],[64,5264,5266],{"className":5265},[755],[64,5267,5270],{"className":5268,"style":5269},[759],"height:1.0921em;",[64,5271],{},[12,5273,5274,5275,5277],{},"It gives the ",[21,5276,3132],{}," — the slope of the tangent line at every point.",[56,5279,5281],{"id":5280},"geometric-meaning","Geometric Meaning",[411,5283,5284,5294],{},[414,5285,5286],{},[417,5287,5288,5291],{},[420,5289,5290],{},"Derivative Value",[420,5292,5293],{},"Meaning",[530,5295,5296,5434,5571,5681,5779],{},[417,5297,5298,5400],{},[535,5299,5300],{},[64,5301,5303,5332],{"className":5302},[71],[64,5304,5306],{"className":5305},[75],[77,5307,5308],{"xmlns":79},[82,5309,5310,5329],{},[85,5311,5312,5318,5320,5322,5324,5327],{},[1554,5313,5314,5316],{},[88,5315,1544],{},[92,5317,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,5319,700],{"stretchy":699},[88,5321,100],{},[92,5323,719],{"stretchy":699},[92,5325,5326],{},">",[344,5328,537],{},[108,5330,5331],{"encoding":110},"f'(x) > 
0",[64,5333,5335,5391],{"className":5334,"ariaHidden":116},[115],[64,5336,5338,5341,5373,5376,5379,5382,5385,5388],{"className":5337},[120],[64,5339],{"className":5340,"style":4750},[124],[64,5342,5344,5347],{"className":5343},[129],[64,5345,1544],{"className":5346,"style":1575},[129,130],[64,5348,5350],{"className":5349},[746],[64,5351,5353],{"className":5352},[750],[64,5354,5356],{"className":5355},[755],[64,5357,5359],{"className":5358,"style":4769},[759],[64,5360,5361,5364],{"style":1756},[64,5362],{"className":5363,"style":768},[767],[64,5365,5367],{"className":5366},[772,773,774,775],[64,5368,5370],{"className":5369},[129,775],[64,5371,4731],{"className":5372},[129,775],[64,5374,700],{"className":5375},[736],[64,5377,100],{"className":5378},[129,130],[64,5380,719],{"className":5381},[845],[64,5383],{"className":5384,"style":136},[135],[64,5386,5326],{"className":5387},[140],[64,5389],{"className":5390,"style":136},[135],[64,5392,5394,5397],{"className":5393},[120],[64,5395],{"className":5396,"style":406},[124],[64,5398,537],{"className":5399},[129],[535,5401,5402,5403,3133,5406],{},"Function is 
",[21,5404,5405],{},"increasing",[64,5407,5409,5422],{"className":5408},[71],[64,5410,5412],{"className":5411},[75],[77,5413,5414],{"xmlns":79},[82,5415,5416,5420],{},[85,5417,5418],{},[88,5419,100],{},[108,5421,100],{"encoding":110},[64,5423,5425],{"className":5424,"ariaHidden":116},[115],[64,5426,5428,5431],{"className":5427},[120],[64,5429],{"className":5430,"style":212},[124],[64,5432,100],{"className":5433},[129,130],[417,5435,5436,5538],{},[535,5437,5438],{},[64,5439,5441,5470],{"className":5440},[71],[64,5442,5444],{"className":5443},[75],[77,5445,5446],{"xmlns":79},[82,5447,5448,5467],{},[85,5449,5450,5456,5458,5460,5462,5465],{},[1554,5451,5452,5454],{},[88,5453,1544],{},[92,5455,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,5457,700],{"stretchy":699},[88,5459,100],{},[92,5461,719],{"stretchy":699},[92,5463,5464],{},"\u003C",[344,5466,537],{},[108,5468,5469],{"encoding":110},"f'(x) \u003C 0",[64,5471,5473,5529],{"className":5472,"ariaHidden":116},[115],[64,5474,5476,5479,5511,5514,5517,5520,5523,5526],{"className":5475},[120],[64,5477],{"className":5478,"style":4750},[124],[64,5480,5482,5485],{"className":5481},[129],[64,5483,1544],{"className":5484,"style":1575},[129,130],[64,5486,5488],{"className":5487},[746],[64,5489,5491],{"className":5490},[750],[64,5492,5494],{"className":5493},[755],[64,5495,5497],{"className":5496,"style":4769},[759],[64,5498,5499,5502],{"style":1756},[64,5500],{"className":5501,"style":768},[767],[64,5503,5505],{"className":5504},[772,773,774,775],[64,5506,5508],{"className":5507},[129,775],[64,5509,4731],{"className":5510},[129,775],[64,5512,700],{"className":5513},[736],[64,5515,100],{"className":5516},[129,130],[64,5518,719],{"className":5519},[845],[64,5521],{"className":5522,"style":136},[135],[64,5524,5464],{"className":5525},[140],[64,5527],{"className":5528,"style":136},[135],[64,5530,5532,5535],{"className":5531},[120],[64,5533],{"className":5534,"style":406},[124],[64,5536,537],{"className":5537},[129],[535,
5539,5402,5540,3133,5543],{},[21,5541,5542],{},"decreasing",[64,5544,5546,5559],{"className":5545},[71],[64,5547,5549],{"className":5548},[75],[77,5550,5551],{"xmlns":79},[82,5552,5553,5557],{},[85,5554,5555],{},[88,5556,100],{},[108,5558,100],{"encoding":110},[64,5560,5562],{"className":5561,"ariaHidden":116},[115],[64,5563,5565,5568],{"className":5564},[120],[64,5566],{"className":5567,"style":212},[124],[64,5569,100],{"className":5570},[129,130],[417,5572,5573,5674],{},[535,5574,5575],{},[64,5576,5578,5606],{"className":5577},[71],[64,5579,5581],{"className":5580},[75],[77,5582,5583],{"xmlns":79},[82,5584,5585,5603],{},[85,5586,5587,5593,5595,5597,5599,5601],{},[1554,5588,5589,5591],{},[88,5590,1544],{},[92,5592,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,5594,700],{"stretchy":699},[88,5596,100],{},[92,5598,719],{"stretchy":699},[92,5600,94],{},[344,5602,537],{},[108,5604,5605],{"encoding":110},"f'(x) = 0",[64,5607,5609,5665],{"className":5608,"ariaHidden":116},[115],[64,5610,5612,5615,5647,5650,5653,5656,5659,5662],{"className":5611},[120],[64,5613],{"className":5614,"style":4750},[124],[64,5616,5618,5621],{"className":5617},[129],[64,5619,1544],{"className":5620,"style":1575},[129,130],[64,5622,5624],{"className":5623},[746],[64,5625,5627],{"className":5626},[750],[64,5628,5630],{"className":5629},[755],[64,5631,5633],{"className":5632,"style":4769},[759],[64,5634,5635,5638],{"style":1756},[64,5636],{"className":5637,"style":768},[767],[64,5639,5641],{"className":5640},[772,773,774,775],[64,5642,5644],{"className":5643},[129,775],[64,5645,4731],{"className":5646},[129,775],[64,5648,700],{"className":5649},[736],[64,5651,100],{"className":5652},[129,130],[64,5654,719],{"className":5655},[845],[64,5657],{"className":5658,"style":136},[135],[64,5660,94],{"className":5661},[140],[64,5663],{"className":5664,"style":136},[135],[64,5666,5668,5671],{"className":5667},[120],[64,5669],{"className":5670,"style":406},[124],[64,5672,537],{"className":5673},[1
29],[535,5675,5676,5677,5680],{},"Function has a ",[21,5678,5679],{},"flat point"," (possible minimum, maximum, or saddle)",[417,5682,5683,5774],{},[535,5684,5685,5686],{},"Large ",[64,5687,5689,5718],{"className":5688},[71],[64,5690,5692],{"className":5691},[75],[77,5693,5694],{"xmlns":79},[82,5695,5696,5715],{},[85,5697,5698,5701,5707,5709,5711,5713],{},[88,5699,5700],{"mathvariant":1008},"∥",[1554,5702,5703,5705],{},[88,5704,1544],{},[92,5706,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,5708,700],{"stretchy":699},[88,5710,100],{},[92,5712,719],{"stretchy":699},[88,5714,5700],{"mathvariant":1008},[108,5716,5717],{"encoding":110},"\\|f'(x)\\|",[64,5719,5721],{"className":5720,"ariaHidden":116},[115],[64,5722,5724,5727,5730,5762,5765,5768,5771],{"className":5723},[120],[64,5725],{"className":5726,"style":4750},[124],[64,5728,5700],{"className":5729},[129],[64,5731,5733,5736],{"className":5732},[129],[64,5734,1544],{"className":5735,"style":1575},[129,130],[64,5737,5739],{"className":5738},[746],[64,5740,5742],{"className":5741},[750],[64,5743,5745],{"className":5744},[755],[64,5746,5748],{"className":5747,"style":4769},[759],[64,5749,5750,5753],{"style":1756},[64,5751],{"className":5752,"style":768},[767],[64,5754,5756],{"className":5755},[772,773,774,775],[64,5757,5759],{"className":5758},[129,775],[64,5760,4731],{"className":5761},[129,775],[64,5763,700],{"className":5764},[736],[64,5766,100],{"className":5767},[129,130],[64,5769,719],{"className":5770},[845],[64,5772,5700],{"className":5773},[129],[535,5775,5402,5776],{},[21,5777,5778],{},"changing rapidly",[417,5780,5781,5870],{},[535,5782,5783,5784],{},"Small 
",[64,5785,5787,5814],{"className":5786},[71],[64,5788,5790],{"className":5789},[75],[77,5791,5792],{"xmlns":79},[82,5793,5794,5812],{},[85,5795,5796,5798,5804,5806,5808,5810],{},[88,5797,5700],{"mathvariant":1008},[1554,5799,5800,5802],{},[88,5801,1544],{},[92,5803,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,5805,700],{"stretchy":699},[88,5807,100],{},[92,5809,719],{"stretchy":699},[88,5811,5700],{"mathvariant":1008},[108,5813,5717],{"encoding":110},[64,5815,5817],{"className":5816,"ariaHidden":116},[115],[64,5818,5820,5823,5826,5858,5861,5864,5867],{"className":5819},[120],[64,5821],{"className":5822,"style":4750},[124],[64,5824,5700],{"className":5825},[129],[64,5827,5829,5832],{"className":5828},[129],[64,5830,1544],{"className":5831,"style":1575},[129,130],[64,5833,5835],{"className":5834},[746],[64,5836,5838],{"className":5837},[750],[64,5839,5841],{"className":5840},[755],[64,5842,5844],{"className":5843,"style":4769},[759],[64,5845,5846,5849],{"style":1756},[64,5847],{"className":5848,"style":768},[767],[64,5850,5852],{"className":5851},[772,773,774,775],[64,5853,5855],{"className":5854},[129,775],[64,5856,4731],{"className":5857},[129,775],[64,5859,700],{"className":5860},[736],[64,5862,100],{"className":5863},[129,130],[64,5865,719],{"className":5866},[845],[64,5868,5700],{"className":5869},[129],[535,5871,5402,5872],{},[21,5873,5874],{},"changing slowly",[26,5876],{},[51,5878,5880],{"id":5879},"part-5-differentiation-rules","Part 5 — Differentiation Rules",[12,5882,5883,5884,5887],{},"Computing limits by hand every time would be exhausting. 
Mathematicians have derived shortcut ",[21,5885,5886],{},"rules"," that cover almost every function you'll encounter.",[56,5889,5891],{"id":5890},"power-rule","Power Rule",[12,5893,5894,5895,2650],{},"For ",[64,5896,5898,5927],{"className":5897},[71],[64,5899,5901],{"className":5900},[75],[77,5902,5903],{"xmlns":79},[82,5904,5905,5924],{},[85,5906,5907,5909,5911,5913,5915,5917],{},[88,5908,1544],{},[92,5910,700],{"stretchy":699},[88,5912,100],{},[92,5914,719],{"stretchy":699},[92,5916,94],{},[1554,5918,5919,5921],{},[88,5920,100],{},[88,5922,5923],{},"n",[108,5925,5926],{"encoding":110},"f(x) = x^n",[64,5928,5930,5957],{"className":5929,"ariaHidden":116},[115],[64,5931,5933,5936,5939,5942,5945,5948,5951,5954],{"className":5932},[120],[64,5934],{"className":5935,"style":732},[124],[64,5937,1544],{"className":5938,"style":1575},[129,130],[64,5940,700],{"className":5941},[736],[64,5943,100],{"className":5944},[129,130],[64,5946,719],{"className":5947},[845],[64,5949],{"className":5950,"style":136},[135],[64,5952,94],{"className":5953},[140],[64,5955],{"className":5956,"style":136},[135],[64,5958,5960,5964],{"className":5959},[120],[64,5961],{"className":5962,"style":5963},[124],"height:0.6644em;",[64,5965,5967,5970],{"className":5966},[129],[64,5968,100],{"className":5969},[129,130],[64,5971,5973],{"className":5972},[746],[64,5974,5976],{"className":5975},[750],[64,5977,5979],{"className":5978},[755],[64,5980,5982],{"className":5981,"style":5963},[759],[64,5983,5984,5987],{"style":1756},[64,5985],{"className":5986,"style":768},[767],[64,5988,5990],{"className":5989},[772,773,774,775],[64,5991,5923],{"className":5992},[129,130,775],[64,5994,5996],{"className":5995},[67],[64,5997,5999,6046],{"className":5998},[71],[64,6000,6002],{"className":6001},[75],[77,6003,6004],{"xmlns":79,"display":80},[82,6005,6006,6043],{},[85,6007,6008,6018,6024,6026,6028,6031],{},[1002,6009,6010,6012],{},[88,6011,4812],{},[85,6013,6014,6016],{},[88,6015,4812],{},[88,6017,100],{},[1554,6019,602
0,6022],{},[88,6021,100],{},[88,6023,5923],{},[92,6025,94],{},[88,6027,5923],{},[92,6029,6030],{},"⋅",[1554,6032,6033,6035],{},[88,6034,100],{},[85,6036,6037,6039,6041],{},[88,6038,5923],{},[92,6040,1032],{},[344,6042,353],{},[108,6044,6045],{"encoding":110},"\\frac{d}{dx} x^n = n \\cdot x^{n-1}",[64,6047,6049,6159,6178],{"className":6048,"ariaHidden":116},[115],[64,6050,6052,6055,6120,6150,6153,6156],{"className":6051},[120],[64,6053],{"className":6054,"style":2249},[124],[64,6056,6058,6061,6117],{"className":6057},[129],[64,6059],{"className":6060},[736,1092],[64,6062,6064],{"className":6063},[1002],[64,6065,6067,6109],{"className":6066},[750,751],[64,6068,6070,6106],{"className":6069},[755],[64,6071,6073,6087,6095],{"className":6072,"style":2268},[759],[64,6074,6075,6078],{"style":1108},[64,6076],{"className":6077,"style":1112},[767],[64,6079,6081,6084],{"className":6080},[129],[64,6082,4812],{"className":6083},[129,130],[64,6085,100],{"className":6086},[129,130],[64,6088,6089,6092],{"style":1124},[64,6090],{"className":6091,"style":1112},[767],[64,6093],{"className":6094,"style":1132},[1131],[64,6096,6097,6100],{"style":1135},[64,6098],{"className":6099,"style":1112},[767],[64,6101,6103],{"className":6102},[129],[64,6104,4812],{"className":6105},[129,130],[64,6107,783],{"className":6108},[782],[64,6110,6112],{"className":6111},[755],[64,6113,6115],{"className":6114,"style":1157},[759],[64,6116],{},[64,6118],{"className":6119},[845,1092],[64,6121,6123,6126],{"className":6122},[129],[64,6124,100],{"className":6125},[129,130],[64,6127,6129],{"className":6128},[746],[64,6130,6132],{"className":6131},[750],[64,6133,6135],{"className":6134},[755],[64,6136,6139],{"className":6137,"style":6138},[759],"height:0.7144em;",[64,6140,6141,6144],{"style":1621},[64,6142],{"className":6143,"style":768},[767],[64,6145,6147],{"className":6146},[772,773,774,775],[64,6148,5923],{"className":6149},[129,130,775],[64,6151],{"className":6152,"style":136},[135],[64,6154,94],{"className":
6155},[140],[64,6157],{"className":6158,"style":136},[135],[64,6160,6162,6166,6169,6172,6175],{"className":6161},[120],[64,6163],{"className":6164,"style":6165},[124],"height:0.4445em;",[64,6167,5923],{"className":6168},[129,130],[64,6170],{"className":6171,"style":160},[135],[64,6173,6030],{"className":6174},[164],[64,6176],{"className":6177,"style":160},[135],[64,6179,6181,6184],{"className":6180},[120],[64,6182],{"className":6183,"style":1600},[124],[64,6185,6187,6190],{"className":6186},[129],[64,6188,100],{"className":6189},[129,130],[64,6191,6193],{"className":6192},[746],[64,6194,6196],{"className":6195},[750],[64,6197,6199],{"className":6198},[755],[64,6200,6202],{"className":6201,"style":1600},[759],[64,6203,6204,6207],{"style":1621},[64,6205],{"className":6206,"style":768},[767],[64,6208,6210],{"className":6209},[772,773,774,775],[64,6211,6213,6216,6219],{"className":6212},[129,775],[64,6214,5923],{"className":6215},[129,130,775],[64,6217,1032],{"className":6218},[164,775],[64,6220,353],{"className":6221},[129,775],[12,6223,6224],{},[21,6225,6226],{},"Examples:",[411,6228,6229,6239],{},[414,6230,6231],{},[417,6232,6233,6236],{},[420,6234,6235],{},"Function",[420,6237,6238],{},"Derivative",[530,6240,6241,6339,6468,6601,6723],{},[417,6242,6243,6304],{},[535,6244,6245],{},[64,6246,6248,6266],{"className":6247},[71],[64,6249,6251],{"className":6250},[75],[77,6252,6253],{"xmlns":79},[82,6254,6255,6263],{},[85,6256,6257],{},[1554,6258,6259,6261],{},[88,6260,100],{},[344,6262,346],{},[108,6264,6265],{"encoding":110},"x^2",[64,6267,6269],{"className":6268,"ariaHidden":116},[115],[64,6270,6272,6275],{"className":6271},[120],[64,6273],{"className":6274,"style":1735},[124],[64,6276,6278,6281],{"className":6277},[129],[64,6279,100],{"className":6280},[129,130],[64,6282,6284],{"className":6283},[746],[64,6285,6287],{"className":6286},[750],[64,6288,6290],{"className":6289},[755],[64,6291,6293],{"className":6292,"style":1735},[759],[64,6294,6295,6298],{"style":1756},[64
,6296],{"className":6297,"style":768},[767],[64,6299,6301],{"className":6300},[772,773,774,775],[64,6302,346],{"className":6303},[129,775],[535,6305,6306],{},[64,6307,6309,6324],{"className":6308},[71],[64,6310,6312],{"className":6311},[75],[77,6313,6314],{"xmlns":79},[82,6315,6316,6322],{},[85,6317,6318,6320],{},[344,6319,346],{},[88,6321,100],{},[108,6323,4622],{"encoding":110},[64,6325,6327],{"className":6326,"ariaHidden":116},[115],[64,6328,6330,6333,6336],{"className":6329},[120],[64,6331],{"className":6332,"style":406},[124],[64,6334,346],{"className":6335},[129],[64,6337,100],{"className":6338},[129,130],[417,6340,6341,6402],{},[535,6342,6343],{},[64,6344,6346,6364],{"className":6345},[71],[64,6347,6349],{"className":6348},[75],[77,6350,6351],{"xmlns":79},[82,6352,6353,6361],{},[85,6354,6355],{},[1554,6356,6357,6359],{},[88,6358,100],{},[344,6360,546],{},[108,6362,6363],{"encoding":110},"x^3",[64,6365,6367],{"className":6366,"ariaHidden":116},[115],[64,6368,6370,6373],{"className":6369},[120],[64,6371],{"className":6372,"style":1735},[124],[64,6374,6376,6379],{"className":6375},[129],[64,6377,100],{"className":6378},[129,130],[64,6380,6382],{"className":6381},[746],[64,6383,6385],{"className":6384},[750],[64,6386,6388],{"className":6387},[755],[64,6389,6391],{"className":6390,"style":1735},[759],[64,6392,6393,6396],{"style":1756},[64,6394],{"className":6395,"style":768},[767],[64,6397,6399],{"className":6398},[772,773,774,775],[64,6400,546],{"className":6401},[129,775],[535,6403,6404],{},[64,6405,6407,6427],{"className":6406},[71],[64,6408,6410],{"className":6409},[75],[77,6411,6412],{"xmlns":79},[82,6413,6414,6424],{},[85,6415,6416,6418],{},[344,6417,546],{},[1554,6419,6420,6422],{},[88,6421,100],{},[344,6423,346],{},[108,6425,6426],{"encoding":110},"3x^2",[64,6428,6430],{"className":6429,"ariaHidden":116},[115],[64,6431,6433,6436,6439],{"className":6432},[120],[64,6434],{"className":6435,"style":1735},[124],[64,6437,546],{"className":6438},[129],[64,6440,64
42,6445],{"className":6441},[129],[64,6443,100],{"className":6444},[129,130],[64,6446,6448],{"className":6447},[746],[64,6449,6451],{"className":6450},[750],[64,6452,6454],{"className":6453},[755],[64,6455,6457],{"className":6456,"style":1735},[759],[64,6458,6459,6462],{"style":1756},[64,6460],{"className":6461,"style":768},[767],[64,6463,6465],{"className":6464},[772,773,774,775],[64,6466,346],{"className":6467},[129,775],[417,6469,6470,6535],{},[535,6471,6472],{},[64,6473,6475,6494],{"className":6474},[71],[64,6476,6478],{"className":6477},[75],[77,6479,6480],{"xmlns":79},[82,6481,6482,6491],{},[85,6483,6484],{},[1554,6485,6486,6488],{},[88,6487,100],{},[344,6489,6490],{},"10",[108,6492,6493],{"encoding":110},"x^{10}",[64,6495,6497],{"className":6496,"ariaHidden":116},[115],[64,6498,6500,6503],{"className":6499},[120],[64,6501],{"className":6502,"style":1735},[124],[64,6504,6506,6509],{"className":6505},[129],[64,6507,100],{"className":6508},[129,130],[64,6510,6512],{"className":6511},[746],[64,6513,6515],{"className":6514},[750],[64,6516,6518],{"className":6517},[755],[64,6519,6521],{"className":6520,"style":1735},[759],[64,6522,6523,6526],{"style":1756},[64,6524],{"className":6525,"style":768},[767],[64,6527,6529],{"className":6528},[772,773,774,775],[64,6530,6532],{"className":6531},[129,775],[64,6533,6490],{"className":6534},[129,775],[535,6536,6537],{},[64,6538,6540,6560],{"className":6539},[71],[64,6541,6543],{"className":6542},[75],[77,6544,6545],{"xmlns":79},[82,6546,6547,6557],{},[85,6548,6549,6551],{},[344,6550,6490],{},[1554,6552,6553,6555],{},[88,6554,100],{},[344,6556,1775],{},[108,6558,6559],{"encoding":110},"10x^9",[64,6561,6563],{"className":6562,"ariaHidden":116},[115],[64,6564,6566,6569,6572],{"className":6565},[120],[64,6567],{"className":6568,"style":1735},[124],[64,6570,6490],{"className":6571},[129],[64,6573,6575,6578],{"className":6574},[129],[64,6576,100],{"className":6577},[129,130],[64,6579,6581],{"className":6580},[746],[64,6582,6584],{"
className":6583},[750],[64,6585,6587],{"className":6586},[755],[64,6588,6590],{"className":6589,"style":1735},[759],[64,6591,6592,6595],{"style":1756},[64,6593],{"className":6594,"style":768},[767],[64,6596,6598],{"className":6597},[772,773,774,775],[64,6599,1775],{"className":6600},[129,775],[417,6602,6603,6693],{},[535,6604,6605,6633,6634,719],{},[64,6606,6608,6621],{"className":6607},[71],[64,6609,6611],{"className":6610},[75],[77,6612,6613],{"xmlns":79},[82,6614,6615,6619],{},[85,6616,6617],{},[88,6618,100],{},[108,6620,100],{"encoding":110},[64,6622,6624],{"className":6623,"ariaHidden":116},[115],[64,6625,6627,6630],{"className":6626},[120],[64,6628],{"className":6629,"style":212},[124],[64,6631,100],{"className":6632},[129,130]," (i.e. ",[64,6635,6637,6655],{"className":6636},[71],[64,6638,6640],{"className":6639},[75],[77,6641,6642],{"xmlns":79},[82,6643,6644,6652],{},[85,6645,6646],{},[1554,6647,6648,6650],{},[88,6649,100],{},[344,6651,353],{},[108,6653,6654],{"encoding":110},"x^1",[64,6656,6658],{"className":6657,"ariaHidden":116},[115],[64,6659,6661,6664],{"className":6660},[120],[64,6662],{"className":6663,"style":1735},[124],[64,6665,6667,6670],{"className":6666},[129],[64,6668,100],{"className":6669},[129,130],[64,6671,6673],{"className":6672},[746],[64,6674,6676],{"className":6675},[750],[64,6677,6679],{"className":6678},[755],[64,6680,6682],{"className":6681,"style":1735},[759],[64,6683,6684,6687],{"style":1756},[64,6685],{"className":6686,"style":768},[767],[64,6688,6690],{"className":6689},[772,773,774,775],[64,6691,353],{"className":6692},[129,775],[535,6694,6695],{},[64,6696,6698,6711],{"className":6697},[71],[64,6699,6701],{"className":6700},[75],[77,6702,6703],{"xmlns":79},[82,6704,6705,6709],{},[85,6706,6707],{},[344,6708,353],{},[108,6710,353],{"encoding":110},[64,6712,6714],{"className":6713,"ariaHidden":116},[115],[64,6715,6717,6720],{"className":6716},[120],[64,6718],{"className":6719,"style":406},[124],[64,6721,353],{"className":6722},[129
],[417,6724,6725,6815],{},[535,6726,6727,6755,6756,719],{},[64,6728,6730,6743],{"className":6729},[71],[64,6731,6733],{"className":6732},[75],[77,6734,6735],{"xmlns":79},[82,6736,6737,6741],{},[85,6738,6739],{},[344,6740,553],{},[108,6742,553],{"encoding":110},[64,6744,6746],{"className":6745,"ariaHidden":116},[115],[64,6747,6749,6752],{"className":6748},[120],[64,6750],{"className":6751,"style":406},[124],[64,6753,553],{"className":6754},[129]," (constant, ",[64,6757,6759,6777],{"className":6758},[71],[64,6760,6762],{"className":6761},[75],[77,6763,6764],{"xmlns":79},[82,6765,6766,6774],{},[85,6767,6768],{},[1554,6769,6770,6772],{},[88,6771,100],{},[344,6773,537],{},[108,6775,6776],{"encoding":110},"x^0",[64,6778,6780],{"className":6779,"ariaHidden":116},[115],[64,6781,6783,6786],{"className":6782},[120],[64,6784],{"className":6785,"style":1735},[124],[64,6787,6789,6792],{"className":6788},[129],[64,6790,100],{"className":6791},[129,130],[64,6793,6795],{"className":6794},[746],[64,6796,6798],{"className":6797},[750],[64,6799,6801],{"className":6800},[755],[64,6802,6804],{"className":6803,"style":1735},[759],[64,6805,6806,6809],{"style":1756},[64,6807],{"className":6808,"style":768},[767],[64,6810,6812],{"className":6811},[772,773,774,775],[64,6813,537],{"className":6814},[129,775],[535,6816,6817],{},[64,6818,6820,6833],{"className":6819},[71],[64,6821,6823],{"className":6822},[75],[77,6824,6825],{"xmlns":79},[82,6826,6827,6831],{},[85,6828,6829],{},[344,6830,537],{},[108,6832,537],{"encoding":110},[64,6834,6836],{"className":6835,"ariaHidden":116},[115],[64,6837,6839,6842],{"className":6838},[120],[64,6840],{"className":6841,"style":406},[124],[64,6843,537],{"className":6844},[129],[56,6846,6848],{"id":6847},"constant-multiple-rule","Constant Multiple 
Rule",[64,6850,6852],{"className":6851},[67],[64,6853,6855,6914],{"className":6854},[71],[64,6856,6858],{"className":6857},[75],[77,6859,6860],{"xmlns":79,"display":80},[82,6861,6862,6911],{},[85,6863,6864,6874,6877,6880,6882,6884,6886,6888,6890,6893,6895,6897,6899,6905,6907,6909],{},[1002,6865,6866,6868],{},[88,6867,4812],{},[85,6869,6870,6872],{},[88,6871,4812],{},[88,6873,100],{},[92,6875,6876],{"stretchy":699},"[",[88,6878,6879],{},"c",[92,6881,6030],{},[88,6883,1544],{},[92,6885,700],{"stretchy":699},[88,6887,100],{},[92,6889,719],{"stretchy":699},[92,6891,6892],{"stretchy":699},"]",[92,6894,94],{},[88,6896,6879],{},[92,6898,6030],{},[1554,6900,6901,6903],{},[88,6902,1544],{},[92,6904,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,6906,700],{"stretchy":699},[88,6908,100],{},[92,6910,719],{"stretchy":699},[108,6912,6913],{"encoding":110},"\\frac{d}{dx}[c \\cdot f(x)] = c \\cdot f'(x)",[64,6915,6917,7003,7031,7049],{"className":6916,"ariaHidden":116},[115],[64,6918,6920,6923,6988,6991,6994,6997,7000],{"className":6919},[120],[64,6921],{"className":6922,"style":2249},[124],[64,6924,6926,6929,6985],{"className":6925},[129],[64,6927],{"className":6928},[736,1092],[64,6930,6932],{"className":6931},[1002],[64,6933,6935,6977],{"className":6934},[750,751],[64,6936,6938,6974],{"className":6937},[755],[64,6939,6941,6955,6963],{"className":6940,"style":2268},[759],[64,6942,6943,6946],{"style":1108},[64,6944],{"className":6945,"style":1112},[767],[64,6947,6949,6952],{"className":6948},[129],[64,6950,4812],{"className":6951},[129,130],[64,6953,100],{"className":6954},[129,130],[64,6956,6957,6960],{"style":1124},[64,6958],{"className":6959,"style":1112},[767],[64,6961],{"className":6962,"style":1132},[1131],[64,6964,6965,6968],{"style":1135},[64,6966],{"className":6967,"style":1112},[767],[64,6969,6971],{"className":6970},[129],[64,6972,4812],{"className":6973},[129,130],[64,6975,783],{"className":6976},[782],[64,6978,6980],{"className":6979},[755],[64,6981,6983],
{"className":6982,"style":1157},[759],[64,6984],{},[64,6986],{"className":6987},[845,1092],[64,6989,6876],{"className":6990},[736],[64,6992,6879],{"className":6993},[129,130],[64,6995],{"className":6996,"style":160},[135],[64,6998,6030],{"className":6999},[164],[64,7001],{"className":7002,"style":160},[135],[64,7004,7006,7009,7012,7015,7018,7022,7025,7028],{"className":7005},[120],[64,7007],{"className":7008,"style":732},[124],[64,7010,1544],{"className":7011,"style":1575},[129,130],[64,7013,700],{"className":7014},[736],[64,7016,100],{"className":7017},[129,130],[64,7019,7021],{"className":7020},[845],")]",[64,7023],{"className":7024,"style":136},[135],[64,7026,94],{"className":7027},[140],[64,7029],{"className":7030,"style":136},[135],[64,7032,7034,7037,7040,7043,7046],{"className":7033},[120],[64,7035],{"className":7036,"style":6165},[124],[64,7038,6879],{"className":7039},[129,130],[64,7041],{"className":7042,"style":160},[135],[64,7044,6030],{"className":7045},[164],[64,7047],{"className":7048,"style":160},[135],[64,7050,7052,7056,7088,7091,7094],{"className":7051},[120],[64,7053],{"className":7054,"style":7055},[124],"height:1.0519em;vertical-align:-0.25em;",[64,7057,7059,7062],{"className":7058},[129],[64,7060,1544],{"className":7061,"style":1575},[129,130],[64,7063,7065],{"className":7064},[746],[64,7066,7068],{"className":7067},[750],[64,7069,7071],{"className":7070},[755],[64,7072,7074],{"className":7073,"style":5055},[759],[64,7075,7076,7079],{"style":1621},[64,7077],{"className":7078,"style":768},[767],[64,7080,7082],{"className":7081},[772,773,774,775],[64,7083,7085],{"className":7084},[129,775],[64,7086,4731],{"className":7087},[129,775],[64,7089,700],{"className":7090},[736],[64,7092,100],{"className":7093},[129,130],[64,7095,719],{"className":7096},[845],[12,7098,7099,7100,7201,7202,2055],{},"If 
",[64,7101,7103,7133],{"className":7102},[71],[64,7104,7106],{"className":7105},[75],[77,7107,7108],{"xmlns":79},[82,7109,7110,7130],{},[85,7111,7112,7114,7116,7118,7120,7122,7124],{},[88,7113,1544],{},[92,7115,700],{"stretchy":699},[88,7117,100],{},[92,7119,719],{"stretchy":699},[92,7121,94],{},[344,7123,546],{},[1554,7125,7126,7128],{},[88,7127,100],{},[344,7129,346],{},[108,7131,7132],{"encoding":110},"f(x) = 3x^2",[64,7134,7136,7163],{"className":7135,"ariaHidden":116},[115],[64,7137,7139,7142,7145,7148,7151,7154,7157,7160],{"className":7138},[120],[64,7140],{"className":7141,"style":732},[124],[64,7143,1544],{"className":7144,"style":1575},[129,130],[64,7146,700],{"className":7147},[736],[64,7149,100],{"className":7150},[129,130],[64,7152,719],{"className":7153},[845],[64,7155],{"className":7156,"style":136},[135],[64,7158,94],{"className":7159},[140],[64,7161],{"className":7162,"style":136},[135],[64,7164,7166,7169,7172],{"className":7165},[120],[64,7167],{"className":7168,"style":1735},[124],[64,7170,546],{"className":7171},[129],[64,7173,7175,7178],{"className":7174},[129],[64,7176,100],{"className":7177},[129,130],[64,7179,7181],{"className":7180},[746],[64,7182,7184],{"className":7183},[750],[64,7185,7187],{"className":7186},[755],[64,7188,7190],{"className":7189,"style":1735},[759],[64,7191,7192,7195],{"style":1756},[64,7193],{"className":7194,"style":768},[767],[64,7196,7198],{"className":7197},[772,773,774,775],[64,7199,346],{"className":7200},[129,775],", then 
",[64,7203,7205,7246],{"className":7204},[71],[64,7206,7208],{"className":7207},[75],[77,7209,7210],{"xmlns":79},[82,7211,7212,7243],{},[85,7213,7214,7220,7222,7224,7226,7228,7230,7232,7234,7236,7238,7241],{},[1554,7215,7216,7218],{},[88,7217,1544],{},[92,7219,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,7221,700],{"stretchy":699},[88,7223,100],{},[92,7225,719],{"stretchy":699},[92,7227,94],{},[344,7229,546],{},[92,7231,6030],{},[344,7233,346],{},[88,7235,100],{},[92,7237,94],{},[344,7239,7240],{},"6",[88,7242,100],{},[108,7244,7245],{"encoding":110},"f'(x) = 3 \\cdot 2x = 6x",[64,7247,7249,7305,7323,7344],{"className":7248,"ariaHidden":116},[115],[64,7250,7252,7255,7287,7290,7293,7296,7299,7302],{"className":7251},[120],[64,7253],{"className":7254,"style":4750},[124],[64,7256,7258,7261],{"className":7257},[129],[64,7259,1544],{"className":7260,"style":1575},[129,130],[64,7262,7264],{"className":7263},[746],[64,7265,7267],{"className":7266},[750],[64,7268,7270],{"className":7269},[755],[64,7271,7273],{"className":7272,"style":4769},[759],[64,7274,7275,7278],{"style":1756},[64,7276],{"className":7277,"style":768},[767],[64,7279,7281],{"className":7280},[772,773,774,775],[64,7282,7284],{"className":7283},[129,775],[64,7285,4731],{"className":7286},[129,775],[64,7288,700],{"className":7289},[736],[64,7291,100],{"className":7292},[129,130],[64,7294,719],{"className":7295},[845],[64,7297],{"className":7298,"style":136},[135],[64,7300,94],{"className":7301},[140],[64,7303],{"className":7304,"style":136},[135],[64,7306,7308,7311,7314,7317,7320],{"className":7307},[120],[64,7309],{"className":7310,"style":406},[124],[64,7312,546],{"className":7313},[129],[64,7315],{"className":7316,"style":160},[135],[64,7318,6030],{"className":7319},[164],[64,7321],{"className":7322,"style":160},[135],[64,7324,7326,7329,7332,7335,7338,7341],{"className":7325},[120],[64,7327],{"className":7328,"style":406},[124],[64,7330,346],{"className":7331},[129],[64,7333,100],{"className"
:7334},[129,130],[64,7336],{"className":7337,"style":136},[135],[64,7339,94],{"className":7340},[140],[64,7342],{"className":7343,"style":136},[135],[64,7345,7347,7350,7353],{"className":7346},[120],[64,7348],{"className":7349,"style":406},[124],[64,7351,7240],{"className":7352},[129],[64,7354,100],{"className":7355},[129,130],[56,7357,7359],{"id":7358},"sum-rule","Sum Rule",[64,7361,7363],{"className":7362},[67],[64,7364,7366,7439],{"className":7365},[71],[64,7367,7369],{"className":7368},[75],[77,7370,7371],{"xmlns":79,"display":80},[82,7372,7373,7436],{},[85,7374,7375,7385,7387,7389,7391,7393,7395,7397,7400,7402,7404,7406,7408,7410,7416,7418,7420,7422,7424,7430,7432,7434],{},[1002,7376,7377,7379],{},[88,7378,4812],{},[85,7380,7381,7383],{},[88,7382,4812],{},[88,7384,100],{},[92,7386,6876],{"stretchy":699},[88,7388,1544],{},[92,7390,700],{"stretchy":699},[88,7392,100],{},[92,7394,719],{"stretchy":699},[92,7396,103],{},[88,7398,7399],{},"g",[92,7401,700],{"stretchy":699},[88,7403,100],{},[92,7405,719],{"stretchy":699},[92,7407,6892],{"stretchy":699},[92,7409,94],{},[1554,7411,7412,7414],{},[88,7413,1544],{},[92,7415,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,7417,700],{"stretchy":699},[88,7419,100],{},[92,7421,719],{"stretchy":699},[92,7423,103],{},[1554,7425,7426,7428],{},[88,7427,7399],{},[92,7429,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,7431,700],{"stretchy":699},[88,7433,100],{},[92,7435,719],{"stretchy":699},[108,7437,7438],{"encoding":110},"\\frac{d}{dx}[f(x) + g(x)] = f'(x) + 
g'(x)",[64,7440,7442,7537,7564,7620],{"className":7441,"ariaHidden":116},[115],[64,7443,7445,7448,7513,7516,7519,7522,7525,7528,7531,7534],{"className":7444},[120],[64,7446],{"className":7447,"style":2249},[124],[64,7449,7451,7454,7510],{"className":7450},[129],[64,7452],{"className":7453},[736,1092],[64,7455,7457],{"className":7456},[1002],[64,7458,7460,7502],{"className":7459},[750,751],[64,7461,7463,7499],{"className":7462},[755],[64,7464,7466,7480,7488],{"className":7465,"style":2268},[759],[64,7467,7468,7471],{"style":1108},[64,7469],{"className":7470,"style":1112},[767],[64,7472,7474,7477],{"className":7473},[129],[64,7475,4812],{"className":7476},[129,130],[64,7478,100],{"className":7479},[129,130],[64,7481,7482,7485],{"style":1124},[64,7483],{"className":7484,"style":1112},[767],[64,7486],{"className":7487,"style":1132},[1131],[64,7489,7490,7493],{"style":1135},[64,7491],{"className":7492,"style":1112},[767],[64,7494,7496],{"className":7495},[129],[64,7497,4812],{"className":7498},[129,130],[64,7500,783],{"className":7501},[782],[64,7503,7505],{"className":7504},[755],[64,7506,7508],{"className":7507,"style":1157},[759],[64,7509],{},[64,7511],{"className":7512},[845,1092],[64,7514,6876],{"className":7515},[736],[64,7517,1544],{"className":7518,"style":1575},[129,130],[64,7520,700],{"className":7521},[736],[64,7523,100],{"className":7524},[129,130],[64,7526,719],{"className":7527},[845],[64,7529],{"className":7530,"style":160},[135],[64,7532,103],{"className":7533},[164],[64,7535],{"className":7536,"style":160},[135],[64,7538,7540,7543,7546,7549,7552,7555,7558,7561],{"className":7539},[120],[64,7541],{"className":7542,"style":732},[124],[64,7544,7399],{"className":7545,"style":131},[129,130],[64,7547,700],{"className":7548},[736],[64,7550,100],{"className":7551},[129,130],[64,7553,7021],{"className":7554},[845],[64,7556],{"className":7557,"style":136},[135],[64,7559,94],{"className":7560},[140],[64,7562],{"className":7563,"style":136},[135],[64,7565,7567,7570
,7602,7605,7608,7611,7614,7617],{"className":7566},[120],[64,7568],{"className":7569,"style":7055},[124],[64,7571,7573,7576],{"className":7572},[129],[64,7574,1544],{"className":7575,"style":1575},[129,130],[64,7577,7579],{"className":7578},[746],[64,7580,7582],{"className":7581},[750],[64,7583,7585],{"className":7584},[755],[64,7586,7588],{"className":7587,"style":5055},[759],[64,7589,7590,7593],{"style":1621},[64,7591],{"className":7592,"style":768},[767],[64,7594,7596],{"className":7595},[772,773,774,775],[64,7597,7599],{"className":7598},[129,775],[64,7600,4731],{"className":7601},[129,775],[64,7603,700],{"className":7604},[736],[64,7606,100],{"className":7607},[129,130],[64,7609,719],{"className":7610},[845],[64,7612],{"className":7613,"style":160},[135],[64,7615,103],{"className":7616},[164],[64,7618],{"className":7619,"style":160},[135],[64,7621,7623,7626,7658,7661,7664],{"className":7622},[120],[64,7624],{"className":7625,"style":7055},[124],[64,7627,7629,7632],{"className":7628},[129],[64,7630,7399],{"className":7631,"style":131},[129,130],[64,7633,7635],{"className":7634},[746],[64,7636,7638],{"className":7637},[750],[64,7639,7641],{"className":7640},[755],[64,7642,7644],{"className":7643,"style":5055},[759],[64,7645,7646,7649],{"style":1621},[64,7647],{"className":7648,"style":768},[767],[64,7650,7652],{"className":7651},[772,773,774,775],[64,7653,7655],{"className":7654},[129,775],[64,7656,4731],{"className":7657},[129,775],[64,7659,700],{"className":7660},[736],[64,7662,100],{"className":7663},[129,130],[64,7665,719],{"className":7666},[845],[12,7668,7099,7669,7872],{},[64,7670,7672,7720],{"className":7671},[71],[64,7673,7675],{"className":7674},[75],[77,7676,7677],{"xmlns":79},[82,7678,7679,7717],{},[85,7680,7681,7683,7685,7687,7689,7691,7697,7699,7701,7707,7709,7711,7713,7715],{},[88,7682,1544],{},[92,7684,700],{"stretchy":699},[88,7686,100],{},[92,7688,719],{"stretchy":699},[92,7690,94],{},[1554,7692,7693,7695],{},[88,7694,100],{},[344,7696,546],{},[
92,7698,103],{},[344,7700,553],{},[1554,7702,7703,7705],{},[88,7704,100],{},[344,7706,346],{},[92,7708,1032],{},[344,7710,346],{},[88,7712,100],{},[92,7714,103],{},[344,7716,560],{},[108,7718,7719],{"encoding":110},"f(x) = x^3 + 5x^2 - 2x + 7",[64,7721,7723,7750,7795,7842,7863],{"className":7722,"ariaHidden":116},[115],[64,7724,7726,7729,7732,7735,7738,7741,7744,7747],{"className":7725},[120],[64,7727],{"className":7728,"style":732},[124],[64,7730,1544],{"className":7731,"style":1575},[129,130],[64,7733,700],{"className":7734},[736],[64,7736,100],{"className":7737},[129,130],[64,7739,719],{"className":7740},[845],[64,7742],{"className":7743,"style":136},[135],[64,7745,94],{"className":7746},[140],[64,7748],{"className":7749,"style":136},[135],[64,7751,7753,7757,7786,7789,7792],{"className":7752},[120],[64,7754],{"className":7755,"style":7756},[124],"height:0.8974em;vertical-align:-0.0833em;",[64,7758,7760,7763],{"className":7759},[129],[64,7761,100],{"className":7762},[129,130],[64,7764,7766],{"className":7765},[746],[64,7767,7769],{"className":7768},[750],[64,7770,7772],{"className":7771},[755],[64,7773,7775],{"className":7774,"style":1735},[759],[64,7776,7777,7780],{"style":1756},[64,7778],{"className":7779,"style":768},[767],[64,7781,7783],{"className":7782},[772,773,774,775],[64,7784,546],{"className":7785},[129,775],[64,7787],{"className":7788,"style":160},[135],[64,7790,103],{"className":7791},[164],[64,7793],{"className":7794,"style":160},[135],[64,7796,7798,7801,7804,7833,7836,7839],{"className":7797},[120],[64,7799],{"className":7800,"style":7756},[124],[64,7802,553],{"className":7803},[129],[64,7805,7807,7810],{"className":7806},[129],[64,7808,100],{"className":7809},[129,130],[64,7811,7813],{"className":7812},[746],[64,7814,7816],{"className":7815},[750],[64,7817,7819],{"className":7818},[755],[64,7820,7822],{"className":7821,"style":1735},[759],[64,7823,7824,7827],{"style":1756},[64,7825],{"className":7826,"style":768},[767],[64,7828,7830],{"className":7
829},[772,773,774,775],[64,7831,346],{"className":7832},[129,775],[64,7834],{"className":7835,"style":160},[135],[64,7837,1032],{"className":7838},[164],[64,7840],{"className":7841,"style":160},[135],[64,7843,7845,7848,7851,7854,7857,7860],{"className":7844},[120],[64,7846],{"className":7847,"style":384},[124],[64,7849,346],{"className":7850},[129],[64,7852,100],{"className":7853},[129,130],[64,7855],{"className":7856,"style":160},[135],[64,7858,103],{"className":7859},[164],[64,7861],{"className":7862,"style":160},[135],[64,7864,7866,7869],{"className":7865},[120],[64,7867],{"className":7868,"style":406},[124],[64,7870,560],{"className":7871},[129],", differentiate term by term:",[64,7874,7876],{"className":7875},[67],[64,7877,7879,7923],{"className":7878},[71],[64,7880,7882],{"className":7881},[75],[77,7883,7884],{"xmlns":79,"display":80},[82,7885,7886,7920],{},[85,7887,7888,7894,7896,7898,7900,7902,7904,7910,7912,7914,7916,7918],{},[1554,7889,7890,7892],{},[88,7891,1544],{},[92,7893,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,7895,700],{"stretchy":699},[88,7897,100],{},[92,7899,719],{"stretchy":699},[92,7901,94],{},[344,7903,546],{},[1554,7905,7906,7908],{},[88,7907,100],{},[344,7909,346],{},[92,7911,103],{},[344,7913,6490],{},[88,7915,100],{},[92,7917,1032],{},[344,7919,346],{},[108,7921,7922],{"encoding":110},"f'(x) = 3x^2 + 10x - 
2",[64,7924,7926,7982,8030,8051],{"className":7925,"ariaHidden":116},[115],[64,7927,7929,7932,7964,7967,7970,7973,7976,7979],{"className":7928},[120],[64,7930],{"className":7931,"style":7055},[124],[64,7933,7935,7938],{"className":7934},[129],[64,7936,1544],{"className":7937,"style":1575},[129,130],[64,7939,7941],{"className":7940},[746],[64,7942,7944],{"className":7943},[750],[64,7945,7947],{"className":7946},[755],[64,7948,7950],{"className":7949,"style":5055},[759],[64,7951,7952,7955],{"style":1621},[64,7953],{"className":7954,"style":768},[767],[64,7956,7958],{"className":7957},[772,773,774,775],[64,7959,7961],{"className":7960},[129,775],[64,7962,4731],{"className":7963},[129,775],[64,7965,700],{"className":7966},[736],[64,7968,100],{"className":7969},[129,130],[64,7971,719],{"className":7972},[845],[64,7974],{"className":7975,"style":136},[135],[64,7977,94],{"className":7978},[140],[64,7980],{"className":7981,"style":136},[135],[64,7983,7985,7989,7992,8021,8024,8027],{"className":7984},[120],[64,7986],{"className":7987,"style":7988},[124],"height:0.9474em;vertical-align:-0.0833em;",[64,7990,546],{"className":7991},[129],[64,7993,7995,7998],{"className":7994},[129],[64,7996,100],{"className":7997},[129,130],[64,7999,8001],{"className":8000},[746],[64,8002,8004],{"className":8003},[750],[64,8005,8007],{"className":8006},[755],[64,8008,8010],{"className":8009,"style":1600},[759],[64,8011,8012,8015],{"style":1621},[64,8013],{"className":8014,"style":768},[767],[64,8016,8018],{"className":8017},[772,773,774,775],[64,8019,346],{"className":8020},[129,775],[64,8022],{"className":8023,"style":160},[135],[64,8025,103],{"className":8026},[164],[64,8028],{"className":8029,"style":160},[135],[64,8031,8033,8036,8039,8042,8045,8048],{"className":8032},[120],[64,8034],{"className":8035,"style":384},[124],[64,8037,6490],{"className":8038},[129],[64,8040,100],{"className":8041},[129,130],[64,8043],{"className":8044,"style":160},[135],[64,8046,1032],{"className":8047},[164],[64
,8049],{"className":8050,"style":160},[135],[64,8052,8054,8057],{"className":8053},[120],[64,8055],{"className":8056,"style":406},[124],[64,8058,346],{"className":8059},[129],[56,8061,8063],{"id":8062},"chain-rule","Chain Rule",[12,8065,8066,8067,8070,8071,2650],{},"For a ",[21,8068,8069],{},"composition"," of functions ",[64,8072,8074,8100],{"className":8073},[71],[64,8075,8077],{"className":8076},[75],[77,8078,8079],{"xmlns":79},[82,8080,8081,8097],{},[85,8082,8083,8085,8087,8089,8091,8093,8095],{},[88,8084,1544],{},[92,8086,700],{"stretchy":699},[88,8088,7399],{},[92,8090,700],{"stretchy":699},[88,8092,100],{},[92,8094,719],{"stretchy":699},[92,8096,719],{"stretchy":699},[108,8098,8099],{"encoding":110},"f(g(x))",[64,8101,8103],{"className":8102,"ariaHidden":116},[115],[64,8104,8106,8109,8112,8115,8118,8121,8124],{"className":8105},[120],[64,8107],{"className":8108,"style":732},[124],[64,8110,1544],{"className":8111,"style":1575},[129,130],[64,8113,700],{"className":8114},[736],[64,8116,7399],{"className":8117,"style":131},[129,130],[64,8119,700],{"className":8120},[736],[64,8122,100],{"className":8123},[129,130],[64,8125,8127],{"className":8126},[845],"))",[64,8129,8131],{"className":8130},[67],[64,8132,8134,8204],{"className":8133},[71],[64,8135,8137],{"className":8136},[75],[77,8138,8139],{"xmlns":79,"display":80},[82,8140,8141,8201],{},[85,8142,8143,8153,8155,8157,8159,8161,8163,8165,8167,8169,8175,8177,8179,8181,8183,8185,8187,8189,8195,8197,8199],{},[1002,8144,8145,8147],{},[88,8146,4812],{},[85,8148,8149,8151],{},[88,8150,4812],{},[88,8152,100],{},[88,8154,1544],{},[92,8156,700],{"stretchy":699},[88,8158,7399],{},[92,8160,700],{"stretchy":699},[88,8162,100],{},[92,8164,719],{"stretchy":699},[92,8166,719],{"stretchy":699},[92,8168,94],{},[1554,8170,8171,8173],{},[88,8172,1544],{},[92,8174,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,8176,700],{"stretchy":699},[88,8178,7399],{},[92,8180,700],{"stretchy":699},[88,8182,100],{},[92,8184,719],{"str
etchy":699},[92,8186,719],{"stretchy":699},[92,8188,6030],{},[1554,8190,8191,8193],{},[88,8192,7399],{},[92,8194,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,8196,700],{"stretchy":699},[88,8198,100],{},[92,8200,719],{"stretchy":699},[108,8202,8203],{"encoding":110},"\\frac{d}{dx} f(g(x)) = f'(g(x)) \\cdot g'(x)",[64,8205,8207,8305,8367],{"className":8206,"ariaHidden":116},[115],[64,8208,8210,8213,8278,8281,8284,8287,8290,8293,8296,8299,8302],{"className":8209},[120],[64,8211],{"className":8212,"style":2249},[124],[64,8214,8216,8219,8275],{"className":8215},[129],[64,8217],{"className":8218},[736,1092],[64,8220,8222],{"className":8221},[1002],[64,8223,8225,8267],{"className":8224},[750,751],[64,8226,8228,8264],{"className":8227},[755],[64,8229,8231,8245,8253],{"className":8230,"style":2268},[759],[64,8232,8233,8236],{"style":1108},[64,8234],{"className":8235,"style":1112},[767],[64,8237,8239,8242],{"className":8238},[129],[64,8240,4812],{"className":8241},[129,130],[64,8243,100],{"className":8244},[129,130],[64,8246,8247,8250],{"style":1124},[64,8248],{"className":8249,"style":1112},[767],[64,8251],{"className":8252,"style":1132},[1131],[64,8254,8255,8258],{"style":1135},[64,8256],{"className":8257,"style":1112},[767],[64,8259,8261],{"className":8260},[129],[64,8262,4812],{"className":8263},[129,130],[64,8265,783],{"className":8266},[782],[64,8268,8270],{"className":8269},[755],[64,8271,8273],{"className":8272,"style":1157},[759],[64,8274],{},[64,8276],{"className":8277},[845,1092],[64,8279,1544],{"className":8280,"style":1575},[129,130],[64,8282,700],{"className":8283},[736],[64,8285,7399],{"className":8286,"style":131},[129,130],[64,8288,700],{"className":8289},[736],[64,8291,100],{"className":8292},[129,130],[64,8294,8127],{"className":8295},[845],[64,8297],{"className":8298,"style":136},[135],[64,8300,94],{"className":8301},[140],[64,8303],{"className":8304,"style":136},[135],[64,8306,8308,8311,8343,8346,8349,8352,8355,8358,8361,8364],{"className":8
307},[120],[64,8309],{"className":8310,"style":7055},[124],[64,8312,8314,8317],{"className":8313},[129],[64,8315,1544],{"className":8316,"style":1575},[129,130],[64,8318,8320],{"className":8319},[746],[64,8321,8323],{"className":8322},[750],[64,8324,8326],{"className":8325},[755],[64,8327,8329],{"className":8328,"style":5055},[759],[64,8330,8331,8334],{"style":1621},[64,8332],{"className":8333,"style":768},[767],[64,8335,8337],{"className":8336},[772,773,774,775],[64,8338,8340],{"className":8339},[129,775],[64,8341,4731],{"className":8342},[129,775],[64,8344,700],{"className":8345},[736],[64,8347,7399],{"className":8348,"style":131},[129,130],[64,8350,700],{"className":8351},[736],[64,8353,100],{"className":8354},[129,130],[64,8356,8127],{"className":8357},[845],[64,8359],{"className":8360,"style":160},[135],[64,8362,6030],{"className":8363},[164],[64,8365],{"className":8366,"style":160},[135],[64,8368,8370,8373,8405,8408,8411],{"className":8369},[120],[64,8371],{"className":8372,"style":7055},[124],[64,8374,8376,8379],{"className":8375},[129],[64,8377,7399],{"className":8378,"style":131},[129,130],[64,8380,8382],{"className":8381},[746],[64,8383,8385],{"className":8384},[750],[64,8386,8388],{"className":8387},[755],[64,8389,8391],{"className":8390,"style":5055},[759],[64,8392,8393,8396],{"style":1621},[64,8394],{"className":8395,"style":768},[767],[64,8397,8399],{"className":8398},[772,773,774,775],[64,8400,8402],{"className":8401},[129,775],[64,8403,4731],{"className":8404},[129,775],[64,8406,700],{"className":8407},[736],[64,8409,100],{"className":8410},[129,130],[64,8412,719],{"className":8413},[845],[12,8415,8416,8417],{},"Read as: ",[16,8418,8419],{},"\"derivative of outer, evaluated at inner — times derivative of 
inner.\"",[12,8421,8422,326,8424],{},[21,8423,325],{},[64,8425,8427,8465],{"className":8426},[71],[64,8428,8430],{"className":8429},[75],[77,8431,8432],{"xmlns":79},[82,8433,8434,8462],{},[85,8435,8436,8438,8440,8442,8444,8446,8448,8450,8452,8454,8456],{},[88,8437,2109],{},[92,8439,700],{"stretchy":699},[88,8441,100],{},[92,8443,719],{"stretchy":699},[92,8445,94],{},[92,8447,700],{"stretchy":699},[344,8449,546],{},[88,8451,100],{},[92,8453,103],{},[344,8455,353],{},[1554,8457,8458,8460],{},[92,8459,719],{"stretchy":699},[344,8461,2714],{},[108,8463,8464],{"encoding":110},"h(x) = (3x + 1)^4",[64,8466,8468,8495,8519],{"className":8467,"ariaHidden":116},[115],[64,8469,8471,8474,8477,8480,8483,8486,8489,8492],{"className":8470},[120],[64,8472],{"className":8473,"style":732},[124],[64,8475,2109],{"className":8476},[129,130],[64,8478,700],{"className":8479},[736],[64,8481,100],{"className":8482},[129,130],[64,8484,719],{"className":8485},[845],[64,8487],{"className":8488,"style":136},[135],[64,8490,94],{"className":8491},[140],[64,8493],{"className":8494,"style":136},[135],[64,8496,8498,8501,8504,8507,8510,8513,8516],{"className":8497},[120],[64,8499],{"className":8500,"style":732},[124],[64,8502,700],{"className":8503},[736],[64,8505,546],{"className":8506},[129],[64,8508,100],{"className":8509},[129,130],[64,8511],{"className":8512,"style":160},[135],[64,8514,103],{"className":8515},[164],[64,8517],{"className":8518,"style":160},[135],[64,8520,8522,8526,8529],{"className":8521},[120],[64,8523],{"className":8524,"style":8525},[124],"height:1.0641em;vertical-align:-0.25em;",[64,8527,353],{"className":8528},[129],[64,8530,8532,8535],{"className":8531},[845],[64,8533,719],{"className":8534},[845],[64,8536,8538],{"className":8537},[746],[64,8539,8541],{"className":8540},[750],[64,8542,8544],{"className":8543},[755],[64,8545,8547],{"className":8546,"style":1735},[759],[64,8548,8549,8552],{"style":1756},[64,8550],{"className":8551,"style":768},[767],[64,8553,8555],{"className"
:8554},[772,773,774,775],[64,8556,2714],{"className":8557},[129,775],[12,8559,8560,8561,846,8654,2650],{},"Let ",[64,8562,8564,8594],{"className":8563},[71],[64,8565,8567],{"className":8566},[75],[77,8568,8569],{"xmlns":79},[82,8570,8571,8591],{},[85,8572,8573,8575,8577,8579,8581,8583,8585,8587,8589],{},[88,8574,7399],{},[92,8576,700],{"stretchy":699},[88,8578,100],{},[92,8580,719],{"stretchy":699},[92,8582,94],{},[344,8584,546],{},[88,8586,100],{},[92,8588,103],{},[344,8590,353],{},[108,8592,8593],{"encoding":110},"g(x) = 3x + 1",[64,8595,8597,8624,8645],{"className":8596,"ariaHidden":116},[115],[64,8598,8600,8603,8606,8609,8612,8615,8618,8621],{"className":8599},[120],[64,8601],{"className":8602,"style":732},[124],[64,8604,7399],{"className":8605,"style":131},[129,130],[64,8607,700],{"className":8608},[736],[64,8610,100],{"className":8611},[129,130],[64,8613,719],{"className":8614},[845],[64,8616],{"className":8617,"style":136},[135],[64,8619,94],{"className":8620},[140],[64,8622],{"className":8623,"style":136},[135],[64,8625,8627,8630,8633,8636,8639,8642],{"className":8626},[120],[64,8628],{"className":8629,"style":384},[124],[64,8631,546],{"className":8632},[129],[64,8634,100],{"className":8635},[129,130],[64,8637],{"className":8638,"style":160},[135],[64,8640,103],{"className":8641},[164],[64,8643],{"className":8644,"style":160},[135],[64,8646,8648,8651],{"className":8647},[120],[64,8649],{"className":8650,"style":406},[124],[64,8652,353],{"className":8653},[129],[64,8655,8657,8686],{"className":8656},[71],[64,8658,8660],{"className":8659},[75],[77,8661,8662],{"xmlns":79},[82,8663,8664,8683],{},[85,8665,8666,8668,8670,8673,8675,8677],{},[88,8667,1544],{},[92,8669,700],{"stretchy":699},[88,8671,8672],{},"u",[92,8674,719],{"stretchy":699},[92,8676,94],{},[1554,8678,8679,8681],{},[88,8680,8672],{},[344,8682,2714],{},[108,8684,8685],{"encoding":110},"f(u) = 
u^4",[64,8687,8689,8716],{"className":8688,"ariaHidden":116},[115],[64,8690,8692,8695,8698,8701,8704,8707,8710,8713],{"className":8691},[120],[64,8693],{"className":8694,"style":732},[124],[64,8696,1544],{"className":8697,"style":1575},[129,130],[64,8699,700],{"className":8700},[736],[64,8702,8672],{"className":8703},[129,130],[64,8705,719],{"className":8706},[845],[64,8708],{"className":8709,"style":136},[135],[64,8711,94],{"className":8712},[140],[64,8714],{"className":8715,"style":136},[135],[64,8717,8719,8722],{"className":8718},[120],[64,8720],{"className":8721,"style":1735},[124],[64,8723,8725,8728],{"className":8724},[129],[64,8726,8672],{"className":8727},[129,130],[64,8729,8731],{"className":8730},[746],[64,8732,8734],{"className":8733},[750],[64,8735,8737],{"className":8736},[755],[64,8738,8740],{"className":8739,"style":1735},[759],[64,8741,8742,8745],{"style":1756},[64,8743],{"className":8744,"style":768},[767],[64,8746,8748],{"className":8747},[772,773,774,775],[64,8749,2714],{"className":8750},[129,775],[64,8752,8754],{"className":8753},[67],[64,8755,8757,8826],{"className":8756},[71],[64,8758,8760],{"className":8759},[75],[77,8761,8762],{"xmlns":79,"display":80},[82,8763,8764,8823],{},[85,8765,8766,8772,8774,8776,8778,8780,8782,8784,8786,8788,8790,8792,8798,8800,8802,8804,8807,8809,8811,8813,8815,8817],{},[1554,8767,8768,8770],{},[88,8769,2109],{},[92,8771,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,8773,700],{"stretchy":699},[88,8775,100],{},[92,8777,719],{"stretchy":699},[92,8779,94],{},[344,8781,2714],{},[92,8783,700],{"stretchy":699},[344,8785,546],{},[88,8787,100],{},[92,8789,103],{},[344,8791,353],{},[1554,8793,8794,8796],{},[92,8795,719],{"stretchy":699},[344,8797,546],{},[92,8799,6030],{},[344,8801,546],{},[92,8803,94],{},[344,8805,8806],{},"12",[92,8808,700],{"stretchy":699},[344,8810,546],{},[88,8812,100],{},[92,8814,103],{},[344,8816,353],{},[1554,8818,8819,8821],{},[92,8820,719],{"stretchy":699},[344,8822,546],{},[108,8824,8
825],{"encoding":110},"h'(x) = 4(3x+1)^3 \\cdot 3 = 12(3x+1)^3",[64,8827,8829,8885,8912,8960,8978,9005],{"className":8828,"ariaHidden":116},[115],[64,8830,8832,8835,8867,8870,8873,8876,8879,8882],{"className":8831},[120],[64,8833],{"className":8834,"style":7055},[124],[64,8836,8838,8841],{"className":8837},[129],[64,8839,2109],{"className":8840},[129,130],[64,8842,8844],{"className":8843},[746],[64,8845,8847],{"className":8846},[750],[64,8848,8850],{"className":8849},[755],[64,8851,8853],{"className":8852,"style":5055},[759],[64,8854,8855,8858],{"style":1621},[64,8856],{"className":8857,"style":768},[767],[64,8859,8861],{"className":8860},[772,773,774,775],[64,8862,8864],{"className":8863},[129,775],[64,8865,4731],{"className":8866},[129,775],[64,8868,700],{"className":8869},[736],[64,8871,100],{"className":8872},[129,130],[64,8874,719],{"className":8875},[845],[64,8877],{"className":8878,"style":136},[135],[64,8880,94],{"className":8881},[140],[64,8883],{"className":8884,"style":136},[135],[64,8886,8888,8891,8894,8897,8900,8903,8906,8909],{"className":8887},[120],[64,8889],{"className":8890,"style":732},[124],[64,8892,2714],{"className":8893},[129],[64,8895,700],{"className":8896},[736],[64,8898,546],{"className":8899},[129],[64,8901,100],{"className":8902},[129,130],[64,8904],{"className":8905,"style":160},[135],[64,8907,103],{"className":8908},[164],[64,8910],{"className":8911,"style":160},[135],[64,8913,8915,8919,8922,8951,8954,8957],{"className":8914},[120],[64,8916],{"className":8917,"style":8918},[124],"height:1.1141em;vertical-align:-0.25em;",[64,8920,353],{"className":8921},[129],[64,8923,8925,8928],{"className":8924},[845],[64,8926,719],{"className":8927},[845],[64,8929,8931],{"className":8930},[746],[64,8932,8934],{"className":8933},[750],[64,8935,8937],{"className":8936},[755],[64,8938,8940],{"className":8939,"style":1600},[759],[64,8941,8942,8945],{"style":1621},[64,8943],{"className":8944,"style":768},[767],[64,8946,8948],{"className":8947},[772,773,77
4,775],[64,8949,546],{"className":8950},[129,775],[64,8952],{"className":8953,"style":160},[135],[64,8955,6030],{"className":8956},[164],[64,8958],{"className":8959,"style":160},[135],[64,8961,8963,8966,8969,8972,8975],{"className":8962},[120],[64,8964],{"className":8965,"style":406},[124],[64,8967,546],{"className":8968},[129],[64,8970],{"className":8971,"style":136},[135],[64,8973,94],{"className":8974},[140],[64,8976],{"className":8977,"style":136},[135],[64,8979,8981,8984,8987,8990,8993,8996,8999,9002],{"className":8980},[120],[64,8982],{"className":8983,"style":732},[124],[64,8985,8806],{"className":8986},[129],[64,8988,700],{"className":8989},[736],[64,8991,546],{"className":8992},[129],[64,8994,100],{"className":8995},[129,130],[64,8997],{"className":8998,"style":160},[135],[64,9000,103],{"className":9001},[164],[64,9003],{"className":9004,"style":160},[135],[64,9006,9008,9011,9014],{"className":9007},[120],[64,9009],{"className":9010,"style":8918},[124],[64,9012,353],{"className":9013},[129],[64,9015,9017,9020],{"className":9016},[845],[64,9018,719],{"className":9019},[845],[64,9021,9023],{"className":9022},[746],[64,9024,9026],{"className":9025},[750],[64,9027,9029],{"className":9028},[755],[64,9030,9032],{"className":9031,"style":1600},[759],[64,9033,9034,9037],{"style":1621},[64,9035],{"className":9036,"style":768},[767],[64,9038,9040],{"className":9039},[772,773,774,775],[64,9041,546],{"className":9042},[129,775],[12,9044,9045,9046,9049],{},"The chain rule is everywhere in machine learning — ",[21,9047,9048],{},"backpropagation"," is essentially repeated application of it through layers of a neural network.",[56,9051,9053],{"id":9052},"common-derivatives-reference","Common Derivatives 
Reference",[411,9055,9056,9064],{},[414,9057,9058],{},[417,9059,9060,9062],{},[420,9061,6235],{},[420,9063,6238],{},[530,9065,9066,9190,9344,9444,9549],{},[417,9067,9068,9130],{},[535,9069,9070],{},[64,9071,9073,9092],{"className":9072},[71],[64,9074,9076],{"className":9075},[75],[77,9077,9078],{"xmlns":79},[82,9079,9080,9089],{},[85,9081,9082],{},[1554,9083,9084,9087],{},[88,9085,9086],{},"e",[88,9088,100],{},[108,9090,9091],{"encoding":110},"e^x",[64,9093,9095],{"className":9094,"ariaHidden":116},[115],[64,9096,9098,9101],{"className":9097},[120],[64,9099],{"className":9100,"style":5963},[124],[64,9102,9104,9107],{"className":9103},[129],[64,9105,9086],{"className":9106},[129,130],[64,9108,9110],{"className":9109},[746],[64,9111,9113],{"className":9112},[750],[64,9114,9116],{"className":9115},[755],[64,9117,9119],{"className":9118,"style":5963},[759],[64,9120,9121,9124],{"style":1756},[64,9122],{"className":9123,"style":768},[767],[64,9125,9127],{"className":9126},[772,773,774,775],[64,9128,100],{"className":9129},[129,130,775],[535,9131,9132],{},[64,9133,9135,9152],{"className":9134},[71],[64,9136,9138],{"className":9137},[75],[77,9139,9140],{"xmlns":79},[82,9141,9142,9150],{},[85,9143,9144],{},[1554,9145,9146,9148],{},[88,9147,9086],{},[88,9149,100],{},[108,9151,9091],{"encoding":110},[64,9153,9155],{"className":9154,"ariaHidden":116},[115],[64,9156,9158,9161],{"className":9157},[120],[64,9159],{"className":9160,"style":5963},[124],[64,9162,9164,9167],{"className":9163},[129],[64,9165,9086],{"className":9166},[129,130],[64,9168,9170],{"className":9169},[746],[64,9171,9173],{"className":9172},[750],[64,9174,9176],{"className":9175},[755],[64,9177,9179],{"className":9178,"style":5963},[759],[64,9180,9181,9184],{"style":1756},[64,9182],{"className":9183,"style":768},[767],[64,9185,9187],{"className":9186},[772,773,774,775],[64,9188,100],{"className":9189},[129,130,775],[417,9191,9192,9241],{},[535,9193,9194],{},[64,9195,9197,9220],{"className":9196},[71],[64,9198,9
200],{"className":9199},[75],[77,9201,9202],{"xmlns":79},[82,9203,9204,9217],{},[85,9205,9206,9209,9211,9213,9215],{},[88,9207,9208],{},"ln",[92,9210,3190],{},[92,9212,700],{"stretchy":699},[88,9214,100],{},[92,9216,719],{"stretchy":699},[108,9218,9219],{"encoding":110},"\\ln(x)",[64,9221,9223],{"className":9222,"ariaHidden":116},[115],[64,9224,9226,9229,9232,9235,9238],{"className":9225},[120],[64,9227],{"className":9228,"style":732},[124],[64,9230,9208],{"className":9231},[3244],[64,9233,700],{"className":9234},[736],[64,9236,100],{"className":9237},[129,130],[64,9239,719],{"className":9240},[845],[535,9242,9243],{},[64,9244,9246,9264],{"className":9245},[71],[64,9247,9249],{"className":9248},[75],[77,9250,9251],{"xmlns":79},[82,9252,9253,9261],{},[85,9254,9255],{},[1002,9256,9257,9259],{},[344,9258,353],{},[88,9260,100],{},[108,9262,9263],{"encoding":110},"\\frac{1}{x}",[64,9265,9267],{"className":9266,"ariaHidden":116},[115],[64,9268,9270,9274],{"className":9269},[120],[64,9271],{"className":9272,"style":9273},[124],"height:1.1901em;vertical-align:-0.345em;",[64,9275,9277,9280,9341],{"className":9276},[129],[64,9278],{"className":9279},[736,1092],[64,9281,9283],{"className":9282},[1002],[64,9284,9286,9333],{"className":9285},[750,751],[64,9287,9289,9330],{"className":9288},[755],[64,9290,9293,9307,9315],{"className":9291,"style":9292},[759],"height:0.8451em;",[64,9294,9295,9298],{"style":1994},[64,9296],{"className":9297,"style":1112},[767],[64,9299,9301],{"className":9300},[772,773,774,775],[64,9302,9304],{"className":9303},[129,775],[64,9305,100],{"className":9306},[129,130,775],[64,9308,9309,9312],{"style":1124},[64,9310],{"className":9311,"style":1112},[767],[64,9313],{"className":9314,"style":1132},[1131],[64,9316,9318,9321],{"style":9317},"top:-3.394em;",[64,9319],{"className":9320,"style":1112},[767],[64,9322,9324],{"className":9323},[772,773,774,775],[64,9325,9327],{"className":9326},[129,775],[64,9328,353],{"className":9329},[129,775],[64,9331,783],{"cl
assName":9332},[782],[64,9334,9336],{"className":9335},[755],[64,9337,9339],{"className":9338,"style":2045},[759],[64,9340],{},[64,9342],{"className":9343},[845,1092],[417,9345,9346,9395],{},[535,9347,9348],{},[64,9349,9351,9374],{"className":9350},[71],[64,9352,9354],{"className":9353},[75],[77,9355,9356],{"xmlns":79},[82,9357,9358,9371],{},[85,9359,9360,9363,9365,9367,9369],{},[88,9361,9362],{},"sin",[92,9364,3190],{},[92,9366,700],{"stretchy":699},[88,9368,100],{},[92,9370,719],{"stretchy":699},[108,9372,9373],{"encoding":110},"\\sin(x)",[64,9375,9377],{"className":9376,"ariaHidden":116},[115],[64,9378,9380,9383,9386,9389,9392],{"className":9379},[120],[64,9381],{"className":9382,"style":732},[124],[64,9384,9362],{"className":9385},[3244],[64,9387,700],{"className":9388},[736],[64,9390,100],{"className":9391},[129,130],[64,9393,719],{"className":9394},[845],[535,9396,9397],{},[64,9398,9400,9423],{"className":9399},[71],[64,9401,9403],{"className":9402},[75],[77,9404,9405],{"xmlns":79},[82,9406,9407,9420],{},[85,9408,9409,9412,9414,9416,9418],{},[88,9410,9411],{},"cos",[92,9413,3190],{},[92,9415,700],{"stretchy":699},[88,9417,100],{},[92,9419,719],{"stretchy":699},[108,9421,9422],{"encoding":110},"\\cos(x)",[64,9424,9426],{"className":9425,"ariaHidden":116},[115],[64,9427,9429,9432,9435,9438,9441],{"className":9428},[120],[64,9430],{"className":9431,"style":732},[124],[64,9433,9411],{"className":9434},[3244],[64,9436,700],{"className":9437},[736],[64,9439,100],{"className":9440},[129,130],[64,9442,719],{"className":9443},[845],[417,9445,9446,9493],{},[535,9447,9448],{},[64,9449,9451,9472],{"className":9450},[71],[64,9452,9454],{"className":9453},[75],[77,9455,9456],{"xmlns":79},[82,9457,9458,9470],{},[85,9459,9460,9462,9464,9466,9468],{},[88,9461,9411],{},[92,9463,3190],{},[92,9465,700],{"stretchy":699},[88,9467,100],{},[92,9469,719],{"stretchy":699},[108,9471,9422],{"encoding":110},[64,9473,9475],{"className":9474,"ariaHidden":116},[115],[64,9476,9478,9481,9484,9
487,9490],{"className":9477},[120],[64,9479],{"className":9480,"style":732},[124],[64,9482,9411],{"className":9483},[3244],[64,9485,700],{"className":9486},[736],[64,9488,100],{"className":9489},[129,130],[64,9491,719],{"className":9492},[845],[535,9494,9495],{},[64,9496,9498,9522],{"className":9497},[71],[64,9499,9501],{"className":9500},[75],[77,9502,9503],{"xmlns":79},[82,9504,9505,9519],{},[85,9506,9507,9509,9511,9513,9515,9517],{},[92,9508,1032],{},[88,9510,9362],{},[92,9512,3190],{},[92,9514,700],{"stretchy":699},[88,9516,100],{},[92,9518,719],{"stretchy":699},[108,9520,9521],{"encoding":110},"-\\sin(x)",[64,9523,9525],{"className":9524,"ariaHidden":116},[115],[64,9526,9528,9531,9534,9537,9540,9543,9546],{"className":9527},[120],[64,9529],{"className":9530,"style":732},[124],[64,9532,1032],{"className":9533},[129],[64,9535],{"className":9536,"style":800},[135],[64,9538,9362],{"className":9539},[3244],[64,9541,700],{"className":9542},[736],[64,9544,100],{"className":9545},[129,130],[64,9547,719],{"className":9548},[845],[417,9550,9551,9749],{},[535,9552,9553,9748],{},[64,9554,9556,9599],{"className":9555},[71],[64,9557,9559],{"className":9558},[75],[77,9560,9561],{"xmlns":79},[82,9562,9563,9596],{},[85,9564,9565,9568,9570,9572,9574,9576],{},[88,9566,9567],{},"σ",[92,9569,700],{"stretchy":699},[88,9571,100],{},[92,9573,719],{"stretchy":699},[92,9575,94],{},[1002,9577,9578,9580],{},[344,9579,353],{},[85,9581,9582,9584,9586],{},[344,9583,353],{},[92,9585,103],{},[1554,9587,9588,9590],{},[88,9589,9086],{},[85,9591,9592,9594],{},[92,9593,1032],{},[88,9595,100],{},[108,9597,9598],{"encoding":110},"\\sigma(x) = 
\\frac{1}{1+e^{-x}}",[64,9600,9602,9629],{"className":9601,"ariaHidden":116},[115],[64,9603,9605,9608,9611,9614,9617,9620,9623,9626],{"className":9604},[120],[64,9606],{"className":9607,"style":732},[124],[64,9609,9567],{"className":9610,"style":131},[129,130],[64,9612,700],{"className":9613},[736],[64,9615,100],{"className":9616},[129,130],[64,9618,719],{"className":9619},[845],[64,9621],{"className":9622,"style":136},[135],[64,9624,94],{"className":9625},[140],[64,9627],{"className":9628,"style":136},[135],[64,9630,9632,9636],{"className":9631},[120],[64,9633],{"className":9634,"style":9635},[124],"height:1.2484em;vertical-align:-0.4033em;",[64,9637,9639,9642,9745],{"className":9638},[129],[64,9640],{"className":9641},[736,1092],[64,9643,9645],{"className":9644},[1002],[64,9646,9648,9736],{"className":9647},[750,751],[64,9649,9651,9733],{"className":9650},[755],[64,9652,9654,9711,9719],{"className":9653,"style":9292},[759],[64,9655,9656,9659],{"style":1994},[64,9657],{"className":9658,"style":1112},[767],[64,9660,9662],{"className":9661},[772,773,774,775],[64,9663,9665,9668,9671],{"className":9664},[129,775],[64,9666,353],{"className":9667},[129,775],[64,9669,103],{"className":9670},[164,775],[64,9672,9674,9677],{"className":9673},[129,775],[64,9675,9086],{"className":9676},[129,130,775],[64,9678,9680],{"className":9679},[746],[64,9681,9683],{"className":9682},[750],[64,9684,9686],{"className":9685},[755],[64,9687,9690],{"className":9688,"style":9689},[759],"height:0.7027em;",[64,9691,9693,9697],{"style":9692},"top:-2.786em;margin-right:0.0714em;",[64,9694],{"className":9695,"style":9696},[767],"height:2.5em;",[64,9698,9702],{"className":9699},[772,9700,9701,775],"reset-size3","size1",[64,9703,9705,9708],{"className":9704},[129,775],[64,9706,1032],{"className":9707},[129,775],[64,9709,100],{"className":9710},[129,130,775],[64,9712,9713,9716],{"style":1124},[64,9714],{"className":9715,"style":1112},[767],[64,9717],{"className":9718,"style":1132},[1131],[64,9720,972
1,9724],{"style":9317},[64,9722],{"className":9723,"style":1112},[767],[64,9725,9727],{"className":9726},[772,773,774,775],[64,9728,9730],{"className":9729},[129,775],[64,9731,353],{"className":9732},[129,775],[64,9734,783],{"className":9735},[782],[64,9737,9739],{"className":9738},[755],[64,9740,9743],{"className":9741,"style":9742},[759],"height:0.4033em;",[64,9744],{},[64,9746],{"className":9747},[845,1092]," (sigmoid)",[535,9750,9751],{},[64,9752,9754,9790],{"className":9753},[71],[64,9755,9757],{"className":9756},[75],[77,9758,9759],{"xmlns":79},[82,9760,9761,9787],{},[85,9762,9763,9765,9767,9769,9771,9773,9775,9777,9779,9781,9783,9785],{},[88,9764,9567],{},[92,9766,700],{"stretchy":699},[88,9768,100],{},[92,9770,719],{"stretchy":699},[92,9772,700],{"stretchy":699},[344,9774,353],{},[92,9776,1032],{},[88,9778,9567],{},[92,9780,700],{"stretchy":699},[88,9782,100],{},[92,9784,719],{"stretchy":699},[92,9786,719],{"stretchy":699},[108,9788,9789],{"encoding":110},"\\sigma(x)(1 - \\sigma(x))",[64,9791,9793,9826],{"className":9792,"ariaHidden":116},[115],[64,9794,9796,9799,9802,9805,9808,9811,9814,9817,9820,9823],{"className":9795},[120],[64,9797],{"className":9798,"style":732},[124],[64,9800,9567],{"className":9801,"style":131},[129,130],[64,9803,700],{"className":9804},[736],[64,9806,100],{"className":9807},[129,130],[64,9809,719],{"className":9810},[845],[64,9812,700],{"className":9813},[736],[64,9815,353],{"className":9816},[129],[64,9818],{"className":9819,"style":160},[135],[64,9821,1032],{"className":9822},[164],[64,9824],{"className":9825,"style":160},[135],[64,9827,9829,9832,9835,9838,9841],{"className":9828},[120],[64,9830],{"className":9831,"style":732},[124],[64,9833,9567],{"className":9834,"style":131},[129,130],[64,9836,700],{"className":9837},[736],[64,9839,100],{"className":9840},[129,130],[64,9842,8127],{"className":9843},[845],[26,9845],{},[9847,9848],"interactive-derivative",{},[26,9850],{},[51,9852,9854],{"id":9853},"part-6-derivatives-in-practice"
,"Part 6 — Derivatives in Practice",[56,9856,9858],{"id":9857},"finding-minima-and-maxima","Finding Minima and Maxima",[12,9860,7099,9861,9959,9960,9963],{},[64,9862,9864,9891],{"className":9863},[71],[64,9865,9867],{"className":9866},[75],[77,9868,9869],{"xmlns":79},[82,9870,9871,9889],{},[85,9872,9873,9879,9881,9883,9885,9887],{},[1554,9874,9875,9877],{},[88,9876,1544],{},[92,9878,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,9880,700],{"stretchy":699},[88,9882,100],{},[92,9884,719],{"stretchy":699},[92,9886,94],{},[344,9888,537],{},[108,9890,5605],{"encoding":110},[64,9892,9894,9950],{"className":9893,"ariaHidden":116},[115],[64,9895,9897,9900,9932,9935,9938,9941,9944,9947],{"className":9896},[120],[64,9898],{"className":9899,"style":4750},[124],[64,9901,9903,9906],{"className":9902},[129],[64,9904,1544],{"className":9905,"style":1575},[129,130],[64,9907,9909],{"className":9908},[746],[64,9910,9912],{"className":9911},[750],[64,9913,9915],{"className":9914},[755],[64,9916,9918],{"className":9917,"style":4769},[759],[64,9919,9920,9923],{"style":1756},[64,9921],{"className":9922,"style":768},[767],[64,9924,9926],{"className":9925},[772,773,774,775],[64,9927,9929],{"className":9928},[129,775],[64,9930,4731],{"className":9931},[129,775],[64,9933,700],{"className":9934},[736],[64,9936,100],{"className":9937},[129,130],[64,9939,719],{"className":9940},[845],[64,9942],{"className":9943,"style":136},[135],[64,9945,94],{"className":9946},[140],[64,9948],{"className":9949,"style":136},[135],[64,9951,9953,9956],{"className":9952},[120],[64,9954],{"className":9955,"style":406},[124],[64,9957,537],{"className":9958},[129]," the function is momentarily flat — this is a ",[21,9961,9962],{},"critical point",". 
There are three types:",[182,9965,9966,10049,10132],{},[185,9967,9968,9971,9972,10048],{},[21,9969,9970],{},"Local minimum",": function dips down then rises → ",[64,9973,9975,9998],{"className":9974},[71],[64,9976,9978],{"className":9977},[75],[77,9979,9980],{"xmlns":79},[82,9981,9982,9996],{},[85,9983,9984,9990,9992,9994],{},[1554,9985,9986,9988],{},[88,9987,1544],{},[92,9989,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,9991,700],{"stretchy":699},[88,9993,100],{},[92,9995,719],{"stretchy":699},[108,9997,4740],{"encoding":110},[64,9999,10001],{"className":10000,"ariaHidden":116},[115],[64,10002,10004,10007,10039,10042,10045],{"className":10003},[120],[64,10005],{"className":10006,"style":4750},[124],[64,10008,10010,10013],{"className":10009},[129],[64,10011,1544],{"className":10012,"style":1575},[129,130],[64,10014,10016],{"className":10015},[746],[64,10017,10019],{"className":10018},[750],[64,10020,10022],{"className":10021},[755],[64,10023,10025],{"className":10024,"style":4769},[759],[64,10026,10027,10030],{"style":1756},[64,10028],{"className":10029,"style":768},[767],[64,10031,10033],{"className":10032},[772,773,774,775],[64,10034,10036],{"className":10035},[129,775],[64,10037,4731],{"className":10038},[129,775],[64,10040,700],{"className":10041},[736],[64,10043,100],{"className":10044},[129,130],[64,10046,719],{"className":10047},[845]," changes from negative to positive",[185,10050,10051,10054,10055,10131],{},[21,10052,10053],{},"Local maximum",": function rises then dips → 
",[64,10056,10058,10081],{"className":10057},[71],[64,10059,10061],{"className":10060},[75],[77,10062,10063],{"xmlns":79},[82,10064,10065,10079],{},[85,10066,10067,10073,10075,10077],{},[1554,10068,10069,10071],{},[88,10070,1544],{},[92,10072,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,10074,700],{"stretchy":699},[88,10076,100],{},[92,10078,719],{"stretchy":699},[108,10080,4740],{"encoding":110},[64,10082,10084],{"className":10083,"ariaHidden":116},[115],[64,10085,10087,10090,10122,10125,10128],{"className":10086},[120],[64,10088],{"className":10089,"style":4750},[124],[64,10091,10093,10096],{"className":10092},[129],[64,10094,1544],{"className":10095,"style":1575},[129,130],[64,10097,10099],{"className":10098},[746],[64,10100,10102],{"className":10101},[750],[64,10103,10105],{"className":10104},[755],[64,10106,10108],{"className":10107,"style":4769},[759],[64,10109,10110,10113],{"style":1756},[64,10111],{"className":10112,"style":768},[767],[64,10114,10116],{"className":10115},[772,773,774,775],[64,10117,10119],{"className":10118},[129,775],[64,10120,4731],{"className":10121},[129,775],[64,10123,700],{"className":10124},[736],[64,10126,100],{"className":10127},[129,130],[64,10129,719],{"className":10130},[845]," changes from positive to negative",[185,10133,10134,10137],{},[21,10135,10136],{},"Saddle point",": function is flat but continues in the same general direction",[12,10139,10140,10142,10143],{},[21,10141,325],{}," Find the minimum of 
",[64,10144,10146,10184],{"className":10145},[71],[64,10147,10149],{"className":10148},[75],[77,10150,10151],{"xmlns":79},[82,10152,10153,10181],{},[85,10154,10155,10157,10159,10161,10163,10165,10171,10173,10175,10177,10179],{},[88,10156,1544],{},[92,10158,700],{"stretchy":699},[88,10160,100],{},[92,10162,719],{"stretchy":699},[92,10164,94],{},[1554,10166,10167,10169],{},[88,10168,100],{},[344,10170,346],{},[92,10172,1032],{},[344,10174,2714],{},[88,10176,100],{},[92,10178,103],{},[344,10180,553],{},[108,10182,10183],{"encoding":110},"f(x) = x^2 - 4x + 5",[64,10185,10187,10214,10258,10279],{"className":10186,"ariaHidden":116},[115],[64,10188,10190,10193,10196,10199,10202,10205,10208,10211],{"className":10189},[120],[64,10191],{"className":10192,"style":732},[124],[64,10194,1544],{"className":10195,"style":1575},[129,130],[64,10197,700],{"className":10198},[736],[64,10200,100],{"className":10201},[129,130],[64,10203,719],{"className":10204},[845],[64,10206],{"className":10207,"style":136},[135],[64,10209,94],{"className":10210},[140],[64,10212],{"className":10213,"style":136},[135],[64,10215,10217,10220,10249,10252,10255],{"className":10216},[120],[64,10218],{"className":10219,"style":7756},[124],[64,10221,10223,10226],{"className":10222},[129],[64,10224,100],{"className":10225},[129,130],[64,10227,10229],{"className":10228},[746],[64,10230,10232],{"className":10231},[750],[64,10233,10235],{"className":10234},[755],[64,10236,10238],{"className":10237,"style":1735},[759],[64,10239,10240,10243],{"style":1756},[64,10241],{"className":10242,"style":768},[767],[64,10244,10246],{"className":10245},[772,773,774,775],[64,10247,346],{"className":10248},[129,775],[64,10250],{"className":10251,"style":160},[135],[64,10253,1032],{"className":10254},[164],[64,10256],{"className":10257,"style":160},[135],[64,10259,10261,10264,10267,10270,10273,10276],{"className":10260},[120],[64,10262],{"className":10263,"style":384},[124],[64,10265,2714],{"className":10266},[129],[64,10268,100],
{"className":10269},[129,130],[64,10271],{"className":10272,"style":160},[135],[64,10274,103],{"className":10275},[164],[64,10277],{"className":10278,"style":160},[135],[64,10280,10282,10285],{"className":10281},[120],[64,10283],{"className":10284,"style":406},[124],[64,10286,553],{"className":10287},[129],[64,10289,10291],{"className":10290},[67],[64,10292,10294,10347],{"className":10293},[71],[64,10295,10297],{"className":10296},[75],[77,10298,10299],{"xmlns":79,"display":80},[82,10300,10301,10344],{},[85,10302,10303,10309,10311,10313,10315,10317,10319,10321,10323,10325,10327,10329,10333,10336,10338,10340,10342],{},[1554,10304,10305,10307],{},[88,10306,1544],{},[92,10308,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,10310,700],{"stretchy":699},[88,10312,100],{},[92,10314,719],{"stretchy":699},[92,10316,94],{},[344,10318,346],{},[88,10320,100],{},[92,10322,1032],{},[344,10324,2714],{},[92,10326,94],{},[344,10328,537],{},[10330,10331,10332],"mtext",{},"  ",[92,10334,10335],{},"⟹",[10330,10337,10332],{},[88,10339,100],{},[92,10341,94],{},[344,10343,346],{},[108,10345,10346],{"encoding":110},"f'(x) = 2x - 4 = 0 \\implies x = 
2",[64,10348,10350,10406,10427,10445,10470,10488],{"className":10349,"ariaHidden":116},[115],[64,10351,10353,10356,10388,10391,10394,10397,10400,10403],{"className":10352},[120],[64,10354],{"className":10355,"style":7055},[124],[64,10357,10359,10362],{"className":10358},[129],[64,10360,1544],{"className":10361,"style":1575},[129,130],[64,10363,10365],{"className":10364},[746],[64,10366,10368],{"className":10367},[750],[64,10369,10371],{"className":10370},[755],[64,10372,10374],{"className":10373,"style":5055},[759],[64,10375,10376,10379],{"style":1621},[64,10377],{"className":10378,"style":768},[767],[64,10380,10382],{"className":10381},[772,773,774,775],[64,10383,10385],{"className":10384},[129,775],[64,10386,4731],{"className":10387},[129,775],[64,10389,700],{"className":10390},[736],[64,10392,100],{"className":10393},[129,130],[64,10395,719],{"className":10396},[845],[64,10398],{"className":10399,"style":136},[135],[64,10401,94],{"className":10402},[140],[64,10404],{"className":10405,"style":136},[135],[64,10407,10409,10412,10415,10418,10421,10424],{"className":10408},[120],[64,10410],{"className":10411,"style":384},[124],[64,10413,346],{"className":10414},[129],[64,10416,100],{"className":10417},[129,130],[64,10419],{"className":10420,"style":160},[135],[64,10422,1032],{"className":10423},[164],[64,10425],{"className":10426,"style":160},[135],[64,10428,10430,10433,10436,10439,10442],{"className":10429},[120],[64,10431],{"className":10432,"style":406},[124],[64,10434,2714],{"className":10435},[129],[64,10437],{"className":10438,"style":136},[135],[64,10440,94],{"className":10441},[140],[64,10443],{"className":10444,"style":136},[135],[64,10446,10448,10452,10455,10458,10461,10464,10467],{"className":10447},[120],[64,10449],{"className":10450,"style":10451},[124],"height:0.6684em;vertical-align:-0.024em;",[64,10453,537],{"className":10454},[129],[64,10456],{"className":10457,"style":136},[135],[64,10459],{"className":10460,"style":136},[135],[64,10462,10335],{"clas
sName":10463},[140],[64,10465],{"className":10466,"style":136},[135],[64,10468],{"className":10469,"style":136},[135],[64,10471,10473,10476,10479,10482,10485],{"className":10472},[120],[64,10474],{"className":10475,"style":212},[124],[64,10477,100],{"className":10478},[129,130],[64,10480],{"className":10481,"style":136},[135],[64,10483,94],{"className":10484},[140],[64,10486],{"className":10487,"style":136},[135],[64,10489,10491,10494],{"className":10490},[120],[64,10492],{"className":10493,"style":406},[124],[64,10495,346],{"className":10496},[129],[12,10498,10499,10500,10551,10552,10685],{},"At ",[64,10501,10503,10521],{"className":10502},[71],[64,10504,10506],{"className":10505},[75],[77,10507,10508],{"xmlns":79},[82,10509,10510,10518],{},[85,10511,10512,10514,10516],{},[88,10513,100],{},[92,10515,94],{},[344,10517,346],{},[108,10519,10520],{"encoding":110},"x = 2",[64,10522,10524,10542],{"className":10523,"ariaHidden":116},[115],[64,10525,10527,10530,10533,10536,10539],{"className":10526},[120],[64,10528],{"className":10529,"style":212},[124],[64,10531,100],{"className":10532},[129,130],[64,10534],{"className":10535,"style":136},[135],[64,10537,94],{"className":10538},[140],[64,10540],{"className":10541,"style":136},[135],[64,10543,10545,10548],{"className":10544},[120],[64,10546],{"className":10547,"style":406},[124],[64,10549,346],{"className":10550},[129],": ",[64,10553,10555,10592],{"className":10554},[71],[64,10556,10558],{"className":10557},[75],[77,10559,10560],{"xmlns":79},[82,10561,10562,10589],{},[85,10563,10564,10566,10568,10570,10572,10574,10576,10578,10581,10583,10585,10587],{},[88,10565,1544],{},[92,10567,700],{"stretchy":699},[344,10569,346],{},[92,10571,719],{"stretchy":699},[92,10573,94],{},[344,10575,2714],{},[92,10577,1032],{},[344,10579,10580],{},"8",[92,10582,103],{},[344,10584,553],{},[92,10586,94],{},[344,10588,353],{},[108,10590,10591],{"encoding":110},"f(2) = 4 - 8 + 5 = 
1",[64,10593,10595,10622,10640,10658,10676],{"className":10594,"ariaHidden":116},[115],[64,10596,10598,10601,10604,10607,10610,10613,10616,10619],{"className":10597},[120],[64,10599],{"className":10600,"style":732},[124],[64,10602,1544],{"className":10603,"style":1575},[129,130],[64,10605,700],{"className":10606},[736],[64,10608,346],{"className":10609},[129],[64,10611,719],{"className":10612},[845],[64,10614],{"className":10615,"style":136},[135],[64,10617,94],{"className":10618},[140],[64,10620],{"className":10621,"style":136},[135],[64,10623,10625,10628,10631,10634,10637],{"className":10624},[120],[64,10626],{"className":10627,"style":384},[124],[64,10629,2714],{"className":10630},[129],[64,10632],{"className":10633,"style":160},[135],[64,10635,1032],{"className":10636},[164],[64,10638],{"className":10639,"style":160},[135],[64,10641,10643,10646,10649,10652,10655],{"className":10642},[120],[64,10644],{"className":10645,"style":384},[124],[64,10647,10580],{"className":10648},[129],[64,10650],{"className":10651,"style":160},[135],[64,10653,103],{"className":10654},[164],[64,10656],{"className":10657,"style":160},[135],[64,10659,10661,10664,10667,10670,10673],{"className":10660},[120],[64,10662],{"className":10663,"style":406},[124],[64,10665,553],{"className":10666},[129],[64,10668],{"className":10669,"style":136},[135],[64,10671,94],{"className":10672},[140],[64,10674],{"className":10675,"style":136},[135],[64,10677,10679,10682],{"className":10678},[120],[64,10680],{"className":10681,"style":406},[124],[64,10683,353],{"className":10684},[129]," — this is the minimum.",[10687,10688,10693],"pre",{"className":10689,"code":10690,"language":10691,"meta":10692,"style":10692},"language-python shiki shiki-themes material-theme-lighter material-theme material-theme-palenight","def f(x):\n    return x**2 - 4*x + 5\n\ndef f_prime(x):\n    return 2*x - 4\n\n# Find where derivative = 0\n# 2x - 4 = 0  =>  x = 2\nx_min = 2\nprint(f\"Minimum at x={x_min}, f(x)={f(x_min)}\")  # 
x=2, f(x)=1\n","python","",[10694,10695,10696,10717,10751,10758,10772,10790,10795,10802,10808,10819],"code",{"__ignoreMap":10692},[64,10697,10700,10704,10708,10711,10714],{"class":10698,"line":10699},"line",1,[64,10701,10703],{"class":10702},"spNyl","def",[64,10705,10707],{"class":10706},"s2Zo4"," f",[64,10709,700],{"class":10710},"sMK4o",[64,10712,100],{"class":10713},"sHdIc",[64,10715,10716],{"class":10710},"):\n",[64,10718,10720,10724,10728,10731,10734,10737,10740,10743,10746,10748],{"class":10698,"line":10719},2,[64,10721,10723],{"class":10722},"s7zQu","    return",[64,10725,10727],{"class":10726},"sTEyZ"," x",[64,10729,10730],{"class":10710},"**",[64,10732,346],{"class":10733},"sbssI",[64,10735,10736],{"class":10710}," -",[64,10738,10739],{"class":10733}," 4",[64,10741,10742],{"class":10710},"*",[64,10744,10745],{"class":10726},"x ",[64,10747,103],{"class":10710},[64,10749,10750],{"class":10733}," 5\n",[64,10752,10754],{"class":10698,"line":10753},3,[64,10755,10757],{"emptyLinePlaceholder":10756},true,"\n",[64,10759,10761,10763,10766,10768,10770],{"class":10698,"line":10760},4,[64,10762,10703],{"class":10702},[64,10764,10765],{"class":10706}," f_prime",[64,10767,700],{"class":10710},[64,10769,100],{"class":10713},[64,10771,10716],{"class":10710},[64,10773,10775,10777,10780,10782,10784,10787],{"class":10698,"line":10774},5,[64,10776,10723],{"class":10722},[64,10778,10779],{"class":10733}," 2",[64,10781,10742],{"class":10710},[64,10783,10745],{"class":10726},[64,10785,10786],{"class":10710},"-",[64,10788,10789],{"class":10733}," 4\n",[64,10791,10793],{"class":10698,"line":10792},6,[64,10794,10757],{"emptyLinePlaceholder":10756},[64,10796,10798],{"class":10698,"line":10797},7,[64,10799,10801],{"class":10800},"sHwdD","# Find where derivative = 0\n",[64,10803,10805],{"class":10698,"line":10804},8,[64,10806,10807],{"class":10800},"# 2x - 4 = 0  =>  x = 2\n",[64,10809,10811,10814,10816],{"class":10698,"line":10810},9,[64,10812,10813],{"class":10726},"x_min 
",[64,10815,94],{"class":10710},[64,10817,10818],{"class":10733}," 2\n",[64,10820,10822,10825,10827,10829,10833,10836,10839,10842,10845,10847,10849,10851,10853,10855,10857,10860,10862],{"class":10698,"line":10821},10,[64,10823,10824],{"class":10706},"print",[64,10826,700],{"class":10710},[64,10828,1544],{"class":10702},[64,10830,10832],{"class":10831},"sfazB","\"Minimum at x=",[64,10834,10835],{"class":10733},"{",[64,10837,10838],{"class":10706},"x_min",[64,10840,10841],{"class":10733},"}",[64,10843,10844],{"class":10831},", f(x)=",[64,10846,10835],{"class":10733},[64,10848,1544],{"class":10706},[64,10850,700],{"class":10710},[64,10852,10838],{"class":10706},[64,10854,719],{"class":10710},[64,10856,10841],{"class":10733},[64,10858,10859],{"class":10831},"\"",[64,10861,719],{"class":10710},[64,10863,10864],{"class":10800},"  # x=2, f(x)=1\n",[56,10866,10868],{"id":10867},"the-derivative-as-a-direction-signal","The Derivative as a Direction Signal",[12,10870,10871],{},"This is the key insight that bridges calculus to machine 
learning:",[10873,10874,10875,11093],"blockquote",{},[12,10876,10877],{},[21,10878,7099,10879,10977,10978,11006,11007,11035,11036,11064,11065,2055],{},[64,10880,10882,10909],{"className":10881},[71],[64,10883,10885],{"className":10884},[75],[77,10886,10887],{"xmlns":79},[82,10888,10889,10907],{},[85,10890,10891,10897,10899,10901,10903,10905],{},[1554,10892,10893,10895],{},[88,10894,1544],{},[92,10896,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,10898,700],{"stretchy":699},[88,10900,100],{},[92,10902,719],{"stretchy":699},[92,10904,5326],{},[344,10906,537],{},[108,10908,5331],{"encoding":110},[64,10910,10912,10968],{"className":10911,"ariaHidden":116},[115],[64,10913,10915,10918,10950,10953,10956,10959,10962,10965],{"className":10914},[120],[64,10916],{"className":10917,"style":4750},[124],[64,10919,10921,10924],{"className":10920},[129],[64,10922,1544],{"className":10923,"style":1575},[129,130],[64,10925,10927],{"className":10926},[746],[64,10928,10930],{"className":10929},[750],[64,10931,10933],{"className":10932},[755],[64,10934,10936],{"className":10935,"style":4769},[759],[64,10937,10938,10941],{"style":1756},[64,10939],{"className":10940,"style":768},[767],[64,10942,10944],{"className":10943},[772,773,774,775],[64,10945,10947],{"className":10946},[129,775],[64,10948,4731],{"className":10949},[129,775],[64,10951,700],{"className":10952},[736],[64,10954,100],{"className":10955},[129,130],[64,10957,719],{"className":10958},[845],[64,10960],{"className":10961,"style":136},[135],[64,10963,5326],{"className":10964},[140],[64,10966],{"className":10967,"style":136},[135],[64,10969,10971,10974],{"className":10970},[120],[64,10972],{"className":10973,"style":406},[124],[64,10975,537],{"className":10976},[129]," at some point, moving 
",[64,10979,10981,10994],{"className":10980},[71],[64,10982,10984],{"className":10983},[75],[77,10985,10986],{"xmlns":79},[82,10987,10988,10992],{},[85,10989,10990],{},[88,10991,100],{},[108,10993,100],{"encoding":110},[64,10995,10997],{"className":10996,"ariaHidden":116},[115],[64,10998,11000,11003],{"className":10999},[120],[64,11001],{"className":11002,"style":212},[124],[64,11004,100],{"className":11005},[129,130]," to the right increases ",[64,11008,11010,11023],{"className":11009},[71],[64,11011,11013],{"className":11012},[75],[77,11014,11015],{"xmlns":79},[82,11016,11017,11021],{},[85,11018,11019],{},[88,11020,1544],{},[108,11022,1544],{"encoding":110},[64,11024,11026],{"className":11025,"ariaHidden":116},[115],[64,11027,11029,11032],{"className":11028},[120],[64,11030],{"className":11031,"style":2169},[124],[64,11033,1544],{"className":11034,"style":1575},[129,130],". Moving ",[64,11037,11039,11052],{"className":11038},[71],[64,11040,11042],{"className":11041},[75],[77,11043,11044],{"xmlns":79},[82,11045,11046,11050],{},[85,11047,11048],{},[88,11049,100],{},[108,11051,100],{"encoding":110},[64,11053,11055],{"className":11054,"ariaHidden":116},[115],[64,11056,11058,11061],{"className":11057},[120],[64,11059],{"className":11060,"style":212},[124],[64,11062,100],{"className":11063},[129,130]," to the left decreases 
",[64,11066,11068,11081],{"className":11067},[71],[64,11069,11071],{"className":11070},[75],[77,11072,11073],{"xmlns":79},[82,11074,11075,11079],{},[85,11076,11077],{},[88,11078,1544],{},[108,11080,1544],{"encoding":110},[64,11082,11084],{"className":11083,"ariaHidden":116},[115],[64,11085,11087,11090],{"className":11086},[120],[64,11088],{"className":11089,"style":2169},[124],[64,11091,1544],{"className":11092,"style":1575},[129,130],[12,11094,11095],{},[21,11096,7099,11097,11195],{},[64,11098,11100,11127],{"className":11099},[71],[64,11101,11103],{"className":11102},[75],[77,11104,11105],{"xmlns":79},[82,11106,11107,11125],{},[85,11108,11109,11115,11117,11119,11121,11123],{},[1554,11110,11111,11113],{},[88,11112,1544],{},[92,11114,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,11116,700],{"stretchy":699},[88,11118,100],{},[92,11120,719],{"stretchy":699},[92,11122,5464],{},[344,11124,537],{},[108,11126,5469],{"encoding":110},[64,11128,11130,11186],{"className":11129,"ariaHidden":116},[115],[64,11131,11133,11136,11168,11171,11174,11177,11180,11183],{"className":11132},[120],[64,11134],{"className":11135,"style":4750},[124],[64,11137,11139,11142],{"className":11138},[129],[64,11140,1544],{"className":11141,"style":1575},[129,130],[64,11143,11145],{"className":11144},[746],[64,11146,11148],{"className":11147},[750],[64,11149,11151],{"className":11150},[755],[64,11152,11154],{"className":11153,"style":4769},[759],[64,11155,11156,11159],{"style":1756},[64,11157],{"className":11158,"style":768},[767],[64,11160,11162],{"className":11161},[772,773,774,775],[64,11163,11165],{"className":11164},[129,775],[64,11166,4731],{"className":11167},[129,775],[64,11169,700],{"className":11170},[736],[64,11172,100],{"className":11173},[129,130],[64,11175,719],{"className":11176},[845],[64,11178],{"className":11179,"style":136},[135],[64,11181,5464],{"className":11182},[140],[64,11184],{"className":11185,"style":136},[135],[64,11187,11189,11192],{"className":11188},[120],[64
,11190],{"className":11191,"style":406},[124],[64,11193,537],{"className":11194},[129],", the opposite is true.",[12,11197,11198,11199,326,11202,11230,11231,11259,11260,11263],{},"To ",[21,11200,11201],{},"minimize",[64,11203,11205,11218],{"className":11204},[71],[64,11206,11208],{"className":11207},[75],[77,11209,11210],{"xmlns":79},[82,11211,11212,11216],{},[85,11213,11214],{},[88,11215,1544],{},[108,11217,1544],{"encoding":110},[64,11219,11221],{"className":11220,"ariaHidden":116},[115],[64,11222,11224,11227],{"className":11223},[120],[64,11225],{"className":11226,"style":2169},[124],[64,11228,1544],{"className":11229,"style":1575},[129,130],", we should always move ",[64,11232,11234,11247],{"className":11233},[71],[64,11235,11237],{"className":11236},[75],[77,11238,11239],{"xmlns":79},[82,11240,11241,11245],{},[85,11242,11243],{},[88,11244,100],{},[108,11246,100],{"encoding":110},[64,11248,11250],{"className":11249,"ariaHidden":116},[115],[64,11251,11253,11256],{"className":11252},[120],[64,11254],{"className":11255,"style":212},[124],[64,11257,100],{"className":11258},[129,130]," in the direction ",[21,11261,11262],{},"opposite"," to the derivative:",[64,11265,11267],{"className":11266},[67],[64,11268,11270,11321],{"className":11269},[71],[64,11271,11273],{"className":11272},[75],[77,11274,11275],{"xmlns":79,"display":80},[82,11276,11277,11318],{},[85,11278,11279,11286,11288,11295,11297,11300,11302,11308,11310,11316],{},[702,11280,11281,11283],{},[88,11282,100],{},[10330,11284,11285],{},"new",[92,11287,94],{},[702,11289,11290,11292],{},[88,11291,100],{},[10330,11293,11294],{},"old",[92,11296,1032],{},[88,11298,11299],{},"α",[92,11301,6030],{},[1554,11303,11304,11306],{},[88,11305,1544],{},[92,11307,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,11309,700],{"stretchy":699},[702,11311,11312,11314],{},[88,11313,100],{},[10330,11315,11294],{},[92,11317,719],{"stretchy":699},[108,11319,11320],{"encoding":110},"x_{\\text{new}} = x_{\\text{old}} - \\alpha 
\\cdot f'(x_{\\text{old}})",[64,11322,11324,11388,11451,11470],{"className":11323,"ariaHidden":116},[115],[64,11325,11327,11331,11379,11382,11385],{"className":11326},[120],[64,11328],{"className":11329,"style":11330},[124],"height:0.5806em;vertical-align:-0.15em;",[64,11332,11334,11337],{"className":11333},[129],[64,11335,100],{"className":11336},[129,130],[64,11338,11340],{"className":11339},[746],[64,11341,11343,11371],{"className":11342},[750,751],[64,11344,11346,11368],{"className":11345},[755],[64,11347,11350],{"className":11348,"style":11349},[759],"height:0.1514em;",[64,11351,11352,11355],{"style":763},[64,11353],{"className":11354,"style":768},[767],[64,11356,11358],{"className":11357},[772,773,774,775],[64,11359,11361],{"className":11360},[129,775],[64,11362,11365],{"className":11363},[129,11364,775],"text",[64,11366,11285],{"className":11367},[129,775],[64,11369,783],{"className":11370},[782],[64,11372,11374],{"className":11373},[755],[64,11375,11377],{"className":11376,"style":790},[759],[64,11378],{},[64,11380],{"className":11381,"style":136},[135],[64,11383,94],{"className":11384},[140],[64,11386],{"className":11387,"style":136},[135],[64,11389,11391,11395,11442,11445,11448],{"className":11390},[120],[64,11392],{"className":11393,"style":11394},[124],"height:0.7333em;vertical-align:-0.15em;",[64,11396,11398,11401],{"className":11397},[129],[64,11399,100],{"className":11400},[129,130],[64,11402,11404],{"className":11403},[746],[64,11405,11407,11434],{"className":11406},[750,751],[64,11408,11410,11431],{"className":11409},[755],[64,11411,11414],{"className":11412,"style":11413},[759],"height:0.3361em;",[64,11415,11416,11419],{"style":763},[64,11417],{"className":11418,"style":768},[767],[64,11420,11422],{"className":11421},[772,773,774,775],[64,11423,11425],{"className":11424},[129,775],[64,11426,11428],{"className":11427},[129,11364,775],[64,11429,11294],{"className":11430},[129,775],[64,11432,783],{"className":11433},[782],[64,11435,11437],{"className"
:11436},[755],[64,11438,11440],{"className":11439,"style":790},[759],[64,11441],{},[64,11443],{"className":11444,"style":160},[135],[64,11446,1032],{"className":11447},[164],[64,11449],{"className":11450,"style":160},[135],[64,11452,11454,11457,11461,11464,11467],{"className":11453},[120],[64,11455],{"className":11456,"style":6165},[124],[64,11458,11299],{"className":11459,"style":11460},[129,130],"margin-right:0.0037em;",[64,11462],{"className":11463,"style":160},[135],[64,11465,6030],{"className":11466},[164],[64,11468],{"className":11469,"style":160},[135],[64,11471,11473,11476,11508,11511,11557],{"className":11472},[120],[64,11474],{"className":11475,"style":7055},[124],[64,11477,11479,11482],{"className":11478},[129],[64,11480,1544],{"className":11481,"style":1575},[129,130],[64,11483,11485],{"className":11484},[746],[64,11486,11488],{"className":11487},[750],[64,11489,11491],{"className":11490},[755],[64,11492,11494],{"className":11493,"style":5055},[759],[64,11495,11496,11499],{"style":1621},[64,11497],{"className":11498,"style":768},[767],[64,11500,11502],{"className":11501},[772,773,774,775],[64,11503,11505],{"className":11504},[129,775],[64,11506,4731],{"className":11507},[129,775],[64,11509,700],{"className":11510},[736],[64,11512,11514,11517],{"className":11513},[129],[64,11515,100],{"className":11516},[129,130],[64,11518,11520],{"className":11519},[746],[64,11521,11523,11549],{"className":11522},[750,751],[64,11524,11526,11546],{"className":11525},[755],[64,11527,11529],{"className":11528,"style":11413},[759],[64,11530,11531,11534],{"style":763},[64,11532],{"className":11533,"style":768},[767],[64,11535,11537],{"className":11536},[772,773,774,775],[64,11538,11540],{"className":11539},[129,775],[64,11541,11543],{"className":11542},[129,11364,775],[64,11544,11294],{"className":11545},[129,775],[64,11547,783],{"className":11548},[782],[64,11550,11552],{"className":11551},[755],[64,11553,11555],{"className":11554,"style":790},[759],[64,11556],{},[64,11558,7
19],{"className":11559},[845],[12,11561,11562,11563,11592,11593,2055],{},"Where ",[64,11564,11566,11580],{"className":11565},[71],[64,11567,11569],{"className":11568},[75],[77,11570,11571],{"xmlns":79},[82,11572,11573,11577],{},[85,11574,11575],{},[88,11576,11299],{},[108,11578,11579],{"encoding":110},"\\alpha",[64,11581,11583],{"className":11582,"ariaHidden":116},[115],[64,11584,11586,11589],{"className":11585},[120],[64,11587],{"className":11588,"style":212},[124],[64,11590,11299],{"className":11591,"style":11460},[129,130]," is a small step size. Notice anything? This is exactly the ",[21,11594,11595],{},"gradient descent update rule",[26,11597],{},[51,11599,11601],{"id":11600},"part-7-from-one-variable-to-many-the-gradient","Part 7 — From One Variable to Many: The Gradient",[12,11603,11604,11605,11608,11609,11640,11641,11840,11841,11844],{},"Machine learning models have not one parameter, but ",[21,11606,11607],{},"millions",". A loss function ",[64,11610,11612,11626],{"className":11611},[71],[64,11613,11615],{"className":11614},[75],[77,11616,11617],{"xmlns":79},[82,11618,11619,11624],{},[85,11620,11621],{},[88,11622,11623],{},"J",[108,11625,11623],{"encoding":110},[64,11627,11629],{"className":11628,"ariaHidden":116},[115],[64,11630,11632,11636],{"className":11631},[120],[64,11633],{"className":11634,"style":11635},[124],"height:0.6833em;",[64,11637,11623],{"className":11638,"style":11639},[129,130],"margin-right:0.09618em;"," might depend on weights 
",[64,11642,11644,11684],{"className":11643},[71],[64,11645,11647],{"className":11646},[75],[77,11648,11649],{"xmlns":79},[82,11650,11651,11681],{},[85,11652,11653,11660,11662,11668,11670,11673,11675],{},[702,11654,11655,11658],{},[88,11656,11657],{},"w",[344,11659,353],{},[92,11661,710],{"separator":116},[702,11663,11664,11666],{},[88,11665,11657],{},[344,11667,346],{},[92,11669,710],{"separator":116},[92,11671,11672],{},"…",[92,11674,710],{"separator":116},[702,11676,11677,11679],{},[88,11678,11657],{},[88,11680,5923],{},[108,11682,11683],{"encoding":110},"w_1, w_2, \\ldots, w_n",[64,11685,11687],{"className":11686,"ariaHidden":116},[115],[64,11688,11690,11693,11735,11738,11741,11781,11784,11787,11791,11794,11797,11800],{"className":11689},[120],[64,11691],{"className":11692,"style":125},[124],[64,11694,11696,11700],{"className":11695},[129],[64,11697,11657],{"className":11698,"style":11699},[129,130],"margin-right:0.02691em;",[64,11701,11703],{"className":11702},[746],[64,11704,11706,11727],{"className":11705},[750,751],[64,11707,11709,11724],{"className":11708},[755],[64,11710,11712],{"className":11711,"style":760},[759],[64,11713,11715,11718],{"style":11714},"top:-2.55em;margin-left:-0.0269em;margin-right:0.05em;",[64,11716],{"className":11717,"style":768},[767],[64,11719,11721],{"className":11720},[772,773,774,775],[64,11722,353],{"className":11723},[129,775],[64,11725,783],{"className":11726},[782],[64,11728,11730],{"className":11729},[755],[64,11731,11733],{"className":11732,"style":790},[759],[64,11734],{},[64,11736,710],{"className":11737},[796],[64,11739],{"className":11740,"style":800},[135],[64,11742,11744,11747],{"className":11743},[129],[64,11745,11657],{"className":11746,"style":11699},[129,130],[64,11748,11750],{"className":11749},[746],[64,11751,11753,11773],{"className":11752},[750,751],[64,11754,11756,11770],{"className":11755},[755],[64,11757,11759],{"className":11758,"style":760},[759],[64,11760,11761,11764],{"style":11714},[64,11762],{"classNa
me":11763,"style":768},[767],[64,11765,11767],{"className":11766},[772,773,774,775],[64,11768,346],{"className":11769},[129,775],[64,11771,783],{"className":11772},[782],[64,11774,11776],{"className":11775},[755],[64,11777,11779],{"className":11778,"style":790},[759],[64,11780],{},[64,11782,710],{"className":11783},[796],[64,11785],{"className":11786,"style":800},[135],[64,11788,11672],{"className":11789},[11790],"minner",[64,11792],{"className":11793,"style":800},[135],[64,11795,710],{"className":11796},[796],[64,11798],{"className":11799,"style":800},[135],[64,11801,11803,11806],{"className":11802},[129],[64,11804,11657],{"className":11805,"style":11699},[129,130],[64,11807,11809],{"className":11808},[746],[64,11810,11812,11832],{"className":11811},[750,751],[64,11813,11815,11829],{"className":11814},[755],[64,11816,11818],{"className":11817,"style":11349},[759],[64,11819,11820,11823],{"style":11714},[64,11821],{"className":11822,"style":768},[767],[64,11824,11826],{"className":11825},[772,773,774,775],[64,11827,5923],{"className":11828},[129,130,775],[64,11830,783],{"className":11831},[782],[64,11833,11835],{"className":11834},[755],[64,11836,11838],{"className":11837,"style":790},[759],[64,11839],{},". 
We need derivatives with respect to ",[16,11842,11843],{},"each"," parameter simultaneously.",[56,11846,11848],{"id":11847},"partial-derivatives","Partial Derivatives",[12,11850,11851,11852,11855],{},"A ",[21,11853,11854],{},"partial derivative"," holds all other variables constant and differentiates with respect to one:",[64,11857,11859],{"className":11858},[67],[64,11860,11862,11909],{"className":11861},[71],[64,11863,11865],{"className":11864},[75],[77,11866,11867],{"xmlns":79,"display":80},[82,11868,11869,11906],{},[85,11870,11871,11891,11894,11897,11903],{},[1002,11872,11873,11880],{},[85,11874,11875,11878],{},[88,11876,11877],{"mathvariant":1008},"∂",[88,11879,11623],{},[85,11881,11882,11884],{},[88,11883,11877],{"mathvariant":1008},[702,11885,11886,11888],{},[88,11887,11657],{},[88,11889,11890],{},"i",[135,11892],{"width":11893},"1em",[10330,11895,11896],{},"= \"how much does J change if we nudge only ",[702,11898,11899,11901],{},[88,11900,11657],{},[88,11902,11890],{},[10330,11904,11905],{},"?\"",[108,11907,11908],{"encoding":110},"\\frac{\\partial J}{\\partial w_i} \\quad \\text{= \"how much does J change if we nudge only } w_i 
\\text{?\"}",[64,11910,11912],{"className":11911,"ariaHidden":116},[115],[64,11913,11915,11919,12026,12030,12036,12076],{"className":11914},[120],[64,11916],{"className":11917,"style":11918},[124],"height:2.2074em;vertical-align:-0.836em;",[64,11920,11922,11925,12023],{"className":11921},[129],[64,11923],{"className":11924},[736,1092],[64,11926,11928],{"className":11927},[1002],[64,11929,11931,12015],{"className":11930},[750,751],[64,11932,11934,12012],{"className":11933},[755],[64,11935,11937,11990,11998],{"className":11936,"style":2268},[759],[64,11938,11939,11942],{"style":1108},[64,11940],{"className":11941,"style":1112},[767],[64,11943,11945,11949],{"className":11944},[129],[64,11946,11877],{"className":11947,"style":11948},[129],"margin-right:0.05556em;",[64,11950,11952,11955],{"className":11951},[129],[64,11953,11657],{"className":11954,"style":11699},[129,130],[64,11956,11958],{"className":11957},[746],[64,11959,11961,11982],{"className":11960},[750,751],[64,11962,11964,11979],{"className":11963},[755],[64,11965,11968],{"className":11966,"style":11967},[759],"height:0.3117em;",[64,11969,11970,11973],{"style":11714},[64,11971],{"className":11972,"style":768},[767],[64,11974,11976],{"className":11975},[772,773,774,775],[64,11977,11890],{"className":11978},[129,130,775],[64,11980,783],{"className":11981},[782],[64,11983,11985],{"className":11984},[755],[64,11986,11988],{"className":11987,"style":790},[759],[64,11989],{},[64,11991,11992,11995],{"style":1124},[64,11993],{"className":11994,"style":1112},[767],[64,11996],{"className":11997,"style":1132},[1131],[64,11999,12000,12003],{"style":1135},[64,12001],{"className":12002,"style":1112},[767],[64,12004,12006,12009],{"className":12005},[129],[64,12007,11877],{"className":12008,"style":11948},[129],[64,12010,11623],{"className":12011,"style":11639},[129,130],[64,12013,783],{"className":12014},[782],[64,12016,12018],{"className":12017},[755],[64,12019,12021],{"className":12020,"style":1409},[759],[64,12022],{},[64
,12024],{"className":12025},[845,1092],[64,12027],{"className":12028,"style":12029},[135],"margin-right:1em;",[64,12031,12033],{"className":12032},[129,11364],[64,12034,11896],{"className":12035},[129],[64,12037,12039,12042],{"className":12038},[129],[64,12040,11657],{"className":12041,"style":11699},[129,130],[64,12043,12045],{"className":12044},[746],[64,12046,12048,12068],{"className":12047},[750,751],[64,12049,12051,12065],{"className":12050},[755],[64,12052,12054],{"className":12053,"style":11967},[759],[64,12055,12056,12059],{"style":11714},[64,12057],{"className":12058,"style":768},[767],[64,12060,12062],{"className":12061},[772,773,774,775],[64,12063,11890],{"className":12064},[129,130,775],[64,12066,783],{"className":12067},[782],[64,12069,12071],{"className":12070},[755],[64,12072,12074],{"className":12073,"style":790},[759],[64,12075],{},[64,12077,12079],{"className":12078},[129,11364],[64,12080,11905],{"className":12081},[129],[12,12083,12084,326,12086],{},[21,12085,325],{},[64,12087,12089,12158],{"className":12088},[71],[64,12090,12092],{"className":12091},[75],[77,12093,12094],{"xmlns":79},[82,12095,12096,12155],{},[85,12097,12098,12100,12102,12108,12110,12116,12118,12120,12129,12131,12133,12139,12145,12147],{},[88,12099,11623],{},[92,12101,700],{"stretchy":699},[702,12103,12104,12106],{},[88,12105,11657],{},[344,12107,353],{},[92,12109,710],{"separator":116},[702,12111,12112,12114],{},[88,12113,11657],{},[344,12115,346],{},[92,12117,719],{"stretchy":699},[92,12119,94],{},[12121,12122,12123,12125,12127],"msubsup",{},[88,12124,11657],{},[344,12126,353],{},[344,12128,346],{},[92,12130,103],{},[344,12132,546],{},[702,12134,12135,12137],{},[88,12136,11657],{},[344,12138,353],{},[702,12140,12141,12143],{},[88,12142,11657],{},[344,12144,346],{},[92,12146,103],{},[12121,12148,12149,12151,12153],{},[88,12150,11657],{},[344,12152,346],{},[344,12154,346],{},[108,12156,12157],{"encoding":110},"J(w_1, w_2) = w_1^2 + 3w_1 w_2 + 
w_2^2",[64,12159,12161,12271,12340,12439],{"className":12160,"ariaHidden":116},[115],[64,12162,12164,12167,12170,12173,12213,12216,12219,12259,12262,12265,12268],{"className":12163},[120],[64,12165],{"className":12166,"style":732},[124],[64,12168,11623],{"className":12169,"style":11639},[129,130],[64,12171,700],{"className":12172},[736],[64,12174,12176,12179],{"className":12175},[129],[64,12177,11657],{"className":12178,"style":11699},[129,130],[64,12180,12182],{"className":12181},[746],[64,12183,12185,12205],{"className":12184},[750,751],[64,12186,12188,12202],{"className":12187},[755],[64,12189,12191],{"className":12190,"style":760},[759],[64,12192,12193,12196],{"style":11714},[64,12194],{"className":12195,"style":768},[767],[64,12197,12199],{"className":12198},[772,773,774,775],[64,12200,353],{"className":12201},[129,775],[64,12203,783],{"className":12204},[782],[64,12206,12208],{"className":12207},[755],[64,12209,12211],{"className":12210,"style":790},[759],[64,12212],{},[64,12214,710],{"className":12215},[796],[64,12217],{"className":12218,"style":800},[135],[64,12220,12222,12225],{"className":12221},[129],[64,12223,11657],{"className":12224,"style":11699},[129,130],[64,12226,12228],{"className":12227},[746],[64,12229,12231,12251],{"className":12230},[750,751],[64,12232,12234,12248],{"className":12233},[755],[64,12235,12237],{"className":12236,"style":760},[759],[64,12238,12239,12242],{"style":11714},[64,12240],{"className":12241,"style":768},[767],[64,12243,12245],{"className":12244},[772,773,774,775],[64,12246,346],{"className":12247},[129,775],[64,12249,783],{"className":12250},[782],[64,12252,12254],{"className":12253},[755],[64,12255,12257],{"className":12256,"style":790},[759],[64,12258],{},[64,12260,719],{"className":12261},[845],[64,12263],{"className":12264,"style":136},[135],[64,12266,94],{"className":12267},[140],[64,12269],{"className":12270,"style":136},[135],[64,12272,12274,12278,12331,12334,12337],{"className":12273},[120],[64,12275],{"className"
:12276,"style":12277},[124],"height:1.0622em;vertical-align:-0.2481em;",[64,12279,12281,12284],{"className":12280},[129],[64,12282,11657],{"className":12283,"style":11699},[129,130],[64,12285,12287],{"className":12286},[746],[64,12288,12290,12322],{"className":12289},[750,751],[64,12291,12293,12319],{"className":12292},[755],[64,12294,12296,12308],{"className":12295,"style":1735},[759],[64,12297,12299,12302],{"style":12298},"top:-2.4519em;margin-left:-0.0269em;margin-right:0.05em;",[64,12300],{"className":12301,"style":768},[767],[64,12303,12305],{"className":12304},[772,773,774,775],[64,12306,353],{"className":12307},[129,775],[64,12309,12310,12313],{"style":1756},[64,12311],{"className":12312,"style":768},[767],[64,12314,12316],{"className":12315},[772,773,774,775],[64,12317,346],{"className":12318},[129,775],[64,12320,783],{"className":12321},[782],[64,12323,12325],{"className":12324},[755],[64,12326,12329],{"className":12327,"style":12328},[759],"height:0.2481em;",[64,12330],{},[64,12332],{"className":12333,"style":160},[135],[64,12335,103],{"className":12336},[164],[64,12338],{"className":12339,"style":160},[135],[64,12341,12343,12347,12350,12390,12430,12433,12436],{"className":12342},[120],[64,12344],{"className":12345,"style":12346},[124],"height:0.7944em;vertical-align:-0.15em;",[64,12348,546],{"className":12349},[129],[64,12351,12353,12356],{"className":12352},[129],[64,12354,11657],{"className":12355,"style":11699},[129,130],[64,12357,12359],{"className":12358},[746],[64,12360,12362,12382],{"className":12361},[750,751],[64,12363,12365,12379],{"className":12364},[755],[64,12366,12368],{"className":12367,"style":760},[759],[64,12369,12370,12373],{"style":11714},[64,12371],{"className":12372,"style":768},[767],[64,12374,12376],{"className":12375},[772,773,774,775],[64,12377,353],{"className":12378},[129,775],[64,12380,783],{"className":12381},[782],[64,12383,12385],{"className":12384},[755],[64,12386,12388],{"className":12387,"style":790},[759],[64,12389],{},
[64,12391,12393,12396],{"className":12392},[129],[64,12394,11657],{"className":12395,"style":11699},[129,130],[64,12397,12399],{"className":12398},[746],[64,12400,12402,12422],{"className":12401},[750,751],[64,12403,12405,12419],{"className":12404},[755],[64,12406,12408],{"className":12407,"style":760},[759],[64,12409,12410,12413],{"style":11714},[64,12411],{"className":12412,"style":768},[767],[64,12414,12416],{"className":12415},[772,773,774,775],[64,12417,346],{"className":12418},[129,775],[64,12420,783],{"className":12421},[782],[64,12423,12425],{"className":12424},[755],[64,12426,12428],{"className":12427,"style":790},[759],[64,12429],{},[64,12431],{"className":12432,"style":160},[135],[64,12434,103],{"className":12435},[164],[64,12437],{"className":12438,"style":160},[135],[64,12440,12442,12445],{"className":12441},[120],[64,12443],{"className":12444,"style":12277},[124],[64,12446,12448,12451],{"className":12447},[129],[64,12449,11657],{"className":12450,"style":11699},[129,130],[64,12452,12454],{"className":12453},[746],[64,12455,12457,12488],{"className":12456},[750,751],[64,12458,12460,12485],{"className":12459},[755],[64,12461,12463,12474],{"className":12462,"style":1735},[759],[64,12464,12465,12468],{"style":12298},[64,12466],{"className":12467,"style":768},[767],[64,12469,12471],{"className":12470},[772,773,774,775],[64,12472,346],{"className":12473},[129,775],[64,12475,12476,12479],{"style":1756},[64,12477],{"className":12478,"style":768},[767],[64,12480,12482],{"className":12481},[772,773,774,775],[64,12483,346],{"className":12484},[129,775],[64,12486,783],{"className":12487},[782],[64,12489,12491],{"className":12490},[755],[64,12492,12494],{"className":12493,"style":12328},[759],[64,12495],{},[64,12497,12499],{"className":12498},[67],[64,12500,12502,12593],{"className":12501},[71],[64,12503,12505],{"className":12504},[75],[77,12506,12507],{"xmlns":79,"display":80},[82,12508,12509,12590],{},[85,12510,12511,12529,12531,12533,12539,12541,12543,12549,1255
2,12570,12572,12574,12580,12582,12584],{},[1002,12512,12513,12519],{},[85,12514,12515,12517],{},[88,12516,11877],{"mathvariant":1008},[88,12518,11623],{},[85,12520,12521,12523],{},[88,12522,11877],{"mathvariant":1008},[702,12524,12525,12527],{},[88,12526,11657],{},[344,12528,353],{},[92,12530,94],{},[344,12532,346],{},[702,12534,12535,12537],{},[88,12536,11657],{},[344,12538,353],{},[92,12540,103],{},[344,12542,546],{},[702,12544,12545,12547],{},[88,12546,11657],{},[344,12548,346],{},[135,12550],{"width":12551},"2em",[1002,12553,12554,12560],{},[85,12555,12556,12558],{},[88,12557,11877],{"mathvariant":1008},[88,12559,11623],{},[85,12561,12562,12564],{},[88,12563,11877],{"mathvariant":1008},[702,12565,12566,12568],{},[88,12567,11657],{},[344,12569,346],{},[92,12571,94],{},[344,12573,546],{},[702,12575,12576,12578],{},[88,12577,11657],{},[344,12579,353],{},[92,12581,103],{},[344,12583,346],{},[702,12585,12586,12588],{},[88,12587,11657],{},[344,12589,346],{},[108,12591,12592],{"encoding":110},"\\frac{\\partial J}{\\partial w_1} = 2w_1 + 3w_2 \\qquad \\frac{\\partial J}{\\partial w_2} = 3w_1 + 
2w_2",[64,12594,12596,12716,12774,12941,12999],{"className":12595,"ariaHidden":116},[115],[64,12597,12599,12602,12707,12710,12713],{"className":12598},[120],[64,12600],{"className":12601,"style":11918},[124],[64,12603,12605,12608,12704],{"className":12604},[129],[64,12606],{"className":12607},[736,1092],[64,12609,12611],{"className":12610},[1002],[64,12612,12614,12696],{"className":12613},[750,751],[64,12615,12617,12693],{"className":12616},[755],[64,12618,12620,12671,12679],{"className":12619,"style":2268},[759],[64,12621,12622,12625],{"style":1108},[64,12623],{"className":12624,"style":1112},[767],[64,12626,12628,12631],{"className":12627},[129],[64,12629,11877],{"className":12630,"style":11948},[129],[64,12632,12634,12637],{"className":12633},[129],[64,12635,11657],{"className":12636,"style":11699},[129,130],[64,12638,12640],{"className":12639},[746],[64,12641,12643,12663],{"className":12642},[750,751],[64,12644,12646,12660],{"className":12645},[755],[64,12647,12649],{"className":12648,"style":760},[759],[64,12650,12651,12654],{"style":11714},[64,12652],{"className":12653,"style":768},[767],[64,12655,12657],{"className":12656},[772,773,774,775],[64,12658,353],{"className":12659},[129,775],[64,12661,783],{"className":12662},[782],[64,12664,12666],{"className":12665},[755],[64,12667,12669],{"className":12668,"style":790},[759],[64,12670],{},[64,12672,12673,12676],{"style":1124},[64,12674],{"className":12675,"style":1112},[767],[64,12677],{"className":12678,"style":1132},[1131],[64,12680,12681,12684],{"style":1135},[64,12682],{"className":12683,"style":1112},[767],[64,12685,12687,12690],{"className":12686},[129],[64,12688,11877],{"className":12689,"style":11948},[129],[64,12691,11623],{"className":12692,"style":11639},[129,130],[64,12694,783],{"className":12695},[782],[64,12697,12699],{"className":12698},[755],[64,12700,12702],{"className":12701,"style":1409},[759],[64,12703],{},[64,12705],{"className":12706},[845,1092],[64,12708],{"className":12709,"style":136},[13
5],[64,12711,94],{"className":12712},[140],[64,12714],{"className":12715,"style":136},[135],[64,12717,12719,12722,12725,12765,12768,12771],{"className":12718},[120],[64,12720],{"className":12721,"style":12346},[124],[64,12723,346],{"className":12724},[129],[64,12726,12728,12731],{"className":12727},[129],[64,12729,11657],{"className":12730,"style":11699},[129,130],[64,12732,12734],{"className":12733},[746],[64,12735,12737,12757],{"className":12736},[750,751],[64,12738,12740,12754],{"className":12739},[755],[64,12741,12743],{"className":12742,"style":760},[759],[64,12744,12745,12748],{"style":11714},[64,12746],{"className":12747,"style":768},[767],[64,12749,12751],{"className":12750},[772,773,774,775],[64,12752,353],{"className":12753},[129,775],[64,12755,783],{"className":12756},[782],[64,12758,12760],{"className":12759},[755],[64,12761,12763],{"className":12762,"style":790},[759],[64,12764],{},[64,12766],{"className":12767,"style":160},[135],[64,12769,103],{"className":12770},[164],[64,12772],{"className":12773,"style":160},[135],[64,12775,12777,12780,12783,12823,12827,12932,12935,12938],{"className":12776},[120],[64,12778],{"className":12779,"style":11918},[124],[64,12781,546],{"className":12782},[129],[64,12784,12786,12789],{"className":12785},[129],[64,12787,11657],{"className":12788,"style":11699},[129,130],[64,12790,12792],{"className":12791},[746],[64,12793,12795,12815],{"className":12794},[750,751],[64,12796,12798,12812],{"className":12797},[755],[64,12799,12801],{"className":12800,"style":760},[759],[64,12802,12803,12806],{"style":11714},[64,12804],{"className":12805,"style":768},[767],[64,12807,12809],{"className":12808},[772,773,774,775],[64,12810,346],{"className":12811},[129,775],[64,12813,783],{"className":12814},[782],[64,12816,12818],{"className":12817},[755],[64,12819,12821],{"className":12820,"style":790},[759],[64,12822],{},[64,12824],{"className":12825,"style":12826},[135],"margin-right:2em;",[64,12828,12830,12833,12929],{"className":12829},[129]
,[64,12831],{"className":12832},[736,1092],[64,12834,12836],{"className":12835},[1002],[64,12837,12839,12921],{"className":12838},[750,751],[64,12840,12842,12918],{"className":12841},[755],[64,12843,12845,12896,12904],{"className":12844,"style":2268},[759],[64,12846,12847,12850],{"style":1108},[64,12848],{"className":12849,"style":1112},[767],[64,12851,12853,12856],{"className":12852},[129],[64,12854,11877],{"className":12855,"style":11948},[129],[64,12857,12859,12862],{"className":12858},[129],[64,12860,11657],{"className":12861,"style":11699},[129,130],[64,12863,12865],{"className":12864},[746],[64,12866,12868,12888],{"className":12867},[750,751],[64,12869,12871,12885],{"className":12870},[755],[64,12872,12874],{"className":12873,"style":760},[759],[64,12875,12876,12879],{"style":11714},[64,12877],{"className":12878,"style":768},[767],[64,12880,12882],{"className":12881},[772,773,774,775],[64,12883,346],{"className":12884},[129,775],[64,12886,783],{"className":12887},[782],[64,12889,12891],{"className":12890},[755],[64,12892,12894],{"className":12893,"style":790},[759],[64,12895],{},[64,12897,12898,12901],{"style":1124},[64,12899],{"className":12900,"style":1112},[767],[64,12902],{"className":12903,"style":1132},[1131],[64,12905,12906,12909],{"style":1135},[64,12907],{"className":12908,"style":1112},[767],[64,12910,12912,12915],{"className":12911},[129],[64,12913,11877],{"className":12914,"style":11948},[129],[64,12916,11623],{"className":12917,"style":11639},[129,130],[64,12919,783],{"className":12920},[782],[64,12922,12924],{"className":12923},[755],[64,12925,12927],{"className":12926,"style":1409},[759],[64,12928],{},[64,12930],{"className":12931},[845,1092],[64,12933],{"className":12934,"style":136},[135],[64,12936,94],{"className":12937},[140],[64,12939],{"className":12940,"style":136},[135],[64,12942,12944,12947,12950,12990,12993,12996],{"className":12943},[120],[64,12945],{"className":12946,"style":12346},[124],[64,12948,546],{"className":12949},[129],[64,1
2951,12953,12956],{"className":12952},[129],[64,12954,11657],{"className":12955,"style":11699},[129,130],[64,12957,12959],{"className":12958},[746],[64,12960,12962,12982],{"className":12961},[750,751],[64,12963,12965,12979],{"className":12964},[755],[64,12966,12968],{"className":12967,"style":760},[759],[64,12969,12970,12973],{"style":11714},[64,12971],{"className":12972,"style":768},[767],[64,12974,12976],{"className":12975},[772,773,774,775],[64,12977,353],{"className":12978},[129,775],[64,12980,783],{"className":12981},[782],[64,12983,12985],{"className":12984},[755],[64,12986,12988],{"className":12987,"style":790},[759],[64,12989],{},[64,12991],{"className":12992,"style":160},[135],[64,12994,103],{"className":12995},[164],[64,12997],{"className":12998,"style":160},[135],[64,13000,13002,13005,13008],{"className":13001},[120],[64,13003],{"className":13004,"style":12346},[124],[64,13006,346],{"className":13007},[129],[64,13009,13011,13014],{"className":13010},[129],[64,13012,11657],{"className":13013,"style":11699},[129,130],[64,13015,13017],{"className":13016},[746],[64,13018,13020,13040],{"className":13019},[750,751],[64,13021,13023,13037],{"className":13022},[755],[64,13024,13026],{"className":13025,"style":760},[759],[64,13027,13028,13031],{"style":11714},[64,13029],{"className":13030,"style":768},[767],[64,13032,13034],{"className":13033},[772,773,774,775],[64,13035,346],{"className":13036},[129,775],[64,13038,783],{"className":13039},[782],[64,13041,13043],{"className":13042},[755],[64,13044,13046],{"className":13045,"style":790},[759],[64,13047],{},[56,13049,13051],{"id":13050},"the-gradient-vector","The Gradient Vector",[12,13053,13054,13055,326,13058,2650],{},"Stack all partial derivatives into a single vector — this is the 
",[21,13056,13057],{},"gradient",[64,13059,13061,13078],{"className":13060},[71],[64,13062,13064],{"className":13063},[75],[77,13065,13066],{"xmlns":79},[82,13067,13068,13075],{},[85,13069,13070,13073],{},[88,13071,13072],{"mathvariant":1008},"∇",[88,13074,11623],{},[108,13076,13077],{"encoding":110},"\\nabla J",[64,13079,13081],{"className":13080,"ariaHidden":116},[115],[64,13082,13084,13087,13090],{"className":13083},[120],[64,13085],{"className":13086,"style":11635},[124],[64,13088,13072],{"className":13089},[129],[64,13091,11623],{"className":13092,"style":11639},[129,130],[64,13094,13096],{"className":13095},[67],[64,13097,13099,13250],{"className":13098},[71],[64,13100,13102],{"className":13101},[75],[77,13103,13104],{"xmlns":79,"display":80},[82,13105,13106,13247],{},[85,13107,13108,13110,13112,13114,13120,13122,13128,13130,13132,13134,13140,13142,13144],{},[88,13109,13072],{"mathvariant":1008},[88,13111,11623],{},[92,13113,700],{"stretchy":699},[702,13115,13116,13118],{},[88,13117,11657],{},[344,13119,353],{},[92,13121,710],{"separator":116},[702,13123,13124,13126],{},[88,13125,11657],{},[344,13127,346],{},[92,13129,710],{"separator":116},[92,13131,11672],{},[92,13133,710],{"separator":116},[702,13135,13136,13138],{},[88,13137,11657],{},[88,13139,5923],{},[92,13141,719],{"stretchy":699},[92,13143,94],{},[85,13145,13146,13148,13245],{},[92,13147,6876],{"fence":116},[13149,13150,13153,13179,13203,13221],"mtable",{"rowspacing":13151,"columnalign":13152,"columnspacing":11893},"0.16em","center",[13154,13155,13156],"mtr",{},[13157,13158,13159],"mtd",{},[4928,13160,13161],{"scriptlevel":537,"displaystyle":699},[1002,13162,13163,13169],{},[85,13164,13165,13167],{},[88,13166,11877],{"mathvariant":1008},[88,13168,11623],{},[85,13170,13171,13173],{},[88,13172,11877],{"mathvariant":1008},[702,13174,13175,13177],{},[88,13176,11657],{},[344,13178,353],{},[13154,13180,13181],{},[13157,13182,13183],{},[4928,13184,13185],{"scriptlevel":537,"displaystyle":699},[1002,13186,131
87,13193],{},[85,13188,13189,13191],{},[88,13190,11877],{"mathvariant":1008},[88,13192,11623],{},[85,13194,13195,13197],{},[88,13196,11877],{"mathvariant":1008},[702,13198,13199,13201],{},[88,13200,11657],{},[344,13202,346],{},[13154,13204,13205],{},[13157,13206,13207],{},[4928,13208,13209],{"scriptlevel":537,"displaystyle":699},[85,13210,13211,13214],{},[88,13212,13213],{"mathvariant":1008},"⋮",[13215,13216,13217],"mpadded",{"height":4730,"voffset":4730},[135,13218],{"mathbackground":13219,"width":4730,"height":13220},"black","1.5em",[13154,13222,13223],{},[13157,13224,13225],{},[4928,13226,13227],{"scriptlevel":537,"displaystyle":699},[1002,13228,13229,13235],{},[85,13230,13231,13233],{},[88,13232,11877],{"mathvariant":1008},[88,13234,11623],{},[85,13236,13237,13239],{},[88,13238,11877],{"mathvariant":1008},[702,13240,13241,13243],{},[88,13242,11657],{},[88,13244,5923],{},[92,13246,6892],{"fence":116},[108,13248,13249],{"encoding":110},"\\nabla J(w_1, w_2, \\ldots, w_n) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial w_1} \\\\[4pt]\n\\frac{\\partial J}{\\partial w_2} \\\\\n\\vdots \\\\[4pt]\n\\frac{\\partial J}{\\partial 
w_n}\n\\end{bmatrix}",[64,13251,13253,13424],{"className":13252,"ariaHidden":116},[115],[64,13254,13256,13259,13262,13265,13268,13308,13311,13314,13354,13357,13360,13363,13366,13369,13372,13412,13415,13418,13421],{"className":13255},[120],[64,13257],{"className":13258,"style":732},[124],[64,13260,13072],{"className":13261},[129],[64,13263,11623],{"className":13264,"style":11639},[129,130],[64,13266,700],{"className":13267},[736],[64,13269,13271,13274],{"className":13270},[129],[64,13272,11657],{"className":13273,"style":11699},[129,130],[64,13275,13277],{"className":13276},[746],[64,13278,13280,13300],{"className":13279},[750,751],[64,13281,13283,13297],{"className":13282},[755],[64,13284,13286],{"className":13285,"style":760},[759],[64,13287,13288,13291],{"style":11714},[64,13289],{"className":13290,"style":768},[767],[64,13292,13294],{"className":13293},[772,773,774,775],[64,13295,353],{"className":13296},[129,775],[64,13298,783],{"className":13299},[782],[64,13301,13303],{"className":13302},[755],[64,13304,13306],{"className":13305,"style":790},[759],[64,13307],{},[64,13309,710],{"className":13310},[796],[64,13312],{"className":13313,"style":800},[135],[64,13315,13317,13320],{"className":13316},[129],[64,13318,11657],{"className":13319,"style":11699},[129,130],[64,13321,13323],{"className":13322},[746],[64,13324,13326,13346],{"className":13325},[750,751],[64,13327,13329,13343],{"className":13328},[755],[64,13330,13332],{"className":13331,"style":760},[759],[64,13333,13334,13337],{"style":11714},[64,13335],{"className":13336,"style":768},[767],[64,13338,13340],{"className":13339},[772,773,774,775],[64,13341,346],{"className":13342},[129,775],[64,13344,783],{"className":13345},[782],[64,13347,13349],{"className":13348},[755],[64,13350,13352],{"className":13351,"style":790},[759],[64,13353],{},[64,13355,710],{"className":13356},[796],[64,13358],{"className":13359,"style":800},[135],[64,13361,11672],{"className":13362},[11790],[64,13364],{"className":13365,"style":80
0},[135],[64,13367,710],{"className":13368},[796],[64,13370],{"className":13371,"style":800},[135],[64,13373,13375,13378],{"className":13374},[129],[64,13376,11657],{"className":13377,"style":11699},[129,130],[64,13379,13381],{"className":13380},[746],[64,13382,13384,13404],{"className":13383},[750,751],[64,13385,13387,13401],{"className":13386},[755],[64,13388,13390],{"className":13389,"style":11349},[759],[64,13391,13392,13395],{"style":11714},[64,13393],{"className":13394,"style":768},[767],[64,13396,13398],{"className":13397},[772,773,774,775],[64,13399,5923],{"className":13400},[129,130,775],[64,13402,783],{"className":13403},[782],[64,13405,13407],{"className":13406},[755],[64,13408,13410],{"className":13409,"style":790},[759],[64,13411],{},[64,13413,719],{"className":13414},[845],[64,13416],{"className":13417,"style":136},[135],[64,13419,94],{"className":13420},[140],[64,13422],{"className":13423,"style":136},[135],[64,13425,13427,13431],{"className":13426},[120],[64,13428],{"className":13429,"style":13430},[124],"height:6.6em;vertical-align:-3.05em;",[64,13432,13434,13485,13904],{"className":13433},[11790],[64,13435,13437],{"className":13436},[736],[64,13438,13442],{"className":13439},[13440,13441],"delimsizing","mult",[64,13443,13445,13476],{"className":13444},[750,751],[64,13446,13448,13473],{"className":13447},[755],[64,13449,13452],{"className":13450,"style":13451},[759],"height:3.55em;",[64,13453,13455,13459],{"style":13454},"top:-5.55em;",[64,13456],{"className":13457,"style":13458},[767],"height:8.6em;",[64,13460,13462],{"style":13461},"width:0.667em;height:6.600em;",[13463,13464,13469],"svg",{"xmlns":13465,"width":13466,"height":13467,"viewBox":13468},"http:\u002F\u002Fwww.w3.org\u002F2000\u002Fsvg","0.667em","6.600em","0 0 667 6600",[13470,13471],"path",{"d":13472},"M403 1759 V84 H666 V0 H319 V1759 v3000 v1759 h347 v-84\nH403z M403 1759 V0 H319 V1759 v3000 v1759 
h84z",[64,13474,783],{"className":13475},[782],[64,13477,13479],{"className":13478},[755],[64,13480,13483],{"className":13481,"style":13482},[759],"height:3.05em;",[64,13484],{},[64,13486,13488],{"className":13487},[129],[64,13489,13491],{"className":13490},[13149],[64,13492,13495],{"className":13493},[13494],"col-align-c",[64,13496,13498,13895],{"className":13497},[750,751],[64,13499,13501,13892],{"className":13500},[755],[64,13502,13505,13631,13751,13771],{"className":13503,"style":13504},[759],"height:3.5253em;",[64,13506,13508,13512],{"style":13507},"top:-6.3327em;",[64,13509],{"className":13510,"style":13511},[767],"height:3.6875em;",[64,13513,13515],{"className":13514},[129],[64,13516,13518,13521,13628],{"className":13517},[129],[64,13519],{"className":13520},[736,1092],[64,13522,13524],{"className":13523},[1002],[64,13525,13527,13619],{"className":13526},[750,751],[64,13528,13530,13616],{"className":13529},[755],[64,13531,13534,13591,13599],{"className":13532,"style":13533},[759],"height:0.8801em;",[64,13535,13536,13539],{"style":1994},[64,13537],{"className":13538,"style":1112},[767],[64,13540,13542],{"className":13541},[772,773,774,775],[64,13543,13545,13548],{"className":13544},[129,775],[64,13546,11877],{"className":13547,"style":11948},[129,775],[64,13549,13551,13554],{"className":13550},[129,775],[64,13552,11657],{"className":13553,"style":11699},[129,130,775],[64,13555,13557],{"className":13556},[746],[64,13558,13560,13582],{"className":13559},[750,751],[64,13561,13563,13579],{"className":13562},[755],[64,13564,13567],{"className":13565,"style":13566},[759],"height:0.3173em;",[64,13568,13570,13573],{"style":13569},"top:-2.357em;margin-left:-0.0269em;margin-right:0.0714em;",[64,13571],{"className":13572,"style":9696},[767],[64,13574,13576],{"className":13575},[772,9700,9701,775],[64,13577,353],{"className":13578},[129,775],[64,13580,783],{"className":13581},[782],[64,13583,13585],{"className":13584},[755],[64,13586,13589],{"className":13587,"style":1358
8},[759],"height:0.143em;",[64,13590],{},[64,13592,13593,13596],{"style":1124},[64,13594],{"className":13595,"style":1112},[767],[64,13597],{"className":13598,"style":1132},[1131],[64,13600,13601,13604],{"style":9317},[64,13602],{"className":13603,"style":1112},[767],[64,13605,13607],{"className":13606},[772,773,774,775],[64,13608,13610,13613],{"className":13609},[129,775],[64,13611,11877],{"className":13612,"style":11948},[129,775],[64,13614,11623],{"className":13615,"style":11639},[129,130,775],[64,13617,783],{"className":13618},[782],[64,13620,13622],{"className":13621},[755],[64,13623,13626],{"className":13624,"style":13625},[759],"height:0.4451em;",[64,13627],{},[64,13629],{"className":13630},[845,1092],[64,13632,13634,13637],{"style":13633},"top:-4.6925em;",[64,13635],{"className":13636,"style":13511},[767],[64,13638,13640],{"className":13639},[129],[64,13641,13643,13646,13748],{"className":13642},[129],[64,13644],{"className":13645},[736,1092],[64,13647,13649],{"className":13648},[1002],[64,13650,13652,13740],{"className":13651},[750,751],[64,13653,13655,13737],{"className":13654},[755],[64,13656,13658,13712,13720],{"className":13657,"style":13533},[759],[64,13659,13660,13663],{"style":1994},[64,13661],{"className":13662,"style":1112},[767],[64,13664,13666],{"className":13665},[772,773,774,775],[64,13667,13669,13672],{"className":13668},[129,775],[64,13670,11877],{"className":13671,"style":11948},[129,775],[64,13673,13675,13678],{"className":13674},[129,775],[64,13676,11657],{"className":13677,"style":11699},[129,130,775],[64,13679,13681],{"className":13680},[746],[64,13682,13684,13704],{"className":13683},[750,751],[64,13685,13687,13701],{"className":13686},[755],[64,13688,13690],{"className":13689,"style":13566},[759],[64,13691,13692,13695],{"style":13569},[64,13693],{"className":13694,"style":9696},[767],[64,13696,13698],{"className":13697},[772,9700,9701,775],[64,13699,346],{"className":13700},[129,775],[64,13702,783],{"className":13703},[782],[64,13705,1
3707],{"className":13706},[755],[64,13708,13710],{"className":13709,"style":13588},[759],[64,13711],{},[64,13713,13714,13717],{"style":1124},[64,13715],{"className":13716,"style":1112},[767],[64,13718],{"className":13719,"style":1132},[1131],[64,13721,13722,13725],{"style":9317},[64,13723],{"className":13724,"style":1112},[767],[64,13726,13728],{"className":13727},[772,773,774,775],[64,13729,13731,13734],{"className":13730},[129,775],[64,13732,11877],{"className":13733,"style":11948},[129,775],[64,13735,11623],{"className":13736,"style":11639},[129,130,775],[64,13738,783],{"className":13739},[782],[64,13741,13743],{"className":13742},[755],[64,13744,13746],{"className":13745,"style":13625},[759],[64,13747],{},[64,13749],{"className":13750},[845,1092],[64,13752,13754,13757],{"style":13753},"top:-2.7474em;",[64,13755],{"className":13756,"style":13511},[767],[64,13758,13760],{"className":13759},[129],[64,13761,13763,13766],{"className":13762},[129],[64,13764,13213],{"className":13765},[129],[64,13767],{"className":13768,"style":13770},[129,13769],"rule","border-right-width:0em;border-top-width:1.5em;bottom:0em;",[64,13772,13774,13777],{"style":13773},"top:-1.1073em;",[64,13775],{"className":13776,"style":13511},[767],[64,13778,13780],{"className":13779},[129],[64,13781,13783,13786,13889],{"className":13782},[129],[64,13784],{"className":13785},[736,1092],[64,13787,13789],{"className":13788},[1002],[64,13790,13792,13881],{"className":13791},[750,751],[64,13793,13795,13878],{"className":13794},[755],[64,13796,13798,13853,13861],{"className":13797,"style":13533},[759],[64,13799,13800,13803],{"style":1994},[64,13801],{"className":13802,"style":1112},[767],[64,13804,13806],{"className":13805},[772,773,774,775],[64,13807,13809,13812],{"className":13808},[129,775],[64,13810,11877],{"className":13811,"style":11948},[129,775],[64,13813,13815,13818],{"className":13814},[129,775],[64,13816,11657],{"className":13817,"style":11699},[129,130,775],[64,13819,13821],{"className":13820}
,[746],[64,13822,13824,13845],{"className":13823},[750,751],[64,13825,13827,13842],{"className":13826},[755],[64,13828,13831],{"className":13829,"style":13830},[759],"height:0.1645em;",[64,13832,13833,13836],{"style":13569},[64,13834],{"className":13835,"style":9696},[767],[64,13837,13839],{"className":13838},[772,9700,9701,775],[64,13840,5923],{"className":13841},[129,130,775],[64,13843,783],{"className":13844},[782],[64,13846,13848],{"className":13847},[755],[64,13849,13851],{"className":13850,"style":13588},[759],[64,13852],{},[64,13854,13855,13858],{"style":1124},[64,13856],{"className":13857,"style":1112},[767],[64,13859],{"className":13860,"style":1132},[1131],[64,13862,13863,13866],{"style":9317},[64,13864],{"className":13865,"style":1112},[767],[64,13867,13869],{"className":13868},[772,773,774,775],[64,13870,13872,13875],{"className":13871},[129,775],[64,13873,11877],{"className":13874,"style":11948},[129,775],[64,13876,11623],{"className":13877,"style":11639},[129,130,775],[64,13879,783],{"className":13880},[782],[64,13882,13884],{"className":13883},[755],[64,13885,13887],{"className":13886,"style":13625},[759],[64,13888],{},[64,13890],{"className":13891},[845,1092],[64,13893,783],{"className":13894},[782],[64,13896,13898],{"className":13897},[755],[64,13899,13902],{"className":13900,"style":13901},[759],"height:3.0253em;",[64,13903],{},[64,13905,13907],{"className":13906},[845],[64,13908,13910],{"className":13909},[13440,13441],[64,13911,13913,13934],{"className":13912},[750,751],[64,13914,13916,13931],{"className":13915},[755],[64,13917,13919],{"className":13918,"style":13451},[759],[64,13920,13921,13924],{"style":13454},[64,13922],{"className":13923,"style":13458},[767],[64,13925,13926],{"style":13461},[13463,13927,13928],{"xmlns":13465,"width":13466,"height":13467,"viewBox":13468},[13470,13929],{"d":13930},"M347 1759 V0 H0 V84 H263 V1759 v3000 v1759 H0 v84 H347z\nM347 1759 V0 H263 V1759 v3000 v1759 
h84z",[64,13932,783],{"className":13933},[782],[64,13935,13937],{"className":13936},[755],[64,13938,13940],{"className":13939,"style":13482},[759],[64,13941],{},[12,13943,13944,13945,13948,13949,13952],{},"The gradient is the multi-dimensional equivalent of the derivative. It points in the direction of ",[21,13946,13947],{},"steepest ascent"," in the loss landscape. To minimize the loss, we move in the ",[21,13950,13951],{},"opposite direction"," — exactly what gradient descent does.",[29,13954,41,13959,41,13965],{"className":13955},[13956,13957,1485,13958,1487,1488,1489],"bg-green-50","dark:bg-green-900\u002F20","border-green-500",[12,13960,13964],{"className":13961},[1493,13962,13963],"text-green-800","dark:text-green-200","The Bridge to Machine Learning",[12,13966,13970,13971,13973],{"className":13967},[13968,13969],"text-green-700","dark:text-green-300","\n    In ML, the loss function $J(\\theta)$ measures how wrong the model is. The gradient $\\nabla J(\\theta)$ tells us which direction in parameter space increases the error most. 
By stepping in the ",[16,13972,11262],{}," direction, we reduce the error — step by step, iteration by iteration.\n  ",[26,13975],{},[51,13977,13979],{"id":13978},"part-8-a-complete-example-linear-regression","Part 8 — A Complete Example: Linear Regression",[12,13981,13982],{},"Let's see all of this in action.",[12,13984,13985,13988,13989,14133,14134,2055],{},[21,13986,13987],{},"Setup:"," We have data points ",[64,13990,13992,14034],{"className":13991},[71],[64,13993,13995],{"className":13994},[75],[77,13996,13997],{"xmlns":79},[82,13998,13999,14031],{},[85,14000,14001,14003,14015,14017,14029],{},[92,14002,700],{"stretchy":699},[1554,14004,14005,14007],{},[88,14006,100],{},[85,14008,14009,14011,14013],{},[92,14010,700],{"stretchy":699},[88,14012,11890],{},[92,14014,719],{"stretchy":699},[92,14016,710],{"separator":116},[1554,14018,14019,14021],{},[88,14020,90],{},[85,14022,14023,14025,14027],{},[92,14024,700],{"stretchy":699},[88,14026,11890],{},[92,14028,719],{"stretchy":699},[92,14030,719],{"stretchy":699},[108,14032,14033],{"encoding":110},"(x^{(i)}, 
y^{(i)})",[64,14035,14037],{"className":14036,"ariaHidden":116},[115],[64,14038,14040,14044,14047,14086,14089,14092,14130],{"className":14039},[120],[64,14041],{"className":14042,"style":14043},[124],"height:1.138em;vertical-align:-0.25em;",[64,14045,700],{"className":14046},[736],[64,14048,14050,14053],{"className":14049},[129],[64,14051,100],{"className":14052},[129,130],[64,14054,14056],{"className":14055},[746],[64,14057,14059],{"className":14058},[750],[64,14060,14062],{"className":14061},[755],[64,14063,14066],{"className":14064,"style":14065},[759],"height:0.888em;",[64,14067,14068,14071],{"style":1756},[64,14069],{"className":14070,"style":768},[767],[64,14072,14074],{"className":14073},[772,773,774,775],[64,14075,14077,14080,14083],{"className":14076},[129,775],[64,14078,700],{"className":14079},[736,775],[64,14081,11890],{"className":14082},[129,130,775],[64,14084,719],{"className":14085},[845,775],[64,14087,710],{"className":14088},[796],[64,14090],{"className":14091,"style":800},[135],[64,14093,14095,14098],{"className":14094},[129],[64,14096,90],{"className":14097,"style":131},[129,130],[64,14099,14101],{"className":14100},[746],[64,14102,14104],{"className":14103},[750],[64,14105,14107],{"className":14106},[755],[64,14108,14110],{"className":14109,"style":14065},[759],[64,14111,14112,14115],{"style":1756},[64,14113],{"className":14114,"style":768},[767],[64,14116,14118],{"className":14117},[772,773,774,775],[64,14119,14121,14124,14127],{"className":14120},[129,775],[64,14122,700],{"className":14123},[736,775],[64,14125,11890],{"className":14126},[129,130,775],[64,14128,719],{"className":14129},[845,775],[64,14131,719],{"className":14132},[845]," and want to fit 
",[64,14135,14137,14167],{"className":14136},[71],[64,14138,14140],{"className":14139},[75],[77,14141,14142],{"xmlns":79},[82,14143,14144,14164],{},[85,14145,14146,14154,14156,14158,14160,14162],{},[14147,14148,14149,14151],"mover",{"accent":116},[88,14150,90],{},[92,14152,14153],{},"^",[92,14155,94],{},[88,14157,11657],{},[88,14159,100],{},[92,14161,103],{},[88,14163,106],{},[108,14165,14166],{"encoding":110},"\\hat{y} = wx + b",[64,14168,14170,14231,14252],{"className":14169,"ariaHidden":116},[115],[64,14171,14173,14176,14222,14225,14228],{"className":14172},[120],[64,14174],{"className":14175,"style":2169},[124],[64,14177,14180],{"className":14178},[129,14179],"accent",[64,14181,14183,14213],{"className":14182},[750,751],[64,14184,14186,14210],{"className":14185},[755],[64,14187,14189,14197],{"className":14188,"style":174},[759],[64,14190,14191,14194],{"style":3278},[64,14192],{"className":14193,"style":1112},[767],[64,14195,90],{"className":14196,"style":131},[129,130],[64,14198,14199,14202],{"style":3278},[64,14200],{"className":14201,"style":1112},[767],[64,14203,14207],{"className":14204,"style":14206},[14205],"accent-body","left:-0.1944em;",[64,14208,14153],{"className":14209},[129],[64,14211,783],{"className":14212},[782],[64,14214,14216],{"className":14215},[755],[64,14217,14220],{"className":14218,"style":14219},[759],"height:0.1944em;",[64,14221],{},[64,14223],{"className":14224,"style":136},[135],[64,14226,94],{"className":14227},[140],[64,14229],{"className":14230,"style":136},[135],[64,14232,14234,14237,14240,14243,14246,14249],{"className":14233},[120],[64,14235],{"className":14236,"style":150},[124],[64,14238,11657],{"className":14239,"style":11699},[129,130],[64,14241,100],{"className":14242},[129,130],[64,14244],{"className":14245,"style":160},[135],[64,14247,103],{"className":14248},[164],[64,14250],{"className":14251,"style":160},[135],[64,14253,14255,14258],{"className":14254},[120],[64,14256],{"className":14257,"style":174},[124],[64,14259,106
],{"className":14260},[129,130],[12,14262,14263,14266],{},[21,14264,14265],{},"Loss function"," (Mean Squared Error):",[64,14268,14270],{"className":14269},[67],[64,14271,14273,14425],{"className":14272},[71],[64,14274,14276],{"className":14275},[75],[77,14277,14278],{"xmlns":79,"display":80},[82,14279,14280,14422],{},[85,14281,14282,14284,14286,14288,14290,14292,14294,14296,14302,14318,14358,14360,14366,14380],{},[88,14283,11623],{},[92,14285,700],{"stretchy":699},[88,14287,11657],{},[92,14289,710],{"separator":116},[88,14291,106],{},[92,14293,719],{"stretchy":699},[92,14295,94],{},[1002,14297,14298,14300],{},[344,14299,353],{},[88,14301,97],{},[14303,14304,14305,14308,14316],"munderover",{},[92,14306,14307],{},"∑",[85,14309,14310,14312,14314],{},[88,14311,11890],{},[92,14313,94],{},[344,14315,353],{},[88,14317,97],{},[1554,14319,14320,14356],{},[85,14321,14322,14324,14340,14342,14354],{},[92,14323,700],{"fence":116},[1554,14325,14326,14332],{},[14147,14327,14328,14330],{"accent":116},[88,14329,90],{},[92,14331,14153],{},[85,14333,14334,14336,14338],{},[92,14335,700],{"stretchy":699},[88,14337,11890],{},[92,14339,719],{"stretchy":699},[92,14341,1032],{},[1554,14343,14344,14346],{},[88,14345,90],{},[85,14347,14348,14350,14352],{},[92,14349,700],{"stretchy":699},[88,14351,11890],{},[92,14353,719],{"stretchy":699},[92,14355,719],{"fence":116},[344,14357,346],{},[92,14359,94],{},[1002,14361,14362,14364],{},[344,14363,353],{},[88,14365,97],{},[14303,14367,14368,14370,14378],{},[92,14369,14307],{},[85,14371,14372,14374,14376],{},[88,14373,11890],{},[92,14375,94],{},[344,14377,353],{},[88,14379,97],{},[1554,14381,14382,14420],{},[85,14383,14384,14386,14388,14400,14402,14404,14406,14418],{},[92,14385,700],{"fence":116},[88,14387,11657],{},[1554,14389,14390,14392],{},[88,14391,100],{},[85,14393,14394,14396,14398],{},[92,14395,700],{"stretchy":699},[88,14397,11890],{},[92,14399,719],{"stretchy":699},[92,14401,103],{},[88,14403,106],{},[92,14405,1032],{},[1554,14407,14408,144
10],{},[88,14409,90],{},[85,14411,14412,14414,14416],{},[92,14413,700],{"stretchy":699},[88,14415,11890],{},[92,14417,719],{"stretchy":699},[92,14419,719],{"fence":116},[344,14421,346],{},[108,14423,14424],{"encoding":110},"J(w, b) = \\frac{1}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)^2 = \\frac{1}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right)^2",[64,14426,14428,14464,14793],{"className":14427,"ariaHidden":116},[115],[64,14429,14431,14434,14437,14440,14443,14446,14449,14452,14455,14458,14461],{"className":14430},[120],[64,14432],{"className":14433,"style":732},[124],[64,14435,11623],{"className":14436,"style":11639},[129,130],[64,14438,700],{"className":14439},[736],[64,14441,11657],{"className":14442,"style":11699},[129,130],[64,14444,710],{"className":14445},[796],[64,14447],{"className":14448,"style":800},[135],[64,14450,106],{"className":14451},[129,130],[64,14453,719],{"className":14454},[845],[64,14456],{"className":14457,"style":136},[135],[64,14459,94],{"className":14460},[140],[64,14462],{"className":14463,"style":136},[135],[64,14465,14467,14471,14533,14536,14610,14613,14784,14787,14790],{"className":14466},[120],[64,14468],{"className":14469,"style":14470},[124],"height:2.9291em;vertical-align:-1.2777em;",[64,14472,14474,14477,14530],{"className":14473},[129],[64,14475],{"className":14476},[736,1092],[64,14478,14480],{"className":14479},[1002],[64,14481,14483,14522],{"className":14482},[750,751],[64,14484,14486,14519],{"className":14485},[755],[64,14487,14489,14500,14508],{"className":14488,"style":2867},[759],[64,14490,14491,14494],{"style":1108},[64,14492],{"className":14493,"style":1112},[767],[64,14495,14497],{"className":14496},[129],[64,14498,97],{"className":14499},[129,130],[64,14501,14502,14505],{"style":1124},[64,14503],{"className":14504,"style":1112},[767],[64,14506],{"className":14507,"style":1132},[1131],[64,14509,14510,14513],{"style":1135},[64,14511],{"className":14512,"style":1112},[767],[64,14514,14516],{"clas
sName":14515},[129],[64,14517,353],{"className":14518},[129],[64,14520,783],{"className":14521},[782],[64,14523,14525],{"className":14524},[755],[64,14526,14528],{"className":14527,"style":1157},[759],[64,14529],{},[64,14531],{"className":14532},[845,1092],[64,14534],{"className":14535,"style":800},[135],[64,14537,14539],{"className":14538},[3244,3245],[64,14540,14542,14601],{"className":14541},[750,751],[64,14543,14545,14598],{"className":14544},[755],[64,14546,14549,14570,14583],{"className":14547,"style":14548},[759],"height:1.6514em;",[64,14550,14552,14555],{"style":14551},"top:-1.8723em;margin-left:0em;",[64,14553],{"className":14554,"style":13482},[767],[64,14556,14558],{"className":14557},[772,773,774,775],[64,14559,14561,14564,14567],{"className":14560},[129,775],[64,14562,11890],{"className":14563},[129,130,775],[64,14565,94],{"className":14566},[140,775],[64,14568,353],{"className":14569},[129,775],[64,14571,14573,14576],{"style":14572},"top:-3.05em;",[64,14574],{"className":14575,"style":13482},[767],[64,14577,14578],{},[64,14579,14307],{"className":14580},[3244,14581,14582],"op-symbol","large-op",[64,14584,14586,14589],{"style":14585},"top:-4.3em;margin-left:0em;",[64,14587],{"className":14588,"style":13482},[767],[64,14590,14592],{"className":14591},[772,773,774,775],[64,14593,14595],{"className":14594},[129,775],[64,14596,97],{"className":14597},[129,130,775],[64,14599,783],{"className":14600},[782],[64,14602,14604],{"className":14603},[755],[64,14605,14608],{"className":14606,"style":14607},[759],"height:1.2777em;",[64,14609],{},[64,14611],{"className":14612,"style":800},[135],[64,14614,14616,14759],{"className":14615},[11790],[64,14617,14619,14628,14706,14709,14712,14715,14753],{"className":14618},[11790],[64,14620,14624],{"className":14621,"style":14623},[736,14622],"delimcenter","top:0em;",[64,14625,700],{"className":14626},[13440,14627],"size2",[64,14629,14631,14673],{"className":14630},[129],[64,14632,14634],{"className":14633},[129,14179],[64,14
635,14637,14665],{"className":14636},[750,751],[64,14638,14640,14662],{"className":14639},[755],[64,14641,14643,14651],{"className":14642,"style":174},[759],[64,14644,14645,14648],{"style":3278},[64,14646],{"className":14647,"style":1112},[767],[64,14649,90],{"className":14650,"style":131},[129,130],[64,14652,14653,14656],{"style":3278},[64,14654],{"className":14655,"style":1112},[767],[64,14657,14659],{"className":14658,"style":14206},[14205],[64,14660,14153],{"className":14661},[129],[64,14663,783],{"className":14664},[782],[64,14666,14668],{"className":14667},[755],[64,14669,14671],{"className":14670,"style":14219},[759],[64,14672],{},[64,14674,14676],{"className":14675},[746],[64,14677,14679],{"className":14678},[750],[64,14680,14682],{"className":14681},[755],[64,14683,14686],{"className":14684,"style":14685},[759],"height:0.938em;",[64,14687,14688,14691],{"style":1621},[64,14689],{"className":14690,"style":768},[767],[64,14692,14694],{"className":14693},[772,773,774,775],[64,14695,14697,14700,14703],{"className":14696},[129,775],[64,14698,700],{"className":14699},[736,775],[64,14701,11890],{"className":14702},[129,130,775],[64,14704,719],{"className":14705},[845,775],[64,14707],{"className":14708,"style":160},[135],[64,14710,1032],{"className":14711},[164],[64,14713],{"className":14714,"style":160},[135],[64,14716,14718,14721],{"className":14717},[129],[64,14719,90],{"className":14720,"style":131},[129,130],[64,14722,14724],{"className":14723},[746],[64,14725,14727],{"className":14726},[750],[64,14728,14730],{"className":14729},[755],[64,14731,14733],{"className":14732,"style":14685},[759],[64,14734,14735,14738],{"style":1621},[64,14736],{"className":14737,"style":768},[767],[64,14739,14741],{"className":14740},[772,773,774,775],[64,14742,14744,14747,14750],{"className":14743},[129,775],[64,14745,700],{"className":14746},[736,775],[64,14748,11890],{"className":14749},[129,130,775],[64,14751,719],{"className":14752},[845,775],[64,14754,14756],{"className":14755
,"style":14623},[845,14622],[64,14757,719],{"className":14758},[13440,14627],[64,14760,14762],{"className":14761},[746],[64,14763,14765],{"className":14764},[750],[64,14766,14768],{"className":14767},[755],[64,14769,14772],{"className":14770,"style":14771},[759],"height:1.354em;",[64,14773,14775,14778],{"style":14774},"top:-3.6029em;margin-right:0.05em;",[64,14776],{"className":14777,"style":768},[767],[64,14779,14781],{"className":14780},[772,773,774,775],[64,14782,346],{"className":14783},[129,775],[64,14785],{"className":14786,"style":136},[135],[64,14788,94],{"className":14789},[140],[64,14791],{"className":14792,"style":136},[135],[64,14794,14796,14799,14861,14864,14931,14934],{"className":14795},[120],[64,14797],{"className":14798,"style":14470},[124],[64,14800,14802,14805,14858],{"className":14801},[129],[64,14803],{"className":14804},[736,1092],[64,14806,14808],{"className":14807},[1002],[64,14809,14811,14850],{"className":14810},[750,751],[64,14812,14814,14847],{"className":14813},[755],[64,14815,14817,14828,14836],{"className":14816,"style":2867},[759],[64,14818,14819,14822],{"style":1108},[64,14820],{"className":14821,"style":1112},[767],[64,14823,14825],{"className":14824},[129],[64,14826,97],{"className":14827},[129,130],[64,14829,14830,14833],{"style":1124},[64,14831],{"className":14832,"style":1112},[767],[64,14834],{"className":14835,"style":1132},[1131],[64,14837,14838,14841],{"style":1135},[64,14839],{"className":14840,"style":1112},[767],[64,14842,14844],{"className":14843},[129],[64,14845,353],{"className":14846},[129],[64,14848,783],{"className":14849},[782],[64,14851,14853],{"className":14852},[755],[64,14854,14856],{"className":14855,"style":1157},[759],[64,14857],{},[64,14859],{"className":14860},[845,1092],[64,14862],{"className":14863,"style":800},[135],[64,14865,14867],{"className":14866},[3244,3245],[64,14868,14870,14923],{"className":14869},[750,751],[64,14871,14873,14920],{"className":14872},[755],[64,14874,14876,14896,14906],{"classNam
e":14875,"style":14548},[759],[64,14877,14878,14881],{"style":14551},[64,14879],{"className":14880,"style":13482},[767],[64,14882,14884],{"className":14883},[772,773,774,775],[64,14885,14887,14890,14893],{"className":14886},[129,775],[64,14888,11890],{"className":14889},[129,130,775],[64,14891,94],{"className":14892},[140,775],[64,14894,353],{"className":14895},[129,775],[64,14897,14898,14901],{"style":14572},[64,14899],{"className":14900,"style":13482},[767],[64,14902,14903],{},[64,14904,14307],{"className":14905},[3244,14581,14582],[64,14907,14908,14911],{"style":14585},[64,14909],{"className":14910,"style":13482},[767],[64,14912,14914],{"className":14913},[772,773,774,775],[64,14915,14917],{"className":14916},[129,775],[64,14918,97],{"className":14919},[129,130,775],[64,14921,783],{"className":14922},[782],[64,14924,14926],{"className":14925},[755],[64,14927,14929],{"className":14928,"style":14607},[759],[64,14930],{},[64,14932],{"className":14933,"style":800},[135],[64,14935,14937,15052],{"className":14936},[11790],[64,14938,14940,14946,14949,14987,14990,14993,14996,14999,15002,15005,15008,15046],{"className":14939},[11790],[64,14941,14943],{"className":14942,"style":14623},[736,14622],[64,14944,700],{"className":14945},[13440,14627],[64,14947,11657],{"className":14948,"style":11699},[129,130],[64,14950,14952,14955],{"className":14951},[129],[64,14953,100],{"className":14954},[129,130],[64,14956,14958],{"className":14957},[746],[64,14959,14961],{"className":14960},[750],[64,14962,14964],{"className":14963},[755],[64,14965,14967],{"className":14966,"style":14685},[759],[64,14968,14969,14972],{"style":1621},[64,14970],{"className":14971,"style":768},[767],[64,14973,14975],{"className":14974},[772,773,774,775],[64,14976,14978,14981,14984],{"className":14977},[129,775],[64,14979,700],{"className":14980},[736,775],[64,14982,11890],{"className":14983},[129,130,775],[64,14985,719],{"className":14986},[845,775],[64,14988],{"className":14989,"style":160},[135],[64,14991,
103],{"className":14992},[164],[64,14994],{"className":14995,"style":160},[135],[64,14997,106],{"className":14998},[129,130],[64,15000],{"className":15001,"style":160},[135],[64,15003,1032],{"className":15004},[164],[64,15006],{"className":15007,"style":160},[135],[64,15009,15011,15014],{"className":15010},[129],[64,15012,90],{"className":15013,"style":131},[129,130],[64,15015,15017],{"className":15016},[746],[64,15018,15020],{"className":15019},[750],[64,15021,15023],{"className":15022},[755],[64,15024,15026],{"className":15025,"style":14685},[759],[64,15027,15028,15031],{"style":1621},[64,15029],{"className":15030,"style":768},[767],[64,15032,15034],{"className":15033},[772,773,774,775],[64,15035,15037,15040,15043],{"className":15036},[129,775],[64,15038,700],{"className":15039},[736,775],[64,15041,11890],{"className":15042},[129,130,775],[64,15044,719],{"className":15045},[845,775],[64,15047,15049],{"className":15048,"style":14623},[845,14622],[64,15050,719],{"className":15051},[13440,14627],[64,15053,15055],{"className":15054},[746],[64,15056,15058],{"className":15057},[750],[64,15059,15061],{"className":15060},[755],[64,15062,15064],{"className":15063,"style":14771},[759],[64,15065,15066,15069],{"style":14774},[64,15067],{"className":15068,"style":768},[767],[64,15070,15072],{"className":15071},[772,773,774,775],[64,15073,346],{"className":15074},[129,775],[12,15076,15077,15108,15109,15165],{},[21,15078,15079,15080],{},"Partial derivative w.r.t. 
",[64,15081,15083,15096],{"className":15082},[71],[64,15084,15086],{"className":15085},[75],[77,15087,15088],{"xmlns":79},[82,15089,15090,15094],{},[85,15091,15092],{},[88,15093,11657],{},[108,15095,11657],{"encoding":110},[64,15097,15099],{"className":15098,"ariaHidden":116},[115],[64,15100,15102,15105],{"className":15101},[120],[64,15103],{"className":15104,"style":212},[124],[64,15106,11657],{"className":15107,"style":11699},[129,130]," (using chain rule — derivative of outer squared term times derivative of inner ",[64,15110,15112,15132],{"className":15111},[71],[64,15113,15115],{"className":15114},[75],[77,15116,15117],{"xmlns":79},[82,15118,15119,15129],{},[85,15120,15121,15123,15125,15127],{},[88,15122,11657],{},[88,15124,100],{},[92,15126,103],{},[88,15128,106],{},[108,15130,15131],{"encoding":110},"wx+b",[64,15133,15135,15156],{"className":15134,"ariaHidden":116},[115],[64,15136,15138,15141,15144,15147,15150,15153],{"className":15137},[120],[64,15139],{"className":15140,"style":150},[124],[64,15142,11657],{"className":15143,"style":11699},[129,130],[64,15145,100],{"className":15146},[129,130],[64,15148],{"className":15149,"style":160},[135],[64,15151,103],{"className":15152},[164],[64,15154],{"className":15155,"style":160},[135],[64,15157,15159,15162],{"className":15158},[120],[64,15160],{"className":15161,"style":174},[124],[64,15163,106],{"className":15164},[129,130],"):",[64,15167,15169],{"className":15168},[67],[64,15170,15172,15272],{"className":15171},[71],[64,15173,15175],{"className":15174},[75],[77,15176,15177],{"xmlns":79,"display":80},[82,15178,15179,15269],{},[85,15180,15181,15195,15197,15203,15217,15255,15257],{},[1002,15182,15183,15189],{},[85,15184,15185,15187],{},[88,15186,11877],{"mathvariant":1008},[88,15188,11623],{},[85,15190,15191,15193],{},[88,15192,11877],{"mathvariant":1008},[88,15194,11657],{},[92,15196,94],{},[1002,15198,15199,15201],{},[344,15200,346],{},[88,15202,97],{},[14303,15204,15205,15207,15215],{},[92,15206,14307],{},[85,1
5208,15209,15211,15213],{},[88,15210,11890],{},[92,15212,94],{},[344,15214,353],{},[88,15216,97],{},[85,15218,15219,15221,15223,15235,15237,15239,15241,15253],{},[92,15220,700],{"fence":116},[88,15222,11657],{},[1554,15224,15225,15227],{},[88,15226,100],{},[85,15228,15229,15231,15233],{},[92,15230,700],{"stretchy":699},[88,15232,11890],{},[92,15234,719],{"stretchy":699},[92,15236,103],{},[88,15238,106],{},[92,15240,1032],{},[1554,15242,15243,15245],{},[88,15244,90],{},[85,15246,15247,15249,15251],{},[92,15248,700],{"stretchy":699},[88,15250,11890],{},[92,15252,719],{"stretchy":699},[92,15254,719],{"fence":116},[92,15256,6030],{},[1554,15258,15259,15261],{},[88,15260,100],{},[85,15262,15263,15265,15267],{},[92,15264,700],{"stretchy":699},[88,15266,11890],{},[92,15268,719],{"stretchy":699},[108,15270,15271],{"encoding":110},"\\frac{\\partial J}{\\partial w} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right) \\cdot x^{(i)}",[64,15273,15275,15358,15623],{"className":15274,"ariaHidden":116},[115],[64,15276,15278,15281,15349,15352,15355],{"className":15277},[120],[64,15279],{"className":15280,"style":2249},[124],[64,15282,15284,15287,15346],{"className":15283},[129],[64,15285],{"className":15286},[736,1092],[64,15288,15290],{"className":15289},[1002],[64,15291,15293,15338],{"className":15292},[750,751],[64,15294,15296,15335],{"className":15295},[755],[64,15297,15299,15313,15321],{"className":15298,"style":2268},[759],[64,15300,15301,15304],{"style":1108},[64,15302],{"className":15303,"style":1112},[767],[64,15305,15307,15310],{"className":15306},[129],[64,15308,11877],{"className":15309,"style":11948},[129],[64,15311,11657],{"className":15312,"style":11699},[129,130],[64,15314,15315,15318],{"style":1124},[64,15316],{"className":15317,"style":1112},[767],[64,15319],{"className":15320,"style":1132},[1131],[64,15322,15323,15326],{"style":1135},[64,15324],{"className":15325,"style":1112},[767],[64,15327,15329,15332],{"className":15328},[129],[64,15330,11877]
,{"className":15331,"style":11948},[129],[64,15333,11623],{"className":15334,"style":11639},[129,130],[64,15336,783],{"className":15337},[782],[64,15339,15341],{"className":15340},[755],[64,15342,15344],{"className":15343,"style":1157},[759],[64,15345],{},[64,15347],{"className":15348},[845,1092],[64,15350],{"className":15351,"style":136},[135],[64,15353,94],{"className":15354},[140],[64,15356],{"className":15357,"style":136},[135],[64,15359,15361,15364,15426,15429,15496,15499,15614,15617,15620],{"className":15360},[120],[64,15362],{"className":15363,"style":14470},[124],[64,15365,15367,15370,15423],{"className":15366},[129],[64,15368],{"className":15369},[736,1092],[64,15371,15373],{"className":15372},[1002],[64,15374,15376,15415],{"className":15375},[750,751],[64,15377,15379,15412],{"className":15378},[755],[64,15380,15382,15393,15401],{"className":15381,"style":2867},[759],[64,15383,15384,15387],{"style":1108},[64,15385],{"className":15386,"style":1112},[767],[64,15388,15390],{"className":15389},[129],[64,15391,97],{"className":15392},[129,130],[64,15394,15395,15398],{"style":1124},[64,15396],{"className":15397,"style":1112},[767],[64,15399],{"className":15400,"style":1132},[1131],[64,15402,15403,15406],{"style":1135},[64,15404],{"className":15405,"style":1112},[767],[64,15407,15409],{"className":15408},[129],[64,15410,346],{"className":15411},[129],[64,15413,783],{"className":15414},[782],[64,15416,15418],{"className":15417},[755],[64,15419,15421],{"className":15420,"style":1157},[759],[64,15422],{},[64,15424],{"className":15425},[845,1092],[64,15427],{"className":15428,"style":800},[135],[64,15430,15432],{"className":15431},[3244,3245],[64,15433,15435,15488],{"className":15434},[750,751],[64,15436,15438,15485],{"className":15437},[755],[64,15439,15441,15461,15471],{"className":15440,"style":14548},[759],[64,15442,15443,15446],{"style":14551},[64,15444],{"className":15445,"style":13482},[767],[64,15447,15449],{"className":15448},[772,773,774,775],[64,15450,15452
,15455,15458],{"className":15451},[129,775],[64,15453,11890],{"className":15454},[129,130,775],[64,15456,94],{"className":15457},[140,775],[64,15459,353],{"className":15460},[129,775],[64,15462,15463,15466],{"style":14572},[64,15464],{"className":15465,"style":13482},[767],[64,15467,15468],{},[64,15469,14307],{"className":15470},[3244,14581,14582],[64,15472,15473,15476],{"style":14585},[64,15474],{"className":15475,"style":13482},[767],[64,15477,15479],{"className":15478},[772,773,774,775],[64,15480,15482],{"className":15481},[129,775],[64,15483,97],{"className":15484},[129,130,775],[64,15486,783],{"className":15487},[782],[64,15489,15491],{"className":15490},[755],[64,15492,15494],{"className":15493,"style":14607},[759],[64,15495],{},[64,15497],{"className":15498,"style":800},[135],[64,15500,15502,15508,15511,15549,15552,15555,15558,15561,15564,15567,15570,15608],{"className":15501},[11790],[64,15503,15505],{"className":15504,"style":14623},[736,14622],[64,15506,700],{"className":15507},[13440,14627],[64,15509,11657],{"className":15510,"style":11699},[129,130],[64,15512,15514,15517],{"className":15513},[129],[64,15515,100],{"className":15516},[129,130],[64,15518,15520],{"className":15519},[746],[64,15521,15523],{"className":15522},[750],[64,15524,15526],{"className":15525},[755],[64,15527,15529],{"className":15528,"style":14685},[759],[64,15530,15531,15534],{"style":1621},[64,15532],{"className":15533,"style":768},[767],[64,15535,15537],{"className":15536},[772,773,774,775],[64,15538,15540,15543,15546],{"className":15539},[129,775],[64,15541,700],{"className":15542},[736,775],[64,15544,11890],{"className":15545},[129,130,775],[64,15547,719],{"className":15548},[845,775],[64,15550],{"className":15551,"style":160},[135],[64,15553,103],{"className":15554},[164],[64,15556],{"className":15557,"style":160},[135],[64,15559,106],{"className":15560},[129,130],[64,15562],{"className":15563,"style":160},[135],[64,15565,1032],{"className":15566},[164],[64,15568],{"className":1
5569,"style":160},[135],[64,15571,15573,15576],{"className":15572},[129],[64,15574,90],{"className":15575,"style":131},[129,130],[64,15577,15579],{"className":15578},[746],[64,15580,15582],{"className":15581},[750],[64,15583,15585],{"className":15584},[755],[64,15586,15588],{"className":15587,"style":14685},[759],[64,15589,15590,15593],{"style":1621},[64,15591],{"className":15592,"style":768},[767],[64,15594,15596],{"className":15595},[772,773,774,775],[64,15597,15599,15602,15605],{"className":15598},[129,775],[64,15600,700],{"className":15601},[736,775],[64,15603,11890],{"className":15604},[129,130,775],[64,15606,719],{"className":15607},[845,775],[64,15609,15611],{"className":15610,"style":14623},[845,14622],[64,15612,719],{"className":15613},[13440,14627],[64,15615],{"className":15616,"style":160},[135],[64,15618,6030],{"className":15619},[164],[64,15621],{"className":15622,"style":160},[135],[64,15624,15626,15629],{"className":15625},[120],[64,15627],{"className":15628,"style":14685},[124],[64,15630,15632,15635],{"className":15631},[129],[64,15633,100],{"className":15634},[129,130],[64,15636,15638],{"className":15637},[746],[64,15639,15641],{"className":15640},[750],[64,15642,15644],{"className":15643},[755],[64,15645,15647],{"className":15646,"style":14685},[759],[64,15648,15649,15652],{"style":1621},[64,15650],{"className":15651,"style":768},[767],[64,15653,15655],{"className":15654},[772,773,774,775],[64,15656,15658,15661,15664],{"className":15657},[129,775],[64,15659,700],{"className":15660},[736,775],[64,15662,11890],{"className":15663},[129,130,775],[64,15665,719],{"className":15666},[845,775],[12,15668,15669,2650],{},[21,15670,15079,15671],{},[64,15672,15674,15687],{"className":15673},[71],[64,15675,15677],{"className":15676},[75],[77,15678,15679],{"xmlns":79},[82,15680,15681,15685],{},[85,15682,15683],{},[88,15684,106],{},[108,15686,106],{"encoding":110},[64,15688,15690],{"className":15689,"ariaHidden":116},[115],[64,15691,15693,15696],{"className":15692
},[120],[64,15694],{"className":15695,"style":174},[124],[64,15697,106],{"className":15698},[129,130],[64,15700,15702],{"className":15701},[67],[64,15703,15705,15791],{"className":15704},[71],[64,15706,15708],{"className":15707},[75],[77,15709,15710],{"xmlns":79,"display":80},[82,15711,15712,15788],{},[85,15713,15714,15728,15730,15736,15750],{},[1002,15715,15716,15722],{},[85,15717,15718,15720],{},[88,15719,11877],{"mathvariant":1008},[88,15721,11623],{},[85,15723,15724,15726],{},[88,15725,11877],{"mathvariant":1008},[88,15727,106],{},[92,15729,94],{},[1002,15731,15732,15734],{},[344,15733,346],{},[88,15735,97],{},[14303,15737,15738,15740,15748],{},[92,15739,14307],{},[85,15741,15742,15744,15746],{},[88,15743,11890],{},[92,15745,94],{},[344,15747,353],{},[88,15749,97],{},[85,15751,15752,15754,15756,15768,15770,15772,15774,15786],{},[92,15753,700],{"fence":116},[88,15755,11657],{},[1554,15757,15758,15760],{},[88,15759,100],{},[85,15761,15762,15764,15766],{},[92,15763,700],{"stretchy":699},[88,15765,11890],{},[92,15767,719],{"stretchy":699},[92,15769,103],{},[88,15771,106],{},[92,15773,1032],{},[1554,15775,15776,15778],{},[88,15777,90],{},[85,15779,15780,15782,15784],{},[92,15781,700],{"stretchy":699},[88,15783,11890],{},[92,15785,719],{"stretchy":699},[92,15787,719],{"fence":116},[108,15789,15790],{"encoding":110},"\\frac{\\partial J}{\\partial b} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - 
y^{(i)}\\right)",[64,15792,15794,15877],{"className":15793,"ariaHidden":116},[115],[64,15795,15797,15800,15868,15871,15874],{"className":15796},[120],[64,15798],{"className":15799,"style":2249},[124],[64,15801,15803,15806,15865],{"className":15802},[129],[64,15804],{"className":15805},[736,1092],[64,15807,15809],{"className":15808},[1002],[64,15810,15812,15857],{"className":15811},[750,751],[64,15813,15815,15854],{"className":15814},[755],[64,15816,15818,15832,15840],{"className":15817,"style":2268},[759],[64,15819,15820,15823],{"style":1108},[64,15821],{"className":15822,"style":1112},[767],[64,15824,15826,15829],{"className":15825},[129],[64,15827,11877],{"className":15828,"style":11948},[129],[64,15830,106],{"className":15831},[129,130],[64,15833,15834,15837],{"style":1124},[64,15835],{"className":15836,"style":1112},[767],[64,15838],{"className":15839,"style":1132},[1131],[64,15841,15842,15845],{"style":1135},[64,15843],{"className":15844,"style":1112},[767],[64,15846,15848,15851],{"className":15847},[129],[64,15849,11877],{"className":15850,"style":11948},[129],[64,15852,11623],{"className":15853,"style":11639},[129,130],[64,15855,783],{"className":15856},[782],[64,15858,15860],{"className":15859},[755],[64,15861,15863],{"className":15862,"style":1157},[759],[64,15864],{},[64,15866],{"className":15867},[845,1092],[64,15869],{"className":15870,"style":136},[135],[64,15872,94],{"className":15873},[140],[64,15875],{"className":15876,"style":136},[135],[64,15878,15880,15883,15945,15948,16015,16018],{"className":15879},[120],[64,15881],{"className":15882,"style":14470},[124],[64,15884,15886,15889,15942],{"className":15885},[129],[64,15887],{"className":15888},[736,1092],[64,15890,15892],{"className":15891},[1002],[64,15893,15895,15934],{"className":15894},[750,751],[64,15896,15898,15931],{"className":15897},[755],[64,15899,15901,15912,15920],{"className":15900,"style":2867},[759],[64,15902,15903,15906],{"style":1108},[64,15904],{"className":15905,"style":1112},[767]
,[64,15907,15909],{"className":15908},[129],[64,15910,97],{"className":15911},[129,130],[64,15913,15914,15917],{"style":1124},[64,15915],{"className":15916,"style":1112},[767],[64,15918],{"className":15919,"style":1132},[1131],[64,15921,15922,15925],{"style":1135},[64,15923],{"className":15924,"style":1112},[767],[64,15926,15928],{"className":15927},[129],[64,15929,346],{"className":15930},[129],[64,15932,783],{"className":15933},[782],[64,15935,15937],{"className":15936},[755],[64,15938,15940],{"className":15939,"style":1157},[759],[64,15941],{},[64,15943],{"className":15944},[845,1092],[64,15946],{"className":15947,"style":800},[135],[64,15949,15951],{"className":15950},[3244,3245],[64,15952,15954,16007],{"className":15953},[750,751],[64,15955,15957,16004],{"className":15956},[755],[64,15958,15960,15980,15990],{"className":15959,"style":14548},[759],[64,15961,15962,15965],{"style":14551},[64,15963],{"className":15964,"style":13482},[767],[64,15966,15968],{"className":15967},[772,773,774,775],[64,15969,15971,15974,15977],{"className":15970},[129,775],[64,15972,11890],{"className":15973},[129,130,775],[64,15975,94],{"className":15976},[140,775],[64,15978,353],{"className":15979},[129,775],[64,15981,15982,15985],{"style":14572},[64,15983],{"className":15984,"style":13482},[767],[64,15986,15987],{},[64,15988,14307],{"className":15989},[3244,14581,14582],[64,15991,15992,15995],{"style":14585},[64,15993],{"className":15994,"style":13482},[767],[64,15996,15998],{"className":15997},[772,773,774,775],[64,15999,16001],{"className":16000},[129,775],[64,16002,97],{"className":16003},[129,130,775],[64,16005,783],{"className":16006},[782],[64,16008,16010],{"className":16009},[755],[64,16011,16013],{"className":16012,"style":14607},[759],[64,16014],{},[64,16016],{"className":16017,"style":800},[135],[64,16019,16021,16027,16030,16068,16071,16074,16077,16080,16083,16086,16089,16127],{"className":16020},[11790],[64,16022,16024],{"className":16023,"style":14623},[736,14622],[64,1602
5,700],{"className":16026},[13440,14627],[64,16028,11657],{"className":16029,"style":11699},[129,130],[64,16031,16033,16036],{"className":16032},[129],[64,16034,100],{"className":16035},[129,130],[64,16037,16039],{"className":16038},[746],[64,16040,16042],{"className":16041},[750],[64,16043,16045],{"className":16044},[755],[64,16046,16048],{"className":16047,"style":14685},[759],[64,16049,16050,16053],{"style":1621},[64,16051],{"className":16052,"style":768},[767],[64,16054,16056],{"className":16055},[772,773,774,775],[64,16057,16059,16062,16065],{"className":16058},[129,775],[64,16060,700],{"className":16061},[736,775],[64,16063,11890],{"className":16064},[129,130,775],[64,16066,719],{"className":16067},[845,775],[64,16069],{"className":16070,"style":160},[135],[64,16072,103],{"className":16073},[164],[64,16075],{"className":16076,"style":160},[135],[64,16078,106],{"className":16079},[129,130],[64,16081],{"className":16082,"style":160},[135],[64,16084,1032],{"className":16085},[164],[64,16087],{"className":16088,"style":160},[135],[64,16090,16092,16095],{"className":16091},[129],[64,16093,90],{"className":16094,"style":131},[129,130],[64,16096,16098],{"className":16097},[746],[64,16099,16101],{"className":16100},[750],[64,16102,16104],{"className":16103},[755],[64,16105,16107],{"className":16106,"style":14685},[759],[64,16108,16109,16112],{"style":1621},[64,16110],{"className":16111,"style":768},[767],[64,16113,16115],{"className":16114},[772,773,774,775],[64,16116,16118,16121,16124],{"className":16117},[129,775],[64,16119,700],{"className":16120},[736,775],[64,16122,11890],{"className":16123},[129,130,775],[64,16125,719],{"className":16126},[845,775],[64,16128,16130],{"className":16129,"style":14623},[845,14622],[64,16131,719],{"className":16132},[13440,14627],[12,16134,16135,16138],{},[21,16136,16137],{},"Gradient descent updates"," — move opposite to the 
gradient:",[64,16140,16142],{"className":16141},[67],[64,16143,16145,16214],{"className":16144},[71],[64,16146,16148],{"className":16147},[75],[77,16149,16150],{"xmlns":79,"display":80},[82,16151,16152,16211],{},[85,16153,16154,16156,16159,16161,16163,16165,16167,16181,16183,16185,16187,16189,16191,16193,16195,16197],{},[88,16155,11657],{},[92,16157,16158],{},"←",[88,16160,11657],{},[92,16162,1032],{},[88,16164,11299],{},[92,16166,6030],{},[1002,16168,16169,16175],{},[85,16170,16171,16173],{},[88,16172,11877],{"mathvariant":1008},[88,16174,11623],{},[85,16176,16177,16179],{},[88,16178,11877],{"mathvariant":1008},[88,16180,11657],{},[92,16182,710],{"separator":116},[135,16184],{"width":12551},[88,16186,106],{},[92,16188,16158],{},[88,16190,106],{},[92,16192,1032],{},[88,16194,11299],{},[92,16196,6030],{},[1002,16198,16199,16205],{},[85,16200,16201,16203],{},[88,16202,11877],{"mathvariant":1008},[88,16204,11623],{},[85,16206,16207,16209],{},[88,16208,11877],{"mathvariant":1008},[88,16210,106],{},[108,16212,16213],{"encoding":110},"w \\leftarrow w - \\alpha \\cdot \\frac{\\partial J}{\\partial w}, \\qquad b \\leftarrow b - \\alpha \\cdot \\frac{\\partial J}{\\partial 
b}",[64,16215,16217,16235,16253,16271,16366,16385,16403],{"className":16216,"ariaHidden":116},[115],[64,16218,16220,16223,16226,16229,16232],{"className":16219},[120],[64,16221],{"className":16222,"style":212},[124],[64,16224,11657],{"className":16225,"style":11699},[129,130],[64,16227],{"className":16228,"style":136},[135],[64,16230,16158],{"className":16231},[140],[64,16233],{"className":16234,"style":136},[135],[64,16236,16238,16241,16244,16247,16250],{"className":16237},[120],[64,16239],{"className":16240,"style":150},[124],[64,16242,11657],{"className":16243,"style":11699},[129,130],[64,16245],{"className":16246,"style":160},[135],[64,16248,1032],{"className":16249},[164],[64,16251],{"className":16252,"style":160},[135],[64,16254,16256,16259,16262,16265,16268],{"className":16255},[120],[64,16257],{"className":16258,"style":6165},[124],[64,16260,11299],{"className":16261,"style":11460},[129,130],[64,16263],{"className":16264,"style":160},[135],[64,16266,6030],{"className":16267},[164],[64,16269],{"className":16270,"style":160},[135],[64,16272,16274,16277,16345,16348,16351,16354,16357,16360,16363],{"className":16273},[120],[64,16275],{"className":16276,"style":2249},[124],[64,16278,16280,16283,16342],{"className":16279},[129],[64,16281],{"className":16282},[736,1092],[64,16284,16286],{"className":16285},[1002],[64,16287,16289,16334],{"className":16288},[750,751],[64,16290,16292,16331],{"className":16291},[755],[64,16293,16295,16309,16317],{"className":16294,"style":2268},[759],[64,16296,16297,16300],{"style":1108},[64,16298],{"className":16299,"style":1112},[767],[64,16301,16303,16306],{"className":16302},[129],[64,16304,11877],{"className":16305,"style":11948},[129],[64,16307,11657],{"className":16308,"style":11699},[129,130],[64,16310,16311,16314],{"style":1124},[64,16312],{"className":16313,"style":1112},[767],[64,16315],{"className":16316,"style":1132},[1131],[64,16318,16319,16322],{"style":1135},[64,16320],{"className":16321,"style":1112},[767],[64,16323,163
25,16328],{"className":16324},[129],[64,16326,11877],{"className":16327,"style":11948},[129],[64,16329,11623],{"className":16330,"style":11639},[129,130],[64,16332,783],{"className":16333},[782],[64,16335,16337],{"className":16336},[755],[64,16338,16340],{"className":16339,"style":1157},[759],[64,16341],{},[64,16343],{"className":16344},[845,1092],[64,16346,710],{"className":16347},[796],[64,16349],{"className":16350,"style":12826},[135],[64,16352],{"className":16353,"style":800},[135],[64,16355,106],{"className":16356},[129,130],[64,16358],{"className":16359,"style":136},[135],[64,16361,16158],{"className":16362},[140],[64,16364],{"className":16365,"style":136},[135],[64,16367,16369,16373,16376,16379,16382],{"className":16368},[120],[64,16370],{"className":16371,"style":16372},[124],"height:0.7778em;vertical-align:-0.0833em;",[64,16374,106],{"className":16375},[129,130],[64,16377],{"className":16378,"style":160},[135],[64,16380,1032],{"className":16381},[164],[64,16383],{"className":16384,"style":160},[135],[64,16386,16388,16391,16394,16397,16400],{"className":16387},[120],[64,16389],{"className":16390,"style":6165},[124],[64,16392,11299],{"className":16393,"style":11460},[129,130],[64,16395],{"className":16396,"style":160},[135],[64,16398,6030],{"className":16399},[164],[64,16401],{"className":16402,"style":160},[135],[64,16404,16406,16409],{"className":16405},[120],[64,16407],{"className":16408,"style":2249},[124],[64,16410,16412,16415,16474],{"className":16411},[129],[64,16413],{"className":16414},[736,1092],[64,16416,16418],{"className":16417},[1002],[64,16419,16421,16466],{"className":16420},[750,751],[64,16422,16424,16463],{"className":16423},[755],[64,16425,16427,16441,16449],{"className":16426,"style":2268},[759],[64,16428,16429,16432],{"style":1108},[64,16430],{"className":16431,"style":1112},[767],[64,16433,16435,16438],{"className":16434},[129],[64,16436,11877],{"className":16437,"style":11948},[129],[64,16439,106],{"className":16440},[129,130],[64,16442
,16443,16446],{"style":1124},[64,16444],{"className":16445,"style":1112},[767],[64,16447],{"className":16448,"style":1132},[1131],[64,16450,16451,16454],{"style":1135},[64,16452],{"className":16453,"style":1112},[767],[64,16455,16457,16460],{"className":16456},[129],[64,16458,11877],{"className":16459,"style":11948},[129],[64,16461,11623],{"className":16462,"style":11639},[129,130],[64,16464,783],{"className":16465},[782],[64,16467,16469],{"className":16468},[755],[64,16470,16472],{"className":16471,"style":1157},[759],[64,16473],{},[64,16475],{"className":16476},[845,1092],[10687,16478,16480],{"className":10689,"code":16479,"language":10691,"meta":10692,"style":10692},"import numpy as np\n\n# Data: true relationship y = 3x + 2\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([5.0, 8.0, 11.0, 14.0, 17.0])\n\nw, b = 0.0, 0.0   # start at zero\nalpha = 0.01\nm = len(y)\n\nfor epoch in range(500):\n    y_pred = w * X + b              # forward pass\n    error  = y_pred - y             # residuals: ŷ - y\n\n    # Partial derivatives (the gradient)\n    dw = (2 \u002F m) * np.dot(error, X) # ∂J\u002F∂w\n    db = (2 \u002F m) * np.sum(error)    # ∂J\u002F∂b\n\n    # Gradient descent step\n    w = w - alpha * dw\n    b = b - alpha * db\n\nprint(f\"Fitted: ŷ = {w:.4f}·x + {b:.4f}\")\n# Output: ŷ = 3.0000·x + 2.0000\n",[10694,16481,16482,16496,16500,16505,16549,16589,16593,16614,16624,16641,16645,16667,16691,16710,16715,16721,16767,16803,16808,16814,16834,16853,16858,16894],{"__ignoreMap":10692},[64,16483,16484,16487,16490,16493],{"class":10698,"line":10699},[64,16485,16486],{"class":10722},"import",[64,16488,16489],{"class":10726}," numpy ",[64,16491,16492],{"class":10722},"as",[64,16494,16495],{"class":10726}," np\n",[64,16497,16498],{"class":10698,"line":10719},[64,16499,10757],{"emptyLinePlaceholder":10756},[64,16501,16502],{"class":10698,"line":10753},[64,16503,16504],{"class":10800},"# Data: true relationship y = 3x + 
2\n",[64,16506,16507,16510,16512,16515,16517,16520,16523,16526,16528,16531,16533,16536,16538,16541,16543,16546],{"class":10698,"line":10760},[64,16508,16509],{"class":10726},"X ",[64,16511,94],{"class":10710},[64,16513,16514],{"class":10726}," np",[64,16516,2055],{"class":10710},[64,16518,16519],{"class":10706},"array",[64,16521,16522],{"class":10710},"([",[64,16524,16525],{"class":10733},"1.0",[64,16527,710],{"class":10710},[64,16529,16530],{"class":10733}," 2.0",[64,16532,710],{"class":10710},[64,16534,16535],{"class":10733}," 3.0",[64,16537,710],{"class":10710},[64,16539,16540],{"class":10733}," 4.0",[64,16542,710],{"class":10710},[64,16544,16545],{"class":10733}," 5.0",[64,16547,16548],{"class":10710},"])\n",[64,16550,16551,16554,16556,16558,16560,16562,16564,16567,16569,16572,16574,16577,16579,16582,16584,16587],{"class":10698,"line":10774},[64,16552,16553],{"class":10726},"y ",[64,16555,94],{"class":10710},[64,16557,16514],{"class":10726},[64,16559,2055],{"class":10710},[64,16561,16519],{"class":10706},[64,16563,16522],{"class":10710},[64,16565,16566],{"class":10733},"5.0",[64,16568,710],{"class":10710},[64,16570,16571],{"class":10733}," 8.0",[64,16573,710],{"class":10710},[64,16575,16576],{"class":10733}," 11.0",[64,16578,710],{"class":10710},[64,16580,16581],{"class":10733}," 14.0",[64,16583,710],{"class":10710},[64,16585,16586],{"class":10733}," 17.0",[64,16588,16548],{"class":10710},[64,16590,16591],{"class":10698,"line":10792},[64,16592,10757],{"emptyLinePlaceholder":10756},[64,16594,16595,16597,16599,16602,16604,16607,16609,16611],{"class":10698,"line":10797},[64,16596,11657],{"class":10726},[64,16598,710],{"class":10710},[64,16600,16601],{"class":10726}," b ",[64,16603,94],{"class":10710},[64,16605,16606],{"class":10733}," 0.0",[64,16608,710],{"class":10710},[64,16610,16606],{"class":10733},[64,16612,16613],{"class":10800},"   # start at zero\n",[64,16615,16616,16619,16621],{"class":10698,"line":10804},[64,16617,16618],{"class":10726},"alpha 
",[64,16620,94],{"class":10710},[64,16622,16623],{"class":10733}," 0.01\n",[64,16625,16626,16629,16631,16634,16636,16638],{"class":10698,"line":10810},[64,16627,16628],{"class":10726},"m ",[64,16630,94],{"class":10710},[64,16632,16633],{"class":10706}," len",[64,16635,700],{"class":10710},[64,16637,90],{"class":10706},[64,16639,16640],{"class":10710},")\n",[64,16642,16643],{"class":10698,"line":10821},[64,16644,10757],{"emptyLinePlaceholder":10756},[64,16646,16648,16651,16654,16657,16660,16662,16665],{"class":10698,"line":16647},11,[64,16649,16650],{"class":10722},"for",[64,16652,16653],{"class":10726}," epoch ",[64,16655,16656],{"class":10722},"in",[64,16658,16659],{"class":10706}," range",[64,16661,700],{"class":10710},[64,16663,16664],{"class":10733},"500",[64,16666,10716],{"class":10710},[64,16668,16670,16673,16675,16678,16680,16683,16685,16688],{"class":10698,"line":16669},12,[64,16671,16672],{"class":10726},"    y_pred ",[64,16674,94],{"class":10710},[64,16676,16677],{"class":10726}," w ",[64,16679,10742],{"class":10710},[64,16681,16682],{"class":10726}," X ",[64,16684,103],{"class":10710},[64,16686,16687],{"class":10726}," b              ",[64,16689,16690],{"class":10800},"# forward pass\n",[64,16692,16694,16697,16699,16702,16704,16707],{"class":10698,"line":16693},13,[64,16695,16696],{"class":10726},"    error  ",[64,16698,94],{"class":10710},[64,16700,16701],{"class":10726}," y_pred ",[64,16703,10786],{"class":10710},[64,16705,16706],{"class":10726}," y             ",[64,16708,16709],{"class":10800},"# residuals: ŷ - y\n",[64,16711,16713],{"class":10698,"line":16712},14,[64,16714,10757],{"emptyLinePlaceholder":10756},[64,16716,16718],{"class":10698,"line":16717},15,[64,16719,16720],{"class":10800},"    # Partial derivatives (the gradient)\n",[64,16722,16724,16727,16729,16732,16734,16737,16740,16742,16745,16747,16749,16752,16754,16757,16759,16762,16764],{"class":10698,"line":16723},16,[64,16725,16726],{"class":10726},"    dw 
",[64,16728,94],{"class":10710},[64,16730,16731],{"class":10710}," (",[64,16733,346],{"class":10733},[64,16735,16736],{"class":10710}," \u002F",[64,16738,16739],{"class":10726}," m",[64,16741,719],{"class":10710},[64,16743,16744],{"class":10710}," *",[64,16746,16514],{"class":10726},[64,16748,2055],{"class":10710},[64,16750,16751],{"class":10706},"dot",[64,16753,700],{"class":10710},[64,16755,16756],{"class":10706},"error",[64,16758,710],{"class":10710},[64,16760,16761],{"class":10706}," X",[64,16763,719],{"class":10710},[64,16765,16766],{"class":10800}," # ∂J\u002F∂w\n",[64,16768,16770,16773,16775,16777,16779,16781,16783,16785,16787,16789,16791,16794,16796,16798,16800],{"class":10698,"line":16769},17,[64,16771,16772],{"class":10726},"    db ",[64,16774,94],{"class":10710},[64,16776,16731],{"class":10710},[64,16778,346],{"class":10733},[64,16780,16736],{"class":10710},[64,16782,16739],{"class":10726},[64,16784,719],{"class":10710},[64,16786,16744],{"class":10710},[64,16788,16514],{"class":10726},[64,16790,2055],{"class":10710},[64,16792,16793],{"class":10706},"sum",[64,16795,700],{"class":10710},[64,16797,16756],{"class":10706},[64,16799,719],{"class":10710},[64,16801,16802],{"class":10800},"    # ∂J\u002F∂b\n",[64,16804,16806],{"class":10698,"line":16805},18,[64,16807,10757],{"emptyLinePlaceholder":10756},[64,16809,16811],{"class":10698,"line":16810},19,[64,16812,16813],{"class":10800},"    # Gradient descent step\n",[64,16815,16817,16820,16822,16824,16826,16829,16831],{"class":10698,"line":16816},20,[64,16818,16819],{"class":10726},"    w ",[64,16821,94],{"class":10710},[64,16823,16677],{"class":10726},[64,16825,10786],{"class":10710},[64,16827,16828],{"class":10726}," alpha ",[64,16830,10742],{"class":10710},[64,16832,16833],{"class":10726}," dw\n",[64,16835,16837,16840,16842,16844,16846,16848,16850],{"class":10698,"line":16836},21,[64,16838,16839],{"class":10726},"    b 
",[64,16841,94],{"class":10710},[64,16843,16601],{"class":10726},[64,16845,10786],{"class":10710},[64,16847,16828],{"class":10726},[64,16849,10742],{"class":10710},[64,16851,16852],{"class":10726}," db\n",[64,16854,16856],{"class":10698,"line":16855},22,[64,16857,10757],{"emptyLinePlaceholder":10756},[64,16859,16861,16863,16865,16867,16870,16872,16874,16877,16879,16882,16884,16886,16888,16890,16892],{"class":10698,"line":16860},23,[64,16862,10824],{"class":10706},[64,16864,700],{"class":10710},[64,16866,1544],{"class":10702},[64,16868,16869],{"class":10831},"\"Fitted: ŷ = ",[64,16871,10835],{"class":10733},[64,16873,11657],{"class":10706},[64,16875,16876],{"class":10702},":.4f",[64,16878,10841],{"class":10733},[64,16880,16881],{"class":10831},"·x + ",[64,16883,10835],{"class":10733},[64,16885,106],{"class":10706},[64,16887,16876],{"class":10702},[64,16889,10841],{"class":10733},[64,16891,10859],{"class":10831},[64,16893,16640],{"class":10710},[64,16895,16897],{"class":10698,"line":16896},24,[64,16898,16899],{"class":10800},"# Output: ŷ = 3.0000·x + 2.0000\n",[12,16901,16902],{},"The derivative — computed analytically with calculus, then applied iteratively — is what drives the entire learning process.",[26,16904],{},[51,16906,16908],{"id":16907},"summary","Summary",[411,16910,16911,16921],{},[414,16912,16913],{},[417,16914,16915,16918],{},[420,16916,16917],{},"Concept",[420,16919,16920],{},"One-Line Definition",[530,16922,16923,17066,17257,17317,17615,17837,18136,18146],{},[417,16924,16925,16930],{},[535,16926,16927],{},[21,16928,16929],{},"Slope of a 
line",[535,16931,16932,17065],{},[64,16933,16935,16964],{"className":16934},[71],[64,16936,16938],{"className":16937},[75],[77,16939,16940],{"xmlns":79},[82,16941,16942,16962],{},[85,16943,16944,16946,16948],{},[88,16945,97],{},[92,16947,94],{},[1002,16949,16950,16956],{},[85,16951,16952,16954],{},[88,16953,1009],{"mathvariant":1008},[88,16955,90],{},[85,16957,16958,16960],{},[88,16959,1009],{"mathvariant":1008},[88,16961,100],{},[108,16963,1944],{"encoding":110},[64,16965,16967,16985],{"className":16966,"ariaHidden":116},[115],[64,16968,16970,16973,16976,16979,16982],{"className":16969},[120],[64,16971],{"className":16972,"style":212},[124],[64,16974,97],{"className":16975},[129,130],[64,16977],{"className":16978,"style":136},[135],[64,16980,94],{"className":16981},[140],[64,16983],{"className":16984,"style":136},[135],[64,16986,16988,16991],{"className":16987},[120],[64,16989],{"className":16990,"style":1972},[124],[64,16992,16994,16997,17062],{"className":16993},[129],[64,16995],{"className":16996},[736,1092],[64,16998,17000],{"className":16999},[1002],[64,17001,17003,17054],{"className":17002},[750,751],[64,17004,17006,17051],{"className":17005},[755],[64,17007,17009,17026,17034],{"className":17008,"style":1991},[759],[64,17010,17011,17014],{"style":1994},[64,17012],{"className":17013,"style":1112},[767],[64,17015,17017],{"className":17016},[772,773,774,775],[64,17018,17020,17023],{"className":17019},[129,775],[64,17021,1009],{"className":17022},[129,775],[64,17024,100],{"className":17025},[129,130,775],[64,17027,17028,17031],{"style":1124},[64,17029],{"className":17030,"style":1112},[767],[64,17032],{"className":17033,"style":1132},[1131],[64,17035,17036,17039],{"style":2020},[64,17037],{"className":17038,"style":1112},[767],[64,17040,17042],{"className":17041},[772,773,774,775],[64,17043,17045,17048],{"className":17044},[129,775],[64,17046,1009],{"className":17047},[129,775],[64,17049,90],{"className":17050,"style":131},[129,130,775],[64,17052,783],{"className
":17053},[782],[64,17055,17057],{"className":17056},[755],[64,17058,17060],{"className":17059,"style":2045},[759],[64,17061],{},[64,17063],{"className":17064},[845,1092]," — constant rate of change",[417,17067,17068,17073],{},[535,17069,17070],{},[21,17071,17072],{},"Average rate of change",[535,17074,17075,17228,17229],{},[64,17076,17078,17118],{"className":17077},[71],[64,17079,17081],{"className":17080},[75],[77,17082,17083],{"xmlns":79},[82,17084,17085,17115],{},[85,17086,17087],{},[1002,17088,17089,17113],{},[85,17090,17091,17093,17095,17097,17099,17101,17103,17105,17107,17109,17111],{},[88,17092,1544],{},[92,17094,700],{"stretchy":699},[88,17096,100],{},[92,17098,103],{},[88,17100,2109],{},[92,17102,719],{"stretchy":699},[92,17104,1032],{},[88,17106,1544],{},[92,17108,700],{"stretchy":699},[88,17110,100],{},[92,17112,719],{"stretchy":699},[88,17114,2109],{},[108,17116,17117],{"encoding":110},"\\frac{f(x+h)-f(x)}{h}",[64,17119,17121],{"className":17120,"ariaHidden":116},[115],[64,17122,17124,17128],{"className":17123},[120],[64,17125],{"className":17126,"style":17127},[124],"height:1.355em;vertical-align:-0.345em;",[64,17129,17131,17134,17225],{"className":17130},[129],[64,17132],{"className":17133},[736,1092],[64,17135,17137],{"className":17136},[1002],[64,17138,17140,17217],{"className":17139},[750,751],[64,17141,17143,17214],{"className":17142},[755],[64,17144,17147,17161,17169],{"className":17145,"style":17146},[759],"height:1.01em;",[64,17148,17149,17152],{"style":1994},[64,17150],{"className":17151,"style":1112},[767],[64,17153,17155],{"className":17154},[772,773,774,775],[64,17156,17158],{"className":17157},[129,775],[64,17159,2109],{"className":17160},[129,130,775],[64,17162,17163,17166],{"style":1124},[64,17164],{"className":17165,"style":1112},[767],[64,17167],{"className":17168,"style":1132},[1131],[64,17170,17172,17175],{"style":17171},"top:-3.485em;",[64,17173],{"className":17174,"style":1112},[767],[64,17176,17178],{"className":17177},[772,773,774
,775],[64,17179,17181,17184,17187,17190,17193,17196,17199,17202,17205,17208,17211],{"className":17180},[129,775],[64,17182,1544],{"className":17183,"style":1575},[129,130,775],[64,17185,700],{"className":17186},[736,775],[64,17188,100],{"className":17189},[129,130,775],[64,17191,103],{"className":17192},[164,775],[64,17194,2109],{"className":17195},[129,130,775],[64,17197,719],{"className":17198},[845,775],[64,17200,1032],{"className":17201},[164,775],[64,17203,1544],{"className":17204,"style":1575},[129,130,775],[64,17206,700],{"className":17207},[736,775],[64,17209,100],{"className":17210},[129,130,775],[64,17212,719],{"className":17213},[845,775],[64,17215,783],{"className":17216},[782],[64,17218,17220],{"className":17219},[755],[64,17221,17223],{"className":17222,"style":2045},[759],[64,17224],{},[64,17226],{"className":17227},[845,1092]," — slope of secant over interval ",[64,17230,17232,17245],{"className":17231},[71],[64,17233,17235],{"className":17234},[75],[77,17236,17237],{"xmlns":79},[82,17238,17239,17243],{},[85,17240,17241],{},[88,17242,2109],{},[108,17244,2109],{"encoding":110},[64,17246,17248],{"className":17247,"ariaHidden":116},[115],[64,17249,17251,17254],{"className":17250},[120],[64,17252],{"className":17253,"style":174},[124],[64,17255,2109],{"className":17256},[129,130],[417,17258,17259,17264],{},[535,17260,17261],{},[21,17262,17263],{},"Limit",[535,17265,17266,17267],{},"The value an expression approaches as 
",[64,17268,17270,17287],{"className":17269},[71],[64,17271,17273],{"className":17272},[75],[77,17274,17275],{"xmlns":79},[82,17276,17277,17285],{},[85,17278,17279,17281,17283],{},[88,17280,2109],{},[92,17282,3197],{},[344,17284,537],{},[108,17286,4394],{"encoding":110},[64,17288,17290,17308],{"className":17289,"ariaHidden":116},[115],[64,17291,17293,17296,17299,17302,17305],{"className":17292},[120],[64,17294],{"className":17295,"style":174},[124],[64,17297,2109],{"className":17298},[129,130],[64,17300],{"className":17301,"style":136},[135],[64,17303,3197],{"className":17304},[140],[64,17306],{"className":17307,"style":136},[135],[64,17309,17311,17314],{"className":17310},[120],[64,17312],{"className":17313,"style":406},[124],[64,17315,537],{"className":17316},[129],[417,17318,17319,17323],{},[535,17320,17321],{},[21,17322,6238],{},[535,17324,17325,17614],{},[64,17326,17328,17398],{"className":17327},[71],[64,17329,17331],{"className":17330},[75],[77,17332,17333],{"xmlns":79},[82,17334,17335,17395],{},[85,17336,17337,17343,17345,17347,17349,17351,17367],{},[1554,17338,17339,17341],{},[88,17340,1544],{},[92,17342,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,17344,700],{"stretchy":699},[88,17346,100],{},[92,17348,719],{"stretchy":699},[92,17350,94],{},[702,17352,17353,17359],{},[85,17354,17355,17357],{},[88,17356,3187],{},[92,17358,3190],{},[85,17360,17361,17363,17365],{},[88,17362,2109],{},[92,17364,3197],{},[344,17366,537],{},[1002,17368,17369,17393],{},[85,17370,17371,17373,17375,17377,17379,17381,17383,17385,17387,17389,17391],{},[88,17372,1544],{},[92,17374,700],{"stretchy":699},[88,17376,100],{},[92,17378,103],{},[88,17380,2109],{},[92,17382,719],{"stretchy":699},[92,17384,1032],{},[88,17386,1544],{},[92,17388,700],{"stretchy":699},[88,17390,100],{},[92,17392,719],{"stretchy":699},[88,17394,2109],{},[108,17396,17397],{"encoding":110},"f'(x) = \\lim_{h\\to 
0}\\frac{f(x+h)-f(x)}{h}",[64,17399,17401,17457],{"className":17400,"ariaHidden":116},[115],[64,17402,17404,17407,17439,17442,17445,17448,17451,17454],{"className":17403},[120],[64,17405],{"className":17406,"style":4750},[124],[64,17408,17410,17413],{"className":17409},[129],[64,17411,1544],{"className":17412,"style":1575},[129,130],[64,17414,17416],{"className":17415},[746],[64,17417,17419],{"className":17418},[750],[64,17420,17422],{"className":17421},[755],[64,17423,17425],{"className":17424,"style":4769},[759],[64,17426,17427,17430],{"style":1756},[64,17428],{"className":17429,"style":768},[767],[64,17431,17433],{"className":17432},[772,773,774,775],[64,17434,17436],{"className":17435},[129,775],[64,17437,4731],{"className":17438},[129,775],[64,17440,700],{"className":17441},[736],[64,17443,100],{"className":17444},[129,130],[64,17446,719],{"className":17447},[845],[64,17449],{"className":17450,"style":136},[135],[64,17452,94],{"className":17453},[140],[64,17455],{"className":17456,"style":136},[135],[64,17458,17460,17463,17513,17516],{"className":17459},[120],[64,17461],{"className":17462,"style":17127},[124],[64,17464,17466,17469],{"className":17465},[3244],[64,17467,3187],{"className":17468},[3244],[64,17470,17472],{"className":17471},[746],[64,17473,17475,17505],{"className":17474},[750,751],[64,17476,17478,17502],{"className":17477},[755],[64,17479,17481],{"className":17480,"style":11413},[759],[64,17482,17484,17487],{"style":17483},"top:-2.55em;margin-right:0.05em;",[64,17485],{"className":17486,"style":768},[767],[64,17488,17490],{"className":17489},[772,773,774,775],[64,17491,17493,17496,17499],{"className":17492},[129,775],[64,17494,2109],{"className":17495},[129,130,775],[64,17497,3197],{"className":17498},[140,775],[64,17500,537],{"className":17501},[129,775],[64,17503,783],{"className":17504},[782],[64,17506,17508],{"className":17507},[755],[64,17509,17511],{"className":17510,"style":790},[759],[64,17512],{},[64,17514],{"className":17515,"style":800}
,[135],[64,17517,17519,17522,17611],{"className":17518},[129],[64,17520],{"className":17521},[736,1092],[64,17523,17525],{"className":17524},[1002],[64,17526,17528,17603],{"className":17527},[750,751],[64,17529,17531,17600],{"className":17530},[755],[64,17532,17534,17548,17556],{"className":17533,"style":17146},[759],[64,17535,17536,17539],{"style":1994},[64,17537],{"className":17538,"style":1112},[767],[64,17540,17542],{"className":17541},[772,773,774,775],[64,17543,17545],{"className":17544},[129,775],[64,17546,2109],{"className":17547},[129,130,775],[64,17549,17550,17553],{"style":1124},[64,17551],{"className":17552,"style":1112},[767],[64,17554],{"className":17555,"style":1132},[1131],[64,17557,17558,17561],{"style":17171},[64,17559],{"className":17560,"style":1112},[767],[64,17562,17564],{"className":17563},[772,773,774,775],[64,17565,17567,17570,17573,17576,17579,17582,17585,17588,17591,17594,17597],{"className":17566},[129,775],[64,17568,1544],{"className":17569,"style":1575},[129,130,775],[64,17571,700],{"className":17572},[736,775],[64,17574,100],{"className":17575},[129,130,775],[64,17577,103],{"className":17578},[164,775],[64,17580,2109],{"className":17581},[129,130,775],[64,17583,719],{"className":17584},[845,775],[64,17586,1032],{"className":17587},[164,775],[64,17589,1544],{"className":17590,"style":1575},[129,130,775],[64,17592,700],{"className":17593},[736,775],[64,17595,100],{"className":17596},[129,130,775],[64,17598,719],{"className":17599},[845,775],[64,17601,783],{"className":17602},[782],[64,17604,17606],{"className":17605},[755],[64,17607,17609],{"className":17608,"style":2045},[759],[64,17610],{},[64,17612],{"className":17613},[845,1092]," — instantaneous rate of change",[417,17616,17617,17622],{},[535,17618,17619],{},[21,17620,17621],{},"Power 
rule",[535,17623,17624],{},[64,17625,17627,17671],{"className":17626},[71],[64,17628,17630],{"className":17629},[75],[77,17631,17632],{"xmlns":79},[82,17633,17634,17668],{},[85,17635,17636,17646,17652,17654,17656],{},[1002,17637,17638,17640],{},[88,17639,4812],{},[85,17641,17642,17644],{},[88,17643,4812],{},[88,17645,100],{},[1554,17647,17648,17650],{},[88,17649,100],{},[88,17651,5923],{},[92,17653,94],{},[88,17655,5923],{},[1554,17657,17658,17660],{},[88,17659,100],{},[85,17661,17662,17664,17666],{},[88,17663,5923],{},[92,17665,1032],{},[344,17667,353],{},[108,17669,17670],{"encoding":110},"\\frac{d}{dx} x^n = nx^{n-1}",[64,17672,17674,17790],{"className":17673,"ariaHidden":116},[115],[64,17675,17677,17681,17752,17781,17784,17787],{"className":17676},[120],[64,17678],{"className":17679,"style":17680},[124],"height:1.2251em;vertical-align:-0.345em;",[64,17682,17684,17687,17749],{"className":17683},[129],[64,17685],{"className":17686},[736,1092],[64,17688,17690],{"className":17689},[1002],[64,17691,17693,17741],{"className":17692},[750,751],[64,17694,17696,17738],{"className":17695},[755],[64,17697,17699,17716,17724],{"className":17698,"style":13533},[759],[64,17700,17701,17704],{"style":1994},[64,17702],{"className":17703,"style":1112},[767],[64,17705,17707],{"className":17706},[772,773,774,775],[64,17708,17710,17713],{"className":17709},[129,775],[64,17711,4812],{"className":17712},[129,130,775],[64,17714,100],{"className":17715},[129,130,775],[64,17717,17718,17721],{"style":1124},[64,17719],{"className":17720,"style":1112},[767],[64,17722],{"className":17723,"style":1132},[1131],[64,17725,17726,17729],{"style":9317},[64,17727],{"className":17728,"style":1112},[767],[64,17730,17732],{"className":17731},[772,773,774,775],[64,17733,17735],{"className":17734},[129,775],[64,17736,4812],{"className":17737},[129,130,775],[64,17739,783],{"className":17740},[782],[64,17742,17744],{"className":17743},[755],[64,17745,17747],{"className":17746,"style":2045},[759],[64,17748],{
},[64,17750],{"className":17751},[845,1092],[64,17753,17755,17758],{"className":17754},[129],[64,17756,100],{"className":17757},[129,130],[64,17759,17761],{"className":17760},[746],[64,17762,17764],{"className":17763},[750],[64,17765,17767],{"className":17766},[755],[64,17768,17770],{"className":17769,"style":5963},[759],[64,17771,17772,17775],{"style":1756},[64,17773],{"className":17774,"style":768},[767],[64,17776,17778],{"className":17777},[772,773,774,775],[64,17779,5923],{"className":17780},[129,130,775],[64,17782],{"className":17783,"style":136},[135],[64,17785,94],{"className":17786},[140],[64,17788],{"className":17789,"style":136},[135],[64,17791,17793,17796,17799],{"className":17792},[120],[64,17794],{"className":17795,"style":1735},[124],[64,17797,5923],{"className":17798},[129,130],[64,17800,17802,17805],{"className":17801},[129],[64,17803,100],{"className":17804},[129,130],[64,17806,17808],{"className":17807},[746],[64,17809,17811],{"className":17810},[750],[64,17812,17814],{"className":17813},[755],[64,17815,17817],{"className":17816,"style":1735},[759],[64,17818,17819,17822],{"style":1756},[64,17820],{"className":17821,"style":768},[767],[64,17823,17825],{"className":17824},[772,773,774,775],[64,17826,17828,17831,17834],{"className":17827},[129,775],[64,17829,5923],{"className":17830},[129,130,775],[64,17832,1032],{"className":17833},[164,775],[64,17835,353],{"className":17836},[129,775],[417,17838,17839,17844],{},[535,17840,17841],{},[21,17842,17843],{},"Chain 
rule",[535,17845,17846,18135],{},[64,17847,17849,17919],{"className":17848},[71],[64,17850,17852],{"className":17851},[75],[77,17853,17854],{"xmlns":79},[82,17855,17856,17916],{},[85,17857,17858,17868,17870,17872,17874,17876,17878,17880,17882,17884,17890,17892,17894,17896,17898,17900,17902,17904,17910,17912,17914],{},[1002,17859,17860,17862],{},[88,17861,4812],{},[85,17863,17864,17866],{},[88,17865,4812],{},[88,17867,100],{},[88,17869,1544],{},[92,17871,700],{"stretchy":699},[88,17873,7399],{},[92,17875,700],{"stretchy":699},[88,17877,100],{},[92,17879,719],{"stretchy":699},[92,17881,719],{"stretchy":699},[92,17883,94],{},[1554,17885,17886,17888],{},[88,17887,1544],{},[92,17889,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,17891,700],{"stretchy":699},[88,17893,7399],{},[92,17895,700],{"stretchy":699},[88,17897,100],{},[92,17899,719],{"stretchy":699},[92,17901,719],{"stretchy":699},[92,17903,6030],{},[1554,17905,17906,17908],{},[88,17907,7399],{},[92,17909,4731],{"mathvariant":1008,"lspace":4730,"rspace":4730},[92,17911,700],{"stretchy":699},[88,17913,100],{},[92,17915,719],{"stretchy":699},[108,17917,17918],{"encoding":110},"\\frac{d}{dx}f(g(x)) = f'(g(x))\\cdot 
g'(x)",[64,17920,17922,18026,18088],{"className":17921,"ariaHidden":116},[115],[64,17923,17925,17928,17999,18002,18005,18008,18011,18014,18017,18020,18023],{"className":17924},[120],[64,17926],{"className":17927,"style":17680},[124],[64,17929,17931,17934,17996],{"className":17930},[129],[64,17932],{"className":17933},[736,1092],[64,17935,17937],{"className":17936},[1002],[64,17938,17940,17988],{"className":17939},[750,751],[64,17941,17943,17985],{"className":17942},[755],[64,17944,17946,17963,17971],{"className":17945,"style":13533},[759],[64,17947,17948,17951],{"style":1994},[64,17949],{"className":17950,"style":1112},[767],[64,17952,17954],{"className":17953},[772,773,774,775],[64,17955,17957,17960],{"className":17956},[129,775],[64,17958,4812],{"className":17959},[129,130,775],[64,17961,100],{"className":17962},[129,130,775],[64,17964,17965,17968],{"style":1124},[64,17966],{"className":17967,"style":1112},[767],[64,17969],{"className":17970,"style":1132},[1131],[64,17972,17973,17976],{"style":9317},[64,17974],{"className":17975,"style":1112},[767],[64,17977,17979],{"className":17978},[772,773,774,775],[64,17980,17982],{"className":17981},[129,775],[64,17983,4812],{"className":17984},[129,130,775],[64,17986,783],{"className":17987},[782],[64,17989,17991],{"className":17990},[755],[64,17992,17994],{"className":17993,"style":2045},[759],[64,17995],{},[64,17997],{"className":17998},[845,1092],[64,18000,1544],{"className":18001,"style":1575},[129,130],[64,18003,700],{"className":18004},[736],[64,18006,7399],{"className":18007,"style":131},[129,130],[64,18009,700],{"className":18010},[736],[64,18012,100],{"className":18013},[129,130],[64,18015,8127],{"className":18016},[845],[64,18018],{"className":18019,"style":136},[135],[64,18021,94],{"className":18022},[140],[64,18024],{"className":18025,"style":136},[135],[64,18027,18029,18032,18064,18067,18070,18073,18076,18079,18082,18085],{"className":18028},[120],[64,18030],{"className":18031,"style":4750},[124],[64,18033,1803
5,18038],{"className":18034},[129],[64,18036,1544],{"className":18037,"style":1575},[129,130],[64,18039,18041],{"className":18040},[746],[64,18042,18044],{"className":18043},[750],[64,18045,18047],{"className":18046},[755],[64,18048,18050],{"className":18049,"style":4769},[759],[64,18051,18052,18055],{"style":1756},[64,18053],{"className":18054,"style":768},[767],[64,18056,18058],{"className":18057},[772,773,774,775],[64,18059,18061],{"className":18060},[129,775],[64,18062,4731],{"className":18063},[129,775],[64,18065,700],{"className":18066},[736],[64,18068,7399],{"className":18069,"style":131},[129,130],[64,18071,700],{"className":18072},[736],[64,18074,100],{"className":18075},[129,130],[64,18077,8127],{"className":18078},[845],[64,18080],{"className":18081,"style":160},[135],[64,18083,6030],{"className":18084},[164],[64,18086],{"className":18087,"style":160},[135],[64,18089,18091,18094,18126,18129,18132],{"className":18090},[120],[64,18092],{"className":18093,"style":4750},[124],[64,18095,18097,18100],{"className":18096},[129],[64,18098,7399],{"className":18099,"style":131},[129,130],[64,18101,18103],{"className":18102},[746],[64,18104,18106],{"className":18105},[750],[64,18107,18109],{"className":18108},[755],[64,18110,18112],{"className":18111,"style":4769},[759],[64,18113,18114,18117],{"style":1756},[64,18115],{"className":18116,"style":768},[767],[64,18118,18120],{"className":18119},[772,773,774,775],[64,18121,18123],{"className":18122},[129,775],[64,18124,4731],{"className":18125},[129,775],[64,18127,700],{"className":18128},[736],[64,18130,100],{"className":18131},[129,130],[64,18133,719],{"className":18134},[845]," — essential for backprop",[417,18137,18138,18143],{},[535,18139,18140],{},[21,18141,18142],{},"Partial derivative",[535,18144,18145],{},"Derivative holding all other variables fixed",[417,18147,18148,18153],{},[535,18149,18150],{},[21,18151,18152],{},"Gradient",[535,18154,18155],{},"Vector of all partial derivatives — points toward steepest 
ascent",[12,18157,18158,18159,18162,18163,18166],{},"The derivative is the mathematical answer to the question ",[16,18160,18161],{},"\"which way is uphill?\""," In machine learning we use its negative — ",[16,18164,18165],{},"downhill"," — to train every model.",[26,18168],{},[51,18170,18172],{"id":18171},"whats-next","What's Next?",[12,18174,18175,18176,18179],{},"You now have the calculus foundation. The ",[21,18177,18178],{},"gradient descent"," algorithm takes this one concept — move opposite to the derivative — and turns it into a complete optimization engine for machine learning.",[29,18181,41,18192],{"className":18182},[18183,18184,18185,18186,18187,36,18188,1489,18189,18190,18191],"bg-indigo-50","dark:bg-indigo-900\u002F20","border","border-indigo-200","dark:border-indigo-700","p-6","flex","items-start","gap-4",[29,18193,18196,18197,18196,18206,18196,18213,41],{"className":18194},[18195],"flex-1","\n    ",[12,18198,18205],{"className":18199},[18200,18201,18202,18203,18204],"font-bold","text-indigo-900","dark:text-indigo-100","text-lg","mb-1","Next Room: Gradient Descent",[12,18207,18212],{"className":18208},[18209,18210,18211],"text-indigo-700","dark:text-indigo-300","mb-4","See how the derivative becomes an optimization algorithm — with interactive experiments, full Python code, and a walk through every step of the math.",[18214,18215,18221],"a",{"href":18216,"className":18217},"\u002Frooms\u002Fgradient-descent",[18218,1493,18219,18220,36],"inline-block","px-5","py-2","\n      Enter the Gradient Descent Room →\n    ",[18223,18224,18225],"style",{},"html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki 
.sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .s7zQu, html code.shiki .s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html pre.shiki code .sHwdD, html code.shiki .sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: 
var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}",{"title":10692,"searchDepth":10719,"depth":10719,"links":18227},[18228,18232,18235,18238,18242,18249,18253,18257,18258,18259],{"id":53,"depth":10719,"text":54,"children":18229},[18230,18231],{"id":58,"depth":10753,"text":59},{"id":680,"depth":10753,"text":681},{"id":1511,"depth":10719,"text":1512,"children":18233},[18234],{"id":2058,"depth":10753,"text":2059},{"id":3056,"depth":10719,"text":3057,"children":18236},[18237],{"id":3411,"depth":10753,"text":3412},{"id":4642,"depth":10719,"text":4643,"children":18239},[18240,18241],{"id":4646,"depth":10753,"text":4647},{"id":5280,"depth":10753,"text":5281},{"id":5879,"depth":10719,"text":5880,"children":18243},[18244,18245,18246,18247,18248],{"id":5890,"depth":10753,"text":5891},{"id":6847,"depth":10753,"text":6848},{"id":7358,"depth":10753,"text":7359},{"id":8062,"depth":10753,"text":8063},{"id":9052,"depth":10753,"text":9053},{"id":9853,"depth":10719,"text":9854,"children":18250},[18251,18252],{"id":9857,"depth":10753,"text":9858},{"id":10867,"depth":10753,"text":10868},{"id":11600,"depth":10719,"text":11601,"children":18254},[18255,18256],{"id":11847,"depth":10753,"text":11848},{"id":13050,"depth":10753,"text":13051},{"id":13978,"depth":10719,"text":13979},{"id":16907,"depth":10719,"text":16908},{"id":18171,"depth":10719,"text":18172},"2026-04-10","From slopes of lines to the calculus engine behind machine 
learning","md",{},null,"\u002Fen\u002Frooms\u002Fderivatives",{"title":5,"description":18261},"en\u002Frooms\u002Fderivatives","N\u002FA","0JvGv30RIFVDUm5qHe3F6rs0gWK4T9VkhWXav6sFFYw",[18271,18295],{"title":18272,"path":18273,"stem":18274,"children":18275,"page":18294},"En","\u002Fen","en",[18276,18280],{"title":18277,"path":18278,"stem":18279},"AI & ML Engineering Bootcamp — Batch 3","\u002Fen\u002Fbootcamp","en\u002Fbootcamp",{"title":18281,"path":18282,"stem":18283,"children":18284,"page":18294},"Rooms","\u002Fen\u002Frooms","en\u002Frooms",[18285,18289,18290],{"title":18286,"path":18287,"stem":18288},"Adam Optimizer","\u002Fen\u002Frooms\u002Fadam-optimizer","en\u002Frooms\u002Fadam-optimizer",{"title":5,"path":18265,"stem":18267},{"title":18291,"path":18292,"stem":18293},"Gradient Descent","\u002Fen\u002Frooms\u002Fgradient-descent","en\u002Frooms\u002Fgradient-descent",false,{"title":18296,"path":18297,"stem":18298,"children":18299,"page":18294},"Km","\u002Fkm","km",[18300,18304],{"title":18301,"path":18302,"stem":18303},"វគ្គបណ្តុះបណ្តាល AI & ML Engineering — ជំនាន់ទី 3","\u002Fkm\u002Fbootcamp","km\u002Fbootcamp",{"title":18281,"path":18305,"stem":18306,"children":18307,"page":18294},"\u002Fkm\u002Frooms","km\u002Frooms",[18308,18312,18316],{"title":18309,"path":18310,"stem":18311},"ស្វែងយល់ពី Adam Optimizer: GPS នៃការបង្កើត AI Model","\u002Fkm\u002Frooms\u002Fadam-optimizer","km\u002Frooms\u002Fadam-optimizer",{"title":18313,"path":18314,"stem":18315},"ដេរីវេ — ភាសានៃការផ្លាស់ប្ដូរ","\u002Fkm\u002Frooms\u002Fderivatives","km\u002Frooms\u002Fderivatives",{"title":18317,"path":18318,"stem":18319},"ស្វែងយល់ពី Gradient Descent 
Algorithm","\u002Fkm\u002Frooms\u002Fgradient-descent","km\u002Frooms\u002Fgradient-descent",[18321,18324,18329,18334,18338,18342,18347,18352,18357,18362,18367,18372,18377,18382,18385,18389,18394,18398,18403,18408,18413,18418,18422,18427,18432,18437,18442,18447,18452,18456,18461,18466,18471,18475,18480,18483,18486,18490,18494,18498,18502,18506,18510,18513,18517,18521,18525,18529,18533,18537,18541,18545,18548,18552,18556,18560,18564,18568,18572,18576,18580,18583,18588,18592,18597,18602,18607,18612,18617,18622,18626,18631,18635,18639,18644,18649,18654,18659,18664,18669,18674,18678,18683,18688,18693,18698,18703,18706,18711,18716,18720,18724,18729,18734,18739,18744,18749,18754,18759,18764,18767,18771,18776,18780,18785,18790,18795,18800,18804,18809,18814,18819,18824,18829,18834,18838,18843,18848,18853,18858,18862,18865,18869,18874,18879,18884,18889,18894,18899,18903,18908,18913,18918,18923,18928,18933,18938,18943,18947,18952,18957,18962,18967,18972,18977,18981,18986,18989,18994,18998,19003,19008,19013,19018,19023,19028,19032,19037,19041,19045,19049,19053,19057,19062,19067,19072,19077,19082,19087,19092,19097,19102],{"id":18278,"title":18277,"titles":18322,"content":18323,"level":10699},[],"A 22-week hands-on program taking you from mathematical foundations to deploying production ML systems. Build real models, ship real code.",{"id":18325,"title":18326,"titles":18327,"content":18328,"level":10719},"\u002Fen\u002Fbootcamp#welcome-future-ml-engineers","Welcome, Future ML Engineers!",[18277],"Over 22 weeks you will grow from someone who uses ML models into someone who builds, trains, and deploys them in production. 
The curriculum blends mathematical foundations, classical machine learning, deep learning, Transformers, and MLOps into a single coherent journey.",{"id":18330,"title":18331,"titles":18332,"content":18333,"level":10719},"\u002Fen\u002Fbootcamp#program-at-a-glance","Program at a Glance",[18277],"Duration22 weeks (66 hours of instruction)ScheduleThursday & Friday · 1.5 hours \u002F sessionWeekly Commitment3 hrs in-class + 4–6 hrs self-studyStart DateMarch 26, 2026 (Still Accepting Applications)ApproachMath → Classical ML → Deep Learning → Transformers → MLOps",{"id":18335,"title":18336,"titles":18337,"content":10692,"level":10719},"\u002Fen\u002Fbootcamp#tech-stack","Tech Stack",[18277],{"id":18339,"title":18340,"titles":18341,"content":10692,"level":10719},"\u002Fen\u002Fbootcamp#curriculum-modules","Curriculum Modules",[18277],{"id":18343,"title":18344,"titles":18345,"content":18346,"level":10753},"\u002Fen\u002Fbootcamp#module-1-foundations-4-weeks","Module 1 · Foundations (4 weeks)",[18277,18340],"Mathematical and conceptual building blocks for ML. Build the intuition behind how machines learn before writing a single fit() call. Week 1 — AI\u002FML\u002FDeep Learning landscape · supervised vs unsupervised learning · types of ML problemsWeek 2 — Vectors & matrices · dot products & matrix multiplication · gradients · gradient descent from scratchWeek 3 — Probability distributions · Bayes' theorem · MSE & cross-entropy loss · bias-variance tradeoffWeek 4 — Exploratory data analysis · handling missing data · feature scaling · train your first model 🎯 End-of-Module Project: Implement gradient descent and linear regression from scratch using only NumPy.",{"id":18348,"title":18349,"titles":18350,"content":18351,"level":10753},"\u002Fen\u002Fbootcamp#module-2-classical-machine-learning-5-weeks","Module 2 · Classical Machine Learning (5 weeks)",[18277,18340],"The Scikit-learn ecosystem and tabular data mastery. 
Build, evaluate, and tune real-world classifiers and regressors. Week 5 — Linear regression (OLS) · logistic regression · sigmoid function · decision boundariesWeek 6 — Decision trees (Gini\u002Fentropy splitting) · random forests · bagging · feature importanceWeek 7 — XGBoost & LightGBM gradient boosting · metrics (precision, recall, F1, AUC-ROC) · confusion matricesWeek 8 — K-fold cross-validation · grid & Bayesian hyperparameter search · feature engineering · preventing data leakageWeek 9 — Kaggle competition workflow · end-to-end sklearn Pipeline · model serialization 🎯 End-of-Module Project: Compete in a Kaggle tabular-data challenge and ship a complete sklearn pipeline.",{"id":18353,"title":18354,"titles":18355,"content":18356,"level":10753},"\u002Fen\u002Fbootcamp#module-3-deep-learning-with-pytorch-4-weeks","Module 3 · Deep Learning with PyTorch (4 weeks)",[18277,18340],"Neural networks from first principles to GPU-accelerated CNNs. Understand every layer, gradient update, and training trick. Week 10 — Perceptrons · multi-layer networks · forward propagation · backpropagation & chain ruleWeek 11 — Activation functions (ReLU, Softmax) · PyTorch tensors · custom Dataset & DataLoader · data augmentationWeek 12 — Training loops · Adam\u002FSGD optimizers · early stopping · model checkpointing with torch.saveWeek 13 — Convolutional layers & pooling · ResNet\u002FVGG architectures · transfer learning · fine-tuning strategies 🎯 End-of-Module Project: Build an image classifier using transfer learning with a pretrained CNN.",{"id":18358,"title":18359,"titles":18360,"content":18361,"level":10753},"\u002Fen\u002Fbootcamp#module-4-transformers-hugging-face-3-weeks","Module 4 · Transformers & Hugging Face (3 weeks)",[18277,18340],"The attention mechanism that powers modern AI. Fine-tune BERT and GPT-class models for real NLP tasks. 
Week 14 — Self-attention · multi-head attention · transformer architecture · tokenization (BPE\u002FWordPiece) · positional encodingWeek 15 — Hugging Face Hub & Pipeline API · fine-tuning with Trainer API · BERT for text classification & NERWeek 16 — NLP competition strategy · pushing models to Hugging Face Hub · building a live text classification service 🎯 End-of-Module Project: Fine-tune a transformer on a Kaggle NLP challenge and publish it to Hugging Face Hub.",{"id":18363,"title":18364,"titles":18365,"content":18366,"level":10753},"\u002Fen\u002Fbootcamp#module-5-mlops-deployment-3-weeks","Module 5 · MLOps & Deployment (3 weeks)",[18277,18340],"From Jupyter notebook to a production-grade API. Learn the tools and practices every ML engineer needs in industry. Week 17 — Model serialization (pickle \u002F joblib \u002F ONNX) · DVC versioning · REST prediction APIs with FastAPIWeek 18 — Docker images & containers · Dockerfile best practices · MLflow experiment tracking & model registryWeek 19 — GitHub Actions CI\u002FCD · automated testing · data drift detection · model monitoring & alerting 🎯 End-of-Module Project: Deploy an ML model end-to-end with FastAPI, Docker, and a CI\u002FCD pipeline.",{"id":18368,"title":18369,"titles":18370,"content":18371,"level":10753},"\u002Fen\u002Fbootcamp#module-6-capstone-1-week","Module 6 · Capstone (1 week)",[18277,18340],"Build and ship a full production ML system — from raw data to a live API. 
Source a real dataset (Kaggle or real-world problem)Full EDA, preprocessing, and feature engineeringTrain and compare multiple models with documented hyperparameter tuningDeploy a REST API (FastAPI) inside a Docker containerSet up a CI\u002FCD pipeline with GitHub Actions10–15 minute live demo presentation Example projects: sentiment analysis · medical image classification · real-estate price prediction · fake news detection · customer churn · text summarization API.",{"id":18373,"title":18374,"titles":18375,"content":18376,"level":10719},"\u002Fen\u002Fbootcamp#who-is-this-for","Who Is This For?",[18277],"Developers who know Python and want to break into ML engineeringUniversity students wanting practical, resume-worthy ML projectsAnyone who has taken online courses but wants structured, project-based depth",{"id":18378,"title":18379,"titles":18380,"content":18381,"level":10719},"\u002Fen\u002Fbootcamp#what-you-will-build","What You Will Build",[18277],"By the end of the bootcamp you will have trained and deployed real models, competed in Kaggle challenges, and delivered a capstone project that demonstrates full-stack ML skills.",{"id":18287,"title":18286,"titles":18383,"content":18384,"level":10699},[],"From fixed learning rates to adaptive moments — understanding the optimizer behind modern deep learning Image source: DL Notes: Advanced Gradient Descent Adam (short for Adaptive Moment Estimation) was introduced by Diederik Kingma and Jimmy Ba in 2015 [1], and quickly became the go-to optimizer in deep learning. 
This article explains why a fixed learning rate fails, what Adam does differently, and how it works — from first principles to code.",{"id":18386,"title":18387,"titles":18388,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#the-problem-with-a-fixed-learning-rate","The Problem with a Fixed Learning Rate",[18286],{"id":18390,"title":18391,"titles":18392,"content":18393,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#the-one-speed-fits-all-dilemma","The \"One Speed Fits All\" Dilemma",[18286,18387],"Imagine you're hiking through a mountain range with one strict rule: every step you take must be exactly the same length — no more, no less. On a steep cliff face, that fixed step length is terrifying — one step too large and you tumble. On a long, gentle slope to the valley, that same step feels absurdly tiny — it would take forever to reach the bottom. This is exactly the problem with a fixed learning rate α\\alphaα in gradient descent: θnew=θold−α∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\nabla J(\\theta)θnew​=θold​−α∇J(θ) The single scalar α\\alphaα controls the step size for every parameter — whether that parameter has large gradients or tiny ones, whether it's converging well or oscillating wildly.",{"id":18395,"title":18396,"titles":18397,"content":10692,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#three-ways-a-fixed-learning-rate-fails","Three Ways a Fixed Learning Rate Fails",[18286,18387],{"id":18399,"title":18400,"titles":18401,"content":18402,"level":10760},"\u002Fen\u002Frooms\u002Fadam-optimizer#_1-too-large-overshooting","1. 
Too Large — Overshooting",[18286,18387,18396],"When α\\alphaα is too large, gradient descent overshoots the minimum and bounces back and forth: J(θ)=θ2,α=1.0J(\\theta) = \\theta^2, \\quad \\alpha = 1.0J(θ)=θ2,α=1.0 θ0=5→−1.0×10θ1=−5→−1.0×(−10)θ2=5→⋯\\theta_0 = 5 \\xrightarrow{-1.0 \\times 10} \\theta_1 = -5 \\xrightarrow{-1.0 \\times (-10)} \\theta_2 = 5 \\xrightarrow{\\cdots}θ0​=5−1.0×10​θ1​=−5−1.0×(−10)​θ2​=5⋯​ The loss never decreases — it oscillates forever around the minimum.",{"id":18404,"title":18405,"titles":18406,"content":18407,"level":10760},"\u002Fen\u002Frooms\u002Fadam-optimizer#_2-too-small-crawling-forever","2. Too Small — Crawling Forever",[18286,18387,18396],"When α\\alphaα is too small, learning works but is painfully slow: θ0=5,α=0.001\\theta_0 = 5, \\quad \\alpha = 0.001θ0​=5,α=0.001 θ1=5−0.001×10=4.99,θ2=4.98,…\\theta_1 = 5 - 0.001 \\times 10 = 4.99, \\quad \\theta_2 = 4.98, \\quad \\ldotsθ1​=5−0.001×10=4.99,θ2​=4.98,… Thousands of iterations just to move a little. In practice with millions of parameters, this is computationally catastrophic.",{"id":18409,"title":18410,"titles":18411,"content":18412,"level":10760},"\u002Fen\u002Frooms\u002Fadam-optimizer#_3-the-ravine-problem-oscillation","3. The \"Ravine\" Problem — Oscillation",[18286,18387,18396],"In higher dimensions, loss landscapes often look like narrow ravines — very steep in one direction, nearly flat in another. With a fixed learning rate: The steep direction demands a small α\\alphaα to avoid oscillating across the ravine walls.The flat direction needs a large α\\alphaα to make any progress along the ravine floor. No single fixed α\\alphaα can satisfy both at the same time. LeCun et al. [2] provide an early and thorough analysis of these pathological loss-landscape behaviours and their impact on convergence. The Core Pain\n  Different parameters need different step sizes. 
A fixed learning rate treats all of them the same — and that's the bottleneck.",{"id":18414,"title":18415,"titles":18416,"content":18417,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#enter-adam-the-gps-of-optimizers","Enter Adam: The GPS of Optimizers",[18286],"If vanilla gradient descent is hiking with a fixed stride, Adam is using a GPS with adaptive routing: it speeds up on highways, slows down in tight corners, and remembers which paths were already explored. Adam's secret is tracking two things per parameter at every step: QuantitySymbolIntuition1st Moment (momentum)mtm_tmt​Which direction have gradients been pointing recently?2nd Moment (adaptive scale)vtv_tvt​How large have the gradients been recently? By dividing by the square root of the 2nd moment, Adam automatically shrinks the step size for parameters with consistently large gradients and enlarges it for parameters with small gradients.",{"id":18419,"title":18420,"titles":18421,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#building-adam-from-scratch","Building Adam from Scratch",[18286],{"id":18423,"title":18424,"titles":18425,"content":18426,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#step-1-momentum-smoothing-the-direction","Step 1 — Momentum: Smoothing the Direction",[18286,18420],"The problem it solves: Gradients are noisy. Every mini-batch gives a slightly different gradient. Chasing each individual noisy gradient makes the path jagged. The idea: Keep a running average of past gradients, like a ball rolling downhill — it builds speed in a consistent direction and isn't thrown off by small bumps. mt=β1⋅mt−1+(1−β1)⋅gtm_t = \\beta_1 \\cdot m_{t-1} + (1 - \\beta_1) \\cdot g_tmt​=β1​⋅mt−1​+(1−β1​)⋅gt​ Where: gtg_tgt​ = current gradient ∇J(θt)\\nabla J(\\theta_t)∇J(θt​)β1\\beta_1β1​ = decay rate, typically 0.9 (90% weight on the past, 10% on the new gradient)m0=0m_0 = 0m0​=0 Analogy: It's like computing a weighted average of recent directions. 
Gradient yesterday counts more than gradient from 10 steps ago. Sutskever et al. [3] demonstrated that this momentum term is critical for fast, stable convergence in deep networks.",{"id":18428,"title":18429,"titles":18430,"content":18431,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#step-2-adaptive-scale-normalizing-by-history","Step 2 — Adaptive Scale: Normalizing by History",[18286,18420],"The problem it solves: Some parameters have consistently large gradients; others have tiny ones. We want large-gradient parameters to take smaller steps, and small-gradient parameters to take larger steps. The idea: Track the running average of squared gradients: vt=β2⋅vt−1+(1−β2)⋅gt2v_t = \\beta_2 \\cdot v_{t-1} + (1 - \\beta_2) \\cdot g_t^2vt​=β2​⋅vt−1​+(1−β2​)⋅gt2​ Where: β2\\beta_2β2​ = decay rate, typically 0.999v0=0v_0 = 0v0​=0 A parameter that always receives large gradients will accumulate a large vtv_tvt​. Dividing the step size by vt\\sqrt{v_t}vt​​ keeps its updates proportionally small. This is Adam's per-parameter learning rate.",{"id":18433,"title":18434,"titles":18435,"content":18436,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#step-3-bias-correction-fixing-cold-start-errors","Step 3 — Bias Correction: Fixing Cold-Start Errors",[18286,18420],"The problem it solves: Since m0=0m_0 = 0m0​=0 and v0=0v_0 = 0v0​=0, the first few estimates of mtm_tmt​ and vtv_tvt​ are heavily biased toward zero (we haven't accumulated enough history yet). 
The fix: Divide by (1−βt)(1 - \\beta^t)(1−βt) to correct for the initial bias: m^t=mt1−β1t,v^t=vt1−β2t\\hat{m}_t = \\frac{m_t}{1 - \\beta_1^t}, \\qquad \\hat{v}_t = \\frac{v_t}{1 - \\beta_2^t}m^t​=1−β1t​mt​​,v^t​=1−β2t​vt​​ As ttt grows, βt→0\\beta^t \\to 0βt→0, so the correction factor 11−βt→1\\frac{1}{1-\\beta^t} \\to 11−βt1​→1 and has no effect — it only matters in the early steps.",{"id":18438,"title":18439,"titles":18440,"content":18441,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#step-4-the-final-update-rule","Step 4 — The Final Update Rule",[18286,18420],"θt+1=θt−αv^t+ϵ⋅m^t\\boxed{\\theta_{t+1} = \\theta_t - \\frac{\\alpha}{\\sqrt{\\hat{v}_t} + \\epsilon} \\cdot \\hat{m}_t}θt+1​=θt​−v^t​​+ϵα​⋅m^t​​ Where ϵ≈10−8\\epsilon \\approx 10^{-8}ϵ≈10−8 prevents division by zero. Default hyperparameters from the original paper [1]: HyperparameterSymbolDefaultLearning rateα\\alphaα0.0011st moment decayβ1\\beta_1β1​0.92nd moment decayβ2\\beta_2β2​0.999Numerical stabilityϵ\\epsilonϵ10−810^{-8}10−8",{"id":18443,"title":18444,"titles":18445,"content":18446,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#worked-example-adam-in-action","Worked Example: Adam in Action",[18286],"Let's trace Adam manually on the same simple function we used for gradient descent: J(θ)=θ2,∇J(θ)=2θJ(\\theta) = \\theta^2, \\qquad \\nabla J(\\theta) = 2\\thetaJ(θ)=θ2,∇J(θ)=2θ Starting at θ0=5\\theta_0 = 5θ0​=5, with default hyperparameters (α=0.001\\alpha = 0.001α=0.001, β1=0.9\\beta_1 = 0.9β1​=0.9, β2=0.999\\beta_2 = 0.999β2​=0.999, ϵ=10−8\\epsilon = 10^{-8}ϵ=10−8). Initialize: m0=0m_0 = 0m0​=0, v0=0v_0 = 0v0​=0. 
Step t=1t=1t=1: g1=2×5=10g_1 = 2 \\times 5 = 10g1​=2×5=10 m1=0.9×0+0.1×10=1.0m_1 = 0.9 \\times 0 + 0.1 \\times 10 = 1.0m1​=0.9×0+0.1×10=1.0 v1=0.999×0+0.001×100=0.1v_1 = 0.999 \\times 0 + 0.001 \\times 100 = 0.1v1​=0.999×0+0.001×100=0.1 m^1=1.01−0.91=1.00.1=10.0\\hat{m}_1 = \\frac{1.0}{1 - 0.9^1} = \\frac{1.0}{0.1} = 10.0m^1​=1−0.911.0​=0.11.0​=10.0 v^1=0.11−0.9991=0.10.001=100.0\\hat{v}_1 = \\frac{0.1}{1 - 0.999^1} = \\frac{0.1}{0.001} = 100.0v^1​=1−0.99910.1​=0.0010.1​=100.0 θ1=5−0.001100+10−8×10.0=5−0.00110×10.0=5−0.001=4.999\\theta_1 = 5 - \\frac{0.001}{\\sqrt{100} + 10^{-8}} \\times 10.0 = 5 - \\frac{0.001}{10} \\times 10.0 = 5 - 0.001 = 4.999θ1​=5−100​+10−80.001​×10.0=5−100.001​×10.0=5−0.001=4.999 Step t=2t=2t=2: g2=2×4.999=9.998g_2 = 2 \\times 4.999 = 9.998g2​=2×4.999=9.998 m2=0.9×1.0+0.1×9.998=1.8998m_2 = 0.9 \\times 1.0 + 0.1 \\times 9.998 = 1.8998m2​=0.9×1.0+0.1×9.998=1.8998 v2=0.999×0.1+0.001×9.9982=0.1999v_2 = 0.999 \\times 0.1 + 0.001 \\times 9.998^2 = 0.1999v2​=0.999×0.1+0.001×9.9982=0.1999 With bias correction and update, θ2≈4.998\\theta_2 \\approx 4.998θ2​≈4.998. Notice: Adam makes consistent, controlled steps — not as aggressive as large-α\\alphaα SGD (which would have overshot), yet much faster than tiny-α\\alphaα SGD (which would crawl). 
The bias-corrected estimates keep early steps meaningful despite the cold start.",{"id":18448,"title":18449,"titles":18450,"content":18451,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#comparing-optimizers-side-by-side","Comparing Optimizers Side by Side",[18286],"Let's bring it all together with intuition: OptimizerStep sizeMemoryStrengthsWeaknessesSGDFixed α\\alphaαNoneSimple, well-understoodSensitive to α\\alphaα, slow on ravinesSGD + MomentumFixed α\\alphaαGradient directionFaster, smoother pathStill needs good α\\alphaαRMSProp [4]AdaptiveGradient magnitudeGood for non-stationaryNo momentumAdamAdaptiveDirection + magnitudeBest of both worldsCan generalize slightly worse Adam essentially combines SGD with momentum (1st moment) and RMSProp (2nd moment) under one roof, with bias correction on top.",{"id":18453,"title":18454,"titles":18455,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#python-implementation","Python Implementation",[18286],{"id":18457,"title":18458,"titles":18459,"content":18460,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#minimal-adam-from-scratch","Minimal Adam from Scratch",[18286,18454],"import numpy as np\n\ndef adam(grad_fn, theta_init, alpha=0.001, beta1=0.9, beta2=0.999, eps=1e-8, max_iters=1000):\n    theta = theta_init\n    m = 0.0   # first moment (momentum)\n    v = 0.0   # second moment (adaptive scale)\n\n    for t in range(1, max_iters + 1):\n        g = grad_fn(theta)            # ① compute gradient\n\n        m = beta1 * m + (1 - beta1) * g       # ② update 1st moment\n        v = beta2 * v + (1 - beta2) * g ** 2  # ③ update 2nd moment\n\n        m_hat = m \u002F (1 - beta1 ** t)          # ④ bias-correct 1st moment\n        v_hat = v \u002F (1 - beta2 ** t)          # ⑤ bias-correct 2nd moment\n\n        theta = theta - alpha \u002F (np.sqrt(v_hat) + eps) * m_hat  # ⑥ update\n\n        if abs(g) \u003C 1e-7:\n            print(f\"Converged at step {t}\")\n            break\n\n    
return theta\n\n# Minimize J(θ) = θ²,  ∇J(θ) = 2θ\ntheta_min = adam(grad_fn=lambda th: 2 * th, theta_init=5.0)\nprint(f\"Minimum at θ = {theta_min:.8f}\") Output: Converged at step 817\nMinimum at θ = 0.00000001",{"id":18462,"title":18463,"titles":18464,"content":18465,"level":10753},"\u002Fen\u002Frooms\u002Fadam-optimizer#adam-on-linear-regression","Adam on Linear Regression",[18286,18454],"Now let's apply Adam to a real use case — fitting a line y^=w⋅x+b\\hat{y} = w \\cdot x + by^​=w⋅x+b to data. import numpy as np\n\ndef adam_linear_regression(X, y, alpha=0.01, beta1=0.9, beta2=0.999,\n                            eps=1e-8, epochs=200):\n    m = len(y)\n    w, b = 0.0, 0.0\n\n    # Separate Adam state for each parameter\n    mw, vw = 0.0, 0.0   # moments for w\n    mb, vb = 0.0, 0.0   # moments for b\n\n    for t in range(1, epochs + 1):\n        y_pred = w * X + b\n        error  = y_pred - y\n\n        # Gradients (same formula as gradient descent)\n        gw = (2 \u002F m) * np.dot(error, X)\n        gb = (2 \u002F m) * np.sum(error)\n\n        # 1st and 2nd moment updates for w\n        mw = beta1 * mw + (1 - beta1) * gw\n        vw = beta2 * vw + (1 - beta2) * gw ** 2\n        mw_hat = mw \u002F (1 - beta1 ** t)\n        vw_hat = vw \u002F (1 - beta2 ** t)\n\n        # 1st and 2nd moment updates for b\n        mb = beta1 * mb + (1 - beta1) * gb\n        vb = beta2 * vb + (1 - beta2) * gb ** 2\n        mb_hat = mb \u002F (1 - beta1 ** t)\n        vb_hat = vb \u002F (1 - beta2 ** t)\n\n        # Parameter updates\n        w = w - alpha \u002F (np.sqrt(vw_hat) + eps) * mw_hat\n        b = b - alpha \u002F (np.sqrt(vb_hat) + eps) * mb_hat\n\n        if t % 50 == 0:\n            loss = np.mean(error ** 2)\n            print(f\"Epoch {t:4d}: loss={loss:.6f}  w={w:.4f}  b={b:.4f}\")\n\n    return w, b\n\n# True relationship: y = 2x + 1\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([3.0, 5.0, 7.0, 9.0, 11.0])\n\nw, b = adam_linear_regression(X, 
y)\nprint(f\"\\nFitted: ŷ = {w:.4f}·x + {b:.4f}\") Output: Epoch   50: loss=0.000042  w=1.9953  b=1.0044\nEpoch  100: loss=0.000000  w=2.0000  b=1.0000\nEpoch  150: loss=0.000000  w=2.0000  b=1.0000\nEpoch  200: loss=0.000000  w=2.0000  b=1.0000\n\nFitted: ŷ = 2.0000·x + 1.0000 Adam recovers the true w=2,b=1w=2, b=1w=2,b=1 cleanly and fast — especially compared to vanilla gradient descent, which required careful learning rate tuning.",{"id":18467,"title":18468,"titles":18469,"content":18470,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#when-to-use-adam","When to Use Adam",[18286],"Adam is a safe default for most deep learning tasks: Neural networks: Training MLPs, CNNs, Transformers, RNNsNoisy gradients: Mini-batch training with small batch sizesSparse features: NLP tasks where some words appear rarely (large, infrequent gradients)Getting started: When you don't want to spend time tuning the learning rate One Known Limitation\n  Wilson et al. [5] show that adaptive optimizers like Adam can converge to slightly worse generalization than well-tuned SGD with momentum for image classification. In that setting, SGD + momentum with learning rate scheduling can outperform Adam. But for most tasks, Adam's robustness wins.",{"id":18472,"title":16908,"titles":18473,"content":18474,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#summary",[18286],"ConceptKey IdeaFixed learning rate flawOne α\\alphaα for all parameters — too rigidMomentum (mtm_tmt​)Smooth gradient direction over timeAdaptive scale (vtv_tvt​)Scale steps by gradient magnitude historyBias correctionFix cold-start bias when m0=v0=0m_0 = v_0 = 0m0​=v0​=0Adam updateθ←θ−αv^t+ϵm^t\\theta \\leftarrow \\theta - \\frac{\\alpha}{\\sqrt{\\hat{v}_t} + \\epsilon} \\hat{m}_tθ←θ−v^t​​+ϵα​m^t​ Adam doesn't remove the learning rate α\\alphaα — it still matters. But it makes training dramatically less sensitive to your choice of α\\alphaα. 
That's why the same default of 0.0010.0010.001 works well across an enormous variety of tasks. If gradient descent is hiking with a fixed stride, Adam is hiring a GPS-equipped guide who adjusts your pace, smooths your path, and makes sure you don't waste time on terrain you've already explored.",{"id":18476,"title":18477,"titles":18478,"content":18479,"level":10719},"\u002Fen\u002Frooms\u002Fadam-optimizer#references","References",[18286],"D. P. Kingma and J. Ba, \"Adam: A method for stochastic optimization,\" in Proc. 3rd Int. Conf. Learn. Representations (ICLR), San Diego, CA, USA, May 2015. [Online]. Available: https:\u002F\u002Farxiv.org\u002Fabs\u002F1412.6980\n  Y. LeCun, L. Bottou, G. B. Orr, and K.-R. Müller, \"Efficient backprop,\" in Neural Networks: Tricks of the Trade, G. B. Orr and K.-R. Müller, Eds. Berlin, Germany: Springer, 1998, pp. 9–50. [Online]. Available: https:\u002F\u002Flink.springer.com\u002Fchapter\u002F10.1007\u002F978-3-642-35289-8_5\n  I. Sutskever, J. Martens, G. Dahl, and G. Hinton, \"On the importance of initialization and momentum in deep learning,\" in Proc. 30th Int. Conf. Mach. Learn. (ICML), Atlanta, GA, USA, Jun. 2013, pp. 1139–1147. [Online]. Available: https:\u002F\u002Fproceedings.mlr.press\u002Fv28\u002Fsutskever13.html\n  T. Tieleman and G. Hinton, \"Lecture 6.5 — RMSProp: Divide the gradient by a running average of its recent magnitude,\" COURSERA: Neural Networks for Machine Learning, Tech. Rep., 2012.\n  A. C. Wilson, R. Roelofs, M. Stern, N. Srebro, and B. Recht, \"The marginal value of adaptive gradient methods in machine learning,\" in Proc. 31st Conf. Neural Inf. Process. Syst. (NeurIPS), Long Beach, CA, USA, Dec. 2017. [Online].
Available: https:\u002F\u002Farxiv.org\u002Fabs\u002F1705.08292 html pre.shiki code .s7zQu, html code.shiki .s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki .sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html pre.shiki code .sHwdD, html code.shiki .sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: 
var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sBMFI, html code.shiki .sBMFI{--shiki-light:#E2931D;--shiki-default:#FFCB6B;--shiki-dark:#FFCB6B}",{"id":18265,"title":5,"titles":18481,"content":18482,"level":10699},[],"From slopes of lines to the calculus engine behind machine learning Every time a neural network learns, it asks one question over and over: \"If I nudge this parameter slightly, does the error go up or down — and by how much?\" That question is answered by the derivative. 
Before we talk about gradients or optimizers, we need to understand derivatives from scratch.",{"id":18484,"title":54,"titles":18485,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-1-lines-and-slopes",[5],{"id":18487,"title":59,"titles":18488,"content":18489,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#the-equation-of-a-line",[5,54],"The simplest relationship between two quantities is a straight line: y=mx+by = mx + by=mx+b Where: xxx is the inputyyy is the outputmmm is the slope — how steeply the line rises or fallsbbb is the y-intercept — where the line crosses the vertical axis Example: y=2x+1y = 2x + 1y=2x+1 xxxy=2x+1y = 2x + 1y=2x+101132537 Every time xxx increases by 1, yyy increases by exactly 2. The slope m=2m = 2m=2 captures this constant rate.",{"id":18491,"title":681,"titles":18492,"content":18493,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#computing-the-slope-between-two-points",[5,54],"Given any two points (x1,y1)(x_1, y_1)(x1​,y1​) and (x2,y2)(x_2, y_2)(x2​,y2​) on a line, the slope is: m=ΔyΔx=y2−y1x2−x1m = \\frac{\\Delta y}{\\Delta x} = \\frac{y_2 - y_1}{x_2 - x_1}m=ΔxΔy​=x2​−x1​y2​−y1​​ This is the rise over run formula — how much yyy changes (rise) per unit change in xxx (run). Why Does Slope Matter?\n  Slope tells you the rate of change. A slope of 2 means \"for every 1 unit step in x, y changes by 2.\" A slope of −3 means y decreases by 3 for every step forward. A slope of 0 means y doesn't change at all — it's flat.",{"id":18495,"title":1512,"titles":18496,"content":18497,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-2-when-lines-become-curves",[5],"A line has a constant slope — it's the same everywhere. But most interesting functions in mathematics (and in machine learning) are curves whose steepness changes at every point. Consider the parabola: f(x)=x2f(x) = x^2f(x)=x2 xxxf(x)=x2f(x) = x^2f(x)=x2−39−11001139 Near x=0x = 0x=0 the curve is nearly flat. Near x=3x = 3x=3 it rises steeply. 
The slope is different at every point — which means the single formula m=ΔyΔxm = \\frac{\\Delta y}{\\Delta x}m=ΔxΔy​ between two distant points only gives us an average.",{"id":18499,"title":2059,"titles":18500,"content":18501,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#average-rate-of-change",[5,1512],"For two points xxx and x+hx + hx+h on a curve fff, the average rate of change over that interval is: ΔfΔx=f(x+h)−f(x)h\\frac{\\Delta f}{\\Delta x} = \\frac{f(x + h) - f(x)}{h}ΔxΔf​=hf(x+h)−f(x)​ This is the slope of the secant line — the straight line connecting the two points on the curve. Example on f(x)=x2f(x) = x^2f(x)=x2 between x=1x = 1x=1 and x=3x = 3x=3: f(3)−f(1)3−1=9−12=4\\frac{f(3) - f(1)}{3 - 1} = \\frac{9 - 1}{2} = 43−1f(3)−f(1)​=29−1​=4 That is the average steepness between x=1x=1x=1 and x=3x=3x=3, but it doesn't tell us what the slope is at a specific point.",{"id":18503,"title":3057,"titles":18504,"content":18505,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-3-the-limit-zooming-in-to-a-single-point",[5],"To find the slope at one exact point, we shrink the interval hhh down toward zero. As hhh gets smaller and smaller, the secant line rotates until it becomes the tangent line — touching the curve at exactly one point and matching its steepness there. 
Formally, the instantaneous rate of change at xxx is the limit: lim⁡h→0f(x+h)−f(x)h\\lim_{h \\to 0} \\frac{f(x + h) - f(x)}{h}h→0lim​hf(x+h)−f(x)​ This is the core idea of a derivative.",{"id":18507,"title":3412,"titles":18508,"content":18509,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#limits-intuitively",[5,3057],"A limit asks: \"What value does an expression approach as a variable gets closer and closer to some number — even if it never arrives?\" lim⁡h→0(x+h)2−x2h\\lim_{h \\to 0} \\frac{(x+h)^2 - x^2}{h}h→0lim​h(x+h)2−x2​ Expand the numerator: =lim⁡h→0x2+2xh+h2−x2h=lim⁡h→02xh+h2h=lim⁡h→0(2x+h)= \\lim_{h \\to 0} \\frac{x^2 + 2xh + h^2 - x^2}{h} = \\lim_{h \\to 0} \\frac{2xh + h^2}{h} = \\lim_{h \\to 0} (2x + h)=h→0lim​hx2+2xh+h2−x2​=h→0lim​h2xh+h2​=h→0lim​(2x+h) As h→0h \\to 0h→0: =2x= 2x=2x The slope of f(x)=x2f(x) = x^2f(x)=x2 at any point xxx is exactly 2x2x2x.",{"id":18511,"title":4643,"titles":18512,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-4-the-derivative",[5],{"id":18514,"title":4647,"titles":18515,"content":18516,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#definition",[5,4643],"The derivative of a function fff at point xxx, written f′(x)f'(x)f′(x) or dfdx\\frac{df}{dx}dxdf​, is: f′(x)=lim⁡h→0f(x+h)−f(x)h\\boxed{f'(x) = \\lim_{h \\to 0} \\frac{f(x + h) - f(x)}{h}}f′(x)=h→0lim​hf(x+h)−f(x)​​ It gives the instantaneous rate of change — the slope of the tangent line at every point.",{"id":18518,"title":5281,"titles":18519,"content":18520,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#geometric-meaning",[5,4643],"Derivative ValueMeaningf′(x)>0f'(x) > 0f′(x)>0Function is increasing at xxxf′(x)\u003C0f'(x) \u003C 0f′(x)\u003C0Function is decreasing at xxxf′(x)=0f'(x) = 0f′(x)=0Function has a flat point (possible minimum, maximum, or saddle)Large ∥f′(x)∥\\|f'(x)\\|∥f′(x)∥Function is changing rapidlySmall ∥f′(x)∥\\|f'(x)\\|∥f′(x)∥Function is changing 
slowly",{"id":18522,"title":5880,"titles":18523,"content":18524,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-5-differentiation-rules",[5],"Computing limits by hand every time would be exhausting. Mathematicians have derived shortcut rules that cover almost every function you'll encounter.",{"id":18526,"title":5891,"titles":18527,"content":18528,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#power-rule",[5,5880],"For f(x)=xnf(x) = x^nf(x)=xn: ddxxn=n⋅xn−1\\frac{d}{dx} x^n = n \\cdot x^{n-1}dxd​xn=n⋅xn−1 Examples: FunctionDerivativex2x^2x22x2x2xx3x^3x33x23x^23x2x10x^{10}x1010x910x^910x9xxx (i.e. x1x^1x1)111555 (constant, x0x^0x0)000",{"id":18530,"title":6848,"titles":18531,"content":18532,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#constant-multiple-rule",[5,5880],"ddx[c⋅f(x)]=c⋅f′(x)\\frac{d}{dx}[c \\cdot f(x)] = c \\cdot f'(x)dxd​[c⋅f(x)]=c⋅f′(x) If f(x)=3x2f(x) = 3x^2f(x)=3x2, then f′(x)=3⋅2x=6xf'(x) = 3 \\cdot 2x = 6xf′(x)=3⋅2x=6x.",{"id":18534,"title":7359,"titles":18535,"content":18536,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#sum-rule",[5,5880],"ddx[f(x)+g(x)]=f′(x)+g′(x)\\frac{d}{dx}[f(x) + g(x)] = f'(x) + g'(x)dxd​[f(x)+g(x)]=f′(x)+g′(x) If f(x)=x3+5x2−2x+7f(x) = x^3 + 5x^2 - 2x + 7f(x)=x3+5x2−2x+7, differentiate term by term: f′(x)=3x2+10x−2f'(x) = 3x^2 + 10x - 2f′(x)=3x2+10x−2",{"id":18538,"title":8063,"titles":18539,"content":18540,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#chain-rule",[5,5880],"For a composition of functions f(g(x))f(g(x))f(g(x)): ddxf(g(x))=f′(g(x))⋅g′(x)\\frac{d}{dx} f(g(x)) = f'(g(x)) \\cdot g'(x)dxd​f(g(x))=f′(g(x))⋅g′(x) Read as: \"derivative of outer, evaluated at inner — times derivative of inner.\" Example: h(x)=(3x+1)4h(x) = (3x + 1)^4h(x)=(3x+1)4 Let g(x)=3x+1g(x) = 3x + 1g(x)=3x+1 and f(u)=u4f(u) = u^4f(u)=u4: h′(x)=4(3x+1)3⋅3=12(3x+1)3h'(x) = 4(3x+1)^3 \\cdot 3 = 12(3x+1)^3h′(x)=4(3x+1)3⋅3=12(3x+1)3 The chain rule is everywhere in machine learning — backpropagation is 
essentially repeated application of it through layers of a neural network.",{"id":18542,"title":9053,"titles":18543,"content":18544,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#common-derivatives-reference",[5,5880],"FunctionDerivativeexe^xexexe^xexln⁡(x)\\ln(x)ln(x)1x\\frac{1}{x}x1​sin⁡(x)\\sin(x)sin(x)cos⁡(x)\\cos(x)cos(x)cos⁡(x)\\cos(x)cos(x)−sin⁡(x)-\\sin(x)−sin(x)σ(x)=11+e−x\\sigma(x) = \\frac{1}{1+e^{-x}}σ(x)=1+e−x1​ (sigmoid)σ(x)(1−σ(x))\\sigma(x)(1 - \\sigma(x))σ(x)(1−σ(x))",{"id":18546,"title":9854,"titles":18547,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-6-derivatives-in-practice",[5],{"id":18549,"title":9858,"titles":18550,"content":18551,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#finding-minima-and-maxima",[5,9854],"If f′(x)=0f'(x) = 0f′(x)=0 the function is momentarily flat — this is a critical point. There are three types: Local minimum: function dips down then rises → f′(x)f'(x)f′(x) changes from negative to positiveLocal maximum: function rises then dips → f′(x)f'(x)f′(x) changes from positive to negativeSaddle point: function is flat but continues in the same general direction Example: Find the minimum of f(x)=x2−4x+5f(x) = x^2 - 4x + 5f(x)=x2−4x+5 f′(x)=2x−4=0  ⟹  x=2f'(x) = 2x - 4 = 0 \\implies x = 2f′(x)=2x−4=0⟹x=2 At x=2x = 2x=2: f(2)=4−8+5=1f(2) = 4 - 8 + 5 = 1f(2)=4−8+5=1 — this is the minimum. def f(x):\n    return x**2 - 4*x + 5\n\ndef f_prime(x):\n    return 2*x - 4\n\n# Find where derivative = 0\n# 2x - 4 = 0  =>  x = 2\nx_min = 2\nprint(f\"Minimum at x={x_min}, f(x)={f(x_min)}\")  # x=2, f(x)=1",{"id":18553,"title":10868,"titles":18554,"content":18555,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#the-derivative-as-a-direction-signal",[5,9854],"This is the key insight that bridges calculus to machine learning: If f′(x)>0f'(x) > 0f′(x)>0 at some point, moving xxx to the right increases fff. 
Moving xxx to the left decreases fff.If f′(x)\u003C0f'(x) \u003C 0f′(x)\u003C0, the opposite is true. To minimize fff, we should always move xxx in the direction opposite to the derivative: xnew=xold−α⋅f′(xold)x_{\\text{new}} = x_{\\text{old}} - \\alpha \\cdot f'(x_{\\text{old}})xnew​=xold​−α⋅f′(xold​) Where α\\alphaα is a small step size. Notice anything? This is exactly the gradient descent update rule.",{"id":18557,"title":11601,"titles":18558,"content":18559,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-7-from-one-variable-to-many-the-gradient",[5],"Machine learning models have not one parameter, but millions. A loss function JJJ might depend on weights w1,w2,…,wnw_1, w_2, \\ldots, w_nw1​,w2​,…,wn​. We need derivatives with respect to each parameter simultaneously.",{"id":18561,"title":11848,"titles":18562,"content":18563,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#partial-derivatives",[5,11601],"A partial derivative holds all other variables constant and differentiates with respect to one: ∂J∂wi= \"how much does J change if we nudge only wi?\"\\frac{\\partial J}{\\partial w_i} \\quad \\text{= \"how much does J change if we nudge only } w_i \\text{?\"}∂wi​∂J​= \"how much does J change if we nudge only wi​?\" Example: J(w1,w2)=w12+3w1w2+w22J(w_1, w_2) = w_1^2 + 3w_1 w_2 + w_2^2J(w1​,w2​)=w12​+3w1​w2​+w22​ ∂J∂w1=2w1+3w2∂J∂w2=3w1+2w2\\frac{\\partial J}{\\partial w_1} = 2w_1 + 3w_2 \\qquad \\frac{\\partial J}{\\partial w_2} = 3w_1 + 2w_2∂w1​∂J​=2w1​+3w2​∂w2​∂J​=3w1​+2w2​",{"id":18565,"title":13051,"titles":18566,"content":18567,"level":10753},"\u002Fen\u002Frooms\u002Fderivatives#the-gradient-vector",[5,11601],"Stack all partial derivatives into a single vector — this is the gradient ∇J\\nabla J∇J: ∇J(w1,w2,…,wn)=[∂J∂w1∂J∂w2⋮∂J∂wn]\\nabla J(w_1, w_2, \\ldots, w_n) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial w_1} \\\\[4pt]\n\\frac{\\partial J}{\\partial w_2} \\\\\n\\vdots \\\\[4pt]\n\\frac{\\partial J}{\\partial 
w_n}\n\\end{bmatrix}∇J(w1​,w2​,…,wn​)=​∂w1​∂J​∂w2​∂J​⋮∂wn​∂J​​​ The gradient is the multi-dimensional equivalent of the derivative. It points in the direction of steepest ascent in the loss landscape. To minimize the loss, we move in the opposite direction — exactly what gradient descent does. The Bridge to Machine Learning\n  \n    In ML, the loss function $J(\\theta)$ measures how wrong the model is. The gradient $\\nabla J(\\theta)$ tells us which direction in parameter space increases the error most. By stepping in the opposite direction, we reduce the error — step by step, iteration by iteration.",{"id":18569,"title":13979,"titles":18570,"content":18571,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#part-8-a-complete-example-linear-regression",[5],"Let's see all of this in action. Setup: We have data points (x(i),y(i))(x^{(i)}, y^{(i)})(x(i),y(i)) and want to fit y^=wx+b\\hat{y} = wx + by^​=wx+b. Loss function (Mean Squared Error): J(w,b)=1m∑i=1m(y^(i)−y(i))2=1m∑i=1m(wx(i)+b−y(i))2J(w, b) = \\frac{1}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)^2 = \\frac{1}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right)^2J(w,b)=m1​i=1∑m​(y^​(i)−y(i))2=m1​i=1∑m​(wx(i)+b−y(i))2 Partial derivative w.r.t. www (using chain rule — derivative of outer squared term times derivative of inner wx+bwx+bwx+b): ∂J∂w=2m∑i=1m(wx(i)+b−y(i))⋅x(i)\\frac{\\partial J}{\\partial w} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right) \\cdot x^{(i)}∂w∂J​=m2​i=1∑m​(wx(i)+b−y(i))⋅x(i) Partial derivative w.r.t. 
bbb: ∂J∂b=2m∑i=1m(wx(i)+b−y(i))\\frac{\\partial J}{\\partial b} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right)∂b∂J​=m2​i=1∑m​(wx(i)+b−y(i)) Gradient descent updates — move opposite to the gradient: w←w−α⋅∂J∂w,b←b−α⋅∂J∂bw \\leftarrow w - \\alpha \\cdot \\frac{\\partial J}{\\partial w}, \\qquad b \\leftarrow b - \\alpha \\cdot \\frac{\\partial J}{\\partial b}w←w−α⋅∂w∂J​,b←b−α⋅∂b∂J​ import numpy as np\n\n# Data: true relationship y = 3x + 2\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([5.0, 8.0, 11.0, 14.0, 17.0])\n\nw, b = 0.0, 0.0   # start at zero\nalpha = 0.01\nm = len(y)\n\nfor epoch in range(500):\n    y_pred = w * X + b              # forward pass\n    error  = y_pred - y             # residuals: ŷ - y\n\n    # Partial derivatives (the gradient)\n    dw = (2 \u002F m) * np.dot(error, X) # ∂J\u002F∂w\n    db = (2 \u002F m) * np.sum(error)    # ∂J\u002F∂b\n\n    # Gradient descent step\n    w = w - alpha * dw\n    b = b - alpha * db\n\nprint(f\"Fitted: ŷ = {w:.4f}·x + {b:.4f}\")\n# Output: ŷ = 3.0000·x + 2.0000 The derivative — computed analytically with calculus, then applied iteratively — is what drives the entire learning process.",{"id":18573,"title":16908,"titles":18574,"content":18575,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#summary",[5],"ConceptOne-Line DefinitionSlope of a linem=ΔyΔxm = \\frac{\\Delta y}{\\Delta x}m=ΔxΔy​ — constant rate of changeAverage rate of changef(x+h)−f(x)h\\frac{f(x+h)-f(x)}{h}hf(x+h)−f(x)​ — slope of secant over interval hhhLimitThe value an expression approaches as h→0h \\to 0h→0Derivativef′(x)=lim⁡h→0f(x+h)−f(x)hf'(x) = \\lim_{h\\to 0}\\frac{f(x+h)-f(x)}{h}f′(x)=limh→0​hf(x+h)−f(x)​ — instantaneous rate of changePower ruleddxxn=nxn−1\\frac{d}{dx} x^n = nx^{n-1}dxd​xn=nxn−1Chain ruleddxf(g(x))=f′(g(x))⋅g′(x)\\frac{d}{dx}f(g(x)) = f'(g(x))\\cdot g'(x)dxd​f(g(x))=f′(g(x))⋅g′(x) — essential for backpropPartial derivativeDerivative holding all other variables fixedGradientVector of all 
partial derivatives — points toward steepest ascent The derivative is the mathematical answer to the question \"which way is uphill?\" In machine learning we use its negative — downhill — to train every model.",{"id":18577,"title":18172,"titles":18578,"content":18579,"level":10719},"\u002Fen\u002Frooms\u002Fderivatives#whats-next",[5],"You now have the calculus foundation. The gradient descent algorithm takes this one concept — move opposite to the derivative — and turns it into a complete optimization engine for machine learning. Next Room: Gradient Descent\n    See how the derivative becomes an optimization algorithm — with interactive experiments, full Python code, and a walk through every step of the math.\n    \n      Enter the Gradient Descent Room → html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki .sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .s7zQu, html code.shiki .s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html pre.shiki code .sHwdD, html code.shiki 
.sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}",{"id":18292,"title":18291,"titles":18581,"content":18582,"level":10699},[],"Understanding the optimization algorithm that powers machine learning Image extracted from: Creating a Gradient Descent Animation in Python Gradient descent is one of the most fundamental optimization algorithms in machine learning. 
It's a method for finding the minimum of a function by iteratively moving in the direction of steepest descent.",{"id":18584,"title":18585,"titles":18586,"content":18587,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#the-intuition","The Intuition",[18291],"Imagine you're standing on a mountain in thick fog, and you want to reach the valley below. You can't see far, but you can feel the slope beneath your feet. Gradient descent works the same way: it takes small steps downhill, following the steepest path, until it reaches a minimum.",{"id":18589,"title":18590,"titles":18591,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#the-mathematics","The Mathematics",[18291],{"id":18593,"title":18594,"titles":18595,"content":18596,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#the-basic-formula","The Basic Formula",[18291,18590],"At its core, gradient descent updates parameters using this simple formula: θnew=θold−α∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\nabla J(\\theta)θnew​=θold​−α∇J(θ) Where: θ\\thetaθ represents the parameters we're optimizingα\\alphaα is the learning rate (step size)∇J(θ)\\nabla J(\\theta)∇J(θ) is the gradient of the cost function JJJ with respect to θ\\thetaθ",{"id":18598,"title":18599,"titles":18600,"content":18601,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#understanding-the-gradient-from-simple-to-complex","Understanding the Gradient: From Simple to Complex",[18291,18590],"Let's demystify the gradient symbol ∇\\nabla∇ (called \"nabla\" or \"del\") by building up from the simplest case.",{"id":18603,"title":18604,"titles":18605,"content":18606,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#case-1-single-variable-one-parameter","Case 1: Single Variable (One Parameter)",[18291,18590,18599],"When we have just one parameter, the gradient is simply the derivative: ∇J(θ)=dJdθ\\nabla J(\\theta) = \\frac{dJ}{d\\theta}∇J(θ)=dθdJ​ The derivative tells us: \"If I increase θ\\thetaθ by 
a tiny amount, how much does JJJ change?\" Example: For J(θ)=θ2J(\\theta) = \\theta^2J(θ)=θ2: ∇J(θ)=dJdθ=2θ\\nabla J(\\theta) = \\frac{dJ}{d\\theta} = 2\\theta∇J(θ)=dθdJ​=2θ If θ=5\\theta = 5θ=5, then ∇J(5)=10\\nabla J(5) = 10∇J(5)=10 → function is increasing, go left (decrease θ\\thetaθ)If θ=−3\\theta = -3θ=−3, then ∇J(−3)=−6\\nabla J(-3) = -6∇J(−3)=−6 → function is decreasing, go right (increase θ\\thetaθ)If θ=0\\theta = 0θ=0, then ∇J(0)=0\\nabla J(0) = 0∇J(0)=0 → we're at the minimum!",{"id":18608,"title":18609,"titles":18610,"content":18611,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#case-2-two-variables-two-parameters","Case 2: Two Variables (Two Parameters)",[18291,18590,18599],"When we have two parameters θ1\\theta_1θ1​ and θ2\\theta_2θ2​, the gradient becomes a vector with two components: ∇J(θ1,θ2)=[∂J∂θ1∂J∂θ2]\\nabla J(\\theta_1, \\theta_2) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial \\theta_1} \\\\\n\\frac{\\partial J}{\\partial \\theta_2}\n\\end{bmatrix}∇J(θ1​,θ2​)=[∂θ1​∂J​∂θ2​∂J​​] Each partial derivative ∂J∂θi\\frac{\\partial J}{\\partial \\theta_i}∂θi​∂J​ asks: \"If I change only θi\\theta_iθi​ (keeping others fixed), how much does JJJ change?\" Example: For J(θ1,θ2)=θ12+θ22J(\\theta_1, \\theta_2) = \\theta_1^2 + \\theta_2^2J(θ1​,θ2​)=θ12​+θ22​: ∇J=[2θ12θ2]\\nabla J = \\begin{bmatrix}\n2\\theta_1 \\\\\n2\\theta_2\n\\end{bmatrix}∇J=[2θ1​2θ2​​] At point (θ1=3,θ2=4)(\\theta_1=3, \\theta_2=4)(θ1​=3,θ2​=4): ∇J=[68]\\nabla J = \\begin{bmatrix}\n6 \\\\\n8\n\\end{bmatrix}∇J=[68​] This vector points in the direction of steepest ascent. 
We go in the opposite direction (subtract it) to descend!",{"id":18613,"title":18614,"titles":18615,"content":18616,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#case-3-many-variables-general-case","Case 3: Many Variables (General Case)",[18291,18590,18599],"For n parameters θ1,θ2,…,θn\\theta_1, \\theta_2, \\ldots, \\theta_nθ1​,θ2​,…,θn​, the gradient is an n-dimensional vector: ∇J(θ)=[∂J∂θ1∂J∂θ2⋮∂J∂θn]\\nabla J(\\theta) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial \\theta_1} \\\\\n\\frac{\\partial J}{\\partial \\theta_2} \\\\\n\\vdots \\\\\n\\frac{\\partial J}{\\partial \\theta_n}\n\\end{bmatrix}∇J(θ)=​∂θ1​∂J​∂θ2​∂J​⋮∂θn​∂J​​​ Each component tells us how sensitive JJJ is to changes in that specific parameter. This is exactly what we need to know which direction to adjust each parameter! Key Insight: Whether you have 1 parameter or 1 million parameters, the idea is the same: compute how much each parameter affects the cost, then adjust them in the opposite direction.",{"id":18618,"title":18619,"titles":18620,"content":18621,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#walking-through-a-complete-example","Walking Through a Complete Example",[18291],"Let's see gradient descent in action with the simplest case: one variable. 
Consider minimizing the quadratic function: J(θ)=θ2J(\\theta) = \\theta^2J(θ)=θ2 The gradient (derivative) is: ∇J(θ)=dJdθ=2θ\\nabla J(\\theta) = \\frac{dJ}{d\\theta} = 2\\theta∇J(θ)=dθdJ​=2θ The gradient descent update rule becomes: θnew=θold−α⋅2θold\\theta_{new} = \\theta_{old} - \\alpha \\cdot 2\\theta_{old}θnew​=θold​−α⋅2θold​ Starting at θ0=10\\theta_0 = 10θ0​=10 with learning rate α=0.1\\alpha = 0.1α=0.1: Iteration 1: θ1=10−0.1×(2×10)=10−2=8\\theta_1 = 10 - 0.1 \\times (2 \\times 10) = 10 - 2 = 8θ1​=10−0.1×(2×10)=10−2=8 The gradient was positive (10 slope upward), so we moved left (decreased θ\\thetaθ) Iteration 2: θ2=8−0.1×(2×8)=8−1.6=6.4\\theta_2 = 8 - 0.1 \\times (2 \\times 8) = 8 - 1.6 = 6.4θ2​=8−0.1×(2×8)=8−1.6=6.4 Still positive gradient, getting smaller, so smaller steps Iteration 3: θ3=6.4−0.1×(2×6.4)=6.4−1.28=5.12\\theta_3 = 6.4 - 0.1 \\times (2 \\times 6.4) = 6.4 - 1.28 = 5.12θ3​=6.4−0.1×(2×6.4)=6.4−1.28=5.12 Pattern continues: as we approach the minimum, the gradient shrinks, so our steps get smaller automatically! With each step, we get closer to the minimum at θ=0\\theta = 0θ=0. 
Notice how the steps naturally get smaller as the gradient decreases near the minimum.",{"id":18623,"title":18624,"titles":18625,"content":10692,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#key-concepts","Key Concepts",[18291],{"id":18627,"title":18628,"titles":18629,"content":18630,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#learning-rate","Learning Rate",[18291,18624],"The learning rate α\\alphaα is crucial: Too large: We might overshoot the minimum or even divergeToo small: Convergence will be very slowJust right: Efficient convergence to the minimum",{"id":18632,"title":18633,"titles":18634,"content":10692,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#lets-do-experiments-with-different-learning-rates-and-see-how-it-affects-convergence","Let's do experiments with different learning rates and see how it affects convergence!",[18291,18624],{"id":18636,"title":18637,"titles":18638,"content":10692,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#types-of-gradient-descent","Types of Gradient Descent",[18291,18624],{"id":18640,"title":18641,"titles":18642,"content":18643,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#_1-batch-gradient-descent","1. Batch Gradient Descent",[18291,18624,18637],"Uses the entire dataset to compute the gradient: θ=θ−α∇θJ(θ)\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta)θ=θ−α∇θ​J(θ) Where J(θ)J(\\theta)J(θ) is computed over all training examples.",{"id":18645,"title":18646,"titles":18647,"content":18648,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#_2-stochastic-gradient-descent-sgd","2. Stochastic Gradient Descent (SGD)",[18291,18624,18637],"Updates parameters using one training example at a time: θ=θ−α∇θJ(θ;x(i),y(i))\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta; x^{(i)}, y^{(i)})θ=θ−α∇θ​J(θ;x(i),y(i))",{"id":18650,"title":18651,"titles":18652,"content":18653,"level":10760},"\u002Fen\u002Frooms\u002Fgradient-descent#_3-mini-batch-gradient-descent","3. 
Mini-batch Gradient Descent",[18291,18624,18637],"A compromise: uses a small batch of examples: θ=θ−α∇θJ(θ;x(i:i+b),y(i:i+b))\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta; x^{(i:i+b)}, y^{(i:i+b)})θ=θ−α∇θ​J(θ;x(i:i+b),y(i:i+b)) Where bbb is the batch size.",{"id":18655,"title":18656,"titles":18657,"content":18658,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#convergence","Convergence",[18291],"Gradient descent converges when the gradient becomes very small: ∣∇J(θ)∣\u003Cϵ|\\nabla J(\\theta)| \u003C \\epsilon∣∇J(θ)∣\u003Cϵ Where ϵ\\epsilonϵ is a small threshold value.",{"id":18660,"title":18661,"titles":18662,"content":18663,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#challenges","Challenges",[18291],"Local Minima: The algorithm might get stuck in local minima instead of finding the global minimumSaddle Points: Points where the gradient is zero but aren't minimaPlateau Regions: Areas where the gradient is very small, slowing down learning",{"id":18665,"title":18666,"titles":18667,"content":18668,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#real-world-applications","Real-World Applications",[18291],"Gradient descent is used to train: Neural Networks: Optimizing millions of parametersLinear Regression: Finding the best-fit lineLogistic Regression: Classification problemsSupport Vector Machines: Finding optimal hyperplanes",{"id":18670,"title":18671,"titles":18672,"content":18673,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#gradient-descent-in-deep-learning","Gradient Descent in Deep Learning",[18291,18666],"A deep neural network uses gradient descent to train weights across all its layers by minimizing the cost function. The image above shows a Deep Neural Network — a powerful type of model that directly relies on gradient descent to optimize its cost function. In deep learning: Input Layer receives raw data (e.g. 
image pixels, words, numbers)Hidden Layers perform feature extraction — learning complex patterns from dataOutput Layer produces the final predictionWeights www in each connection are the parameters θ\\thetaθ that gradient descent optimizes During training, the process is: Forward Pass→Compute Loss J(θ)→Backpropagation→Gradient Descent Update\\begin{aligned}\n&\\text{Forward Pass} \\\\\n&\\rightarrow \\text{Compute Loss } J(\\theta) \\\\\n&\\rightarrow \\text{Backpropagation} \\\\\n&\\rightarrow \\text{Gradient Descent Update}\n\\end{aligned}​Forward Pass→Compute Loss J(θ)→Backpropagation→Gradient Descent Update​ A network may have millions of neurons → millions of weights → a gradient vector with millions of dimensions — yet gradient descent works exactly the same way as in the 1D case: move opposite to the gradient to reduce the loss!",{"id":18675,"title":18454,"titles":18676,"content":18677,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#python-implementation",[18291],"Below is a pure-Python implementation — no ML libraries. Each block maps directly to the math above. 
Highlighted lines are the core formulas.",{"id":18679,"title":18680,"titles":18681,"content":18682,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#step-1-cost-function-and-its-gradient","Step 1 — Cost Function and its Gradient",[18291,18454],"J(θ)=θ2,∇J(θ)=2θJ(\\theta) = \\theta^2, \\qquad \\nabla J(\\theta) = 2\\thetaJ(θ)=θ2,∇J(θ)=2θ # J(θ) = θ²  →  the function we want to minimize\ndef cost(theta):\n    return theta ** 2\n\n# ∇J(θ) = dJ\u002Fdθ = 2θ  →  its derivative (gradient)\ndef gradient(theta):\n    return 2 * theta",{"id":18684,"title":18685,"titles":18686,"content":18687,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#step-2-the-update-rule","Step 2 — The Update Rule",[18291,18454],"θnew=θold−α⋅∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\cdot \\nabla J(\\theta)θnew​=θold​−α⋅∇J(θ) def update(theta, alpha):\n    grad = gradient(theta)           # ① compute  ∇J(θ)\n    return theta - alpha * grad      # ② apply   θ_new = θ_old − α·∇J(θ) Line 3 is the update rule formula above, written directly as Python.",{"id":18689,"title":18690,"titles":18691,"content":18692,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#step-3-full-loop-until-convergence","Step 3 — Full Loop Until Convergence",[18291,18454],"Run updates until ∣∇J(θ)∣\u003Cε|\\nabla J(\\theta)| \u003C \\varepsilon∣∇J(θ)∣\u003Cε — when the gradient is essentially zero: def gradient_descent(theta_init, alpha, epsilon=1e-6, max_iters=1000):\n    theta = theta_init                           # θ₀ — starting point\n    for i in range(max_iters):\n        grad = gradient(theta)                   # ∇J(θ) = 2θ\n        if abs(grad) \u003C epsilon:                  # stop when |∇J(θ)| \u003C ε\n            print(f\"Converged at iteration {i}\")\n            break\n        theta = theta - alpha * grad             # θ_new = θ_old − α·∇J(θ)\n        if i \u003C 5:\n            print(f\"  iter {i+1:2d}: θ={theta:.5f}  J={cost(theta):.5f}  ∇J={grad:.5f}\")\n    return theta\n\n# Same 
starting values as the manual example above: θ₀ = 10, α = 0.1\ntheta_min = gradient_descent(theta_init=10.0, alpha=0.1)\nprint(f\"\\nMinimum at θ = {theta_min:.8f}\") Output — matches the manual iterations above: iter  1: θ= 8.00000  J=64.00000  ∇J=20.00000\n  iter  2: θ= 6.40000  J=40.96000  ∇J=16.00000\n  iter  3: θ= 5.12000  J=26.21440  ∇J=12.80000\n  iter  4: θ= 4.09600  J=16.77722  ∇J=10.24000\n  iter  5: θ= 3.27680  J=10.73742  ∇J= 8.19200\nMinimum at θ = 0.00000001",{"id":18694,"title":18695,"titles":18696,"content":18697,"level":10753},"\u002Fen\u002Frooms\u002Fgradient-descent#step-4-linear-regression-two-parameters","Step 4 — Linear Regression: Two Parameters",[18291,18454],"For a model y^=wX+b\\hat{y} = wX + by^​=wX+b, the cost is mean squared error: J(w,b)=1m∑i=1m(y^(i)−y(i))2J(w, b) = \\frac{1}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)^2J(w,b)=m1​∑i=1m​(y^​(i)−y(i))2 With partial derivatives: ∂J∂w=2m∑i=1m(y^(i)−y(i))x(i),∂J∂b=2m∑i=1m(y^(i)−y(i))\\frac{\\partial J}{\\partial w} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right) x^{(i)}, \\qquad \\frac{\\partial J}{\\partial b} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)∂w∂J​=m2​∑i=1m​(y^​(i)−y(i))x(i),∂b∂J​=m2​∑i=1m​(y^​(i)−y(i)) import numpy as np\n\ndef linear_regression_gd(X, y, alpha=0.01, epochs=500):\n    m = len(y)\n    w, b = 0.0, 0.0                        # θ = [w, b] — initialize to zero\n    for epoch in range(epochs):\n        y_pred = w * X + b                 # forward pass:  ŷ = w·X + b\n        error  = y_pred - y                # residuals:     ŷ − y\n        dw = (2 \u002F m) * np.dot(error, X)   # ∂J\u002F∂w = (2\u002Fm) Σ (ŷ−y)·x\n        db = (2 \u002F m) * np.sum(error)       # ∂J\u002F∂b = (2\u002Fm) Σ (ŷ−y)\n        w = w - alpha * dw                 # w_new = w_old − α·∂J\u002F∂w\n        b = b - alpha * db                 # b_new = b_old − α·∂J\u002F∂b\n        if epoch % 100 == 0:\n            loss = 
np.mean(error ** 2)     # J(w,b) = (1\u002Fm) Σ (ŷ−y)²\n            print(f\"Epoch {epoch:4d}: loss={loss:.4f}  w={w:.4f}  b={b:.4f}\")\n    return w, b\n\n# True relationship: y = 2·x  →  model should converge to w≈2, b≈0\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([2.0, 4.0, 6.0, 8.0, 10.0])\nw, b = linear_regression_gd(X, y)\nprint(f\"\\nFitted:  ŷ = {w:.4f}·x + {b:.4f}\") The highlighted lines 7–12 map directly to the formulas: Lines 7–8: forward pass y^=wX+b\\hat{y} = wX + by^​=wX+b and residualsLines 9–10: partial derivatives ∂J∂w\\frac{\\partial J}{\\partial w}∂w∂J​ and ∂J∂b\\frac{\\partial J}{\\partial b}∂b∂J​Lines 11–12: gradient descent update rule θnew=θold−α∇J\\theta_{new} = \\theta_{old} - \\alpha \\nabla Jθnew​=θold​−α∇J",{"id":18699,"title":18700,"titles":18701,"content":18702,"level":10719},"\u002Fen\u002Frooms\u002Fgradient-descent#next-steps","Next Steps",[18291],"Once you understand gradient descent, you can explore advanced variations: Momentum: Adds velocity to updatesAdam: Adaptive learning rates per parameterRMSprop: Handles sparse gradients better html pre.shiki code .sHwdD, html code.shiki .sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki .sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .s7zQu, html code.shiki 
.s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html pre.shiki code .sBMFI, html code.shiki 
.sBMFI{--shiki-light:#E2931D;--shiki-default:#FFCB6B;--shiki-dark:#FFCB6B}",{"id":18302,"title":18301,"titles":18704,"content":18705,"level":10699},[],"កម្មវិធី 22 សប្តាហ៍ដែលនឹងបំប្លែងអ្នកពីអ្នកប្រើ ML Model ទៅជាអ្នកបង្កើត train និង deploy ពួកវានៅក្នុងការផលិតពិតប្រាកដ។",{"id":18707,"title":18708,"titles":18709,"content":18710,"level":10719},"\u002Fkm\u002Fbootcamp#សូមស្វាគមន៍-ml-engineers-នាពេលអនាគត","សូមស្វាគមន៍ ML Engineers នាពេលអនាគត!",[18301],"ក្នុងរយៈពេល 22 សប្តាហ៍ អ្នកនឹងវិវត្តន៍ពីអ្នកដែល ប្រើប្រាស់ ML Models ទៅជាអ្នកដែល បង្កើត train និង deploy ពួកវានៅក្នុងការផលិតពិតប្រាកដ។ កម្មវិធីសិក្សានេះ រួមបញ្ចូលមូលដ្ឋានគ្រឹះគណិតវិទ្យា Machine Learning បែបប្រពៃណី Deep Learning Transformers និង MLOps ចូលក្នុងដំណើររួមមួយ។",{"id":18712,"title":18713,"titles":18714,"content":18715,"level":10719},"\u002Fkm\u002Fbootcamp#ព័ត៌មានលម្អិតនៃកម្មវិធី","ព័ត៌មានលម្អិតនៃកម្មវិធី",[18301],"រយៈពេល22 សប្តាហ៍ (៦០ ម៉ោងបង្រៀន)កាលវិភាគព្រហស្បត្តិ៍ & សុក្រ · 1.5 ម៉ោង \u002F Sessionពេលវេលារៀន\u002Fសប្តាហ៍1.5 ម៉ោងក្នុងថ្នាក់ + 4–6 ម៉ោងសិក្សាដោយខ្លួនឯងថ្ងៃចាប់ផ្តើម26 មីនា 2026 (នៅបន្តចុះឈ្មោះ)វិធីសាស្ត្រគណិត → Classical ML → Deep Learning → Transformers → MLOps",{"id":18717,"title":18718,"titles":18719,"content":10692,"level":10719},"\u002Fkm\u002Fbootcamp#technology-stack","Technology Stack",[18301],{"id":18721,"title":18722,"titles":18723,"content":10692,"level":10719},"\u002Fkm\u002Fbootcamp#មាតិកានៃកម្មវិធី","មាតិកានៃកម្មវិធី",[18301],{"id":18725,"title":18726,"titles":18727,"content":18728,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-1-មូលដ្ឋានគ្រឹះ-៤-សប្តាហ៍","កម្មវិធី​ទី 1 · មូលដ្ឋានគ្រឹះ (៤ សប្តាហ៍)",[18301,18722],"គ្រឹះគណិតវិទ្យា និងគំនិតសម្រាប់ ML។ សិក្សាស្វែងយល់ពីរបៀបដែលម៉ាស៊ីនរៀននៅពេលមុន ការហ្វឹកហ្វឺន Model ពិតប្រាកដ។ សប្តាហ៍ 1 — AI\u002FML\u002FDeep Learning · Supervised vs Unsupervised Learning · ប្រភេទបញ្ហា MLសប្តាហ៍ 2 — Vectors & Matrices · Matrix Multiplication · Gradients · Gradient Descentសប្តាហ៍ 3 — ការចែកចាយប្រូបាប្លីស្ទេ · Bayes' Theorem · MSE & Cross-Entropy Loss · 
Bias-Variance Tradeoffសប្តាហ៍ 4 — Exploratory Data Analysis · ការដោះស្រាយទិន្នន័យបំផ្លាត · Feature Scaling · ហ្វឹកហ្វឺន Model ដំបូង 🎯 គម្រោងបញ្ចប់ Module: អនុវត្ត Gradient Descent និង Linear Regression ពីដំបូង ដោយប្រើ NumPy តែប៉ុណ្ណោះ។",{"id":18730,"title":18731,"titles":18732,"content":18733,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-2-machine-learning-បែបប្រពៃណី-៥-សប្តាហ៍","កម្មវិធី​ទី 2 · Machine Learning បែបប្រពៃណី (៥ សប្តាហ៍)",[18301,18722],"Scikit-learn Ecosystem និងការស្ទាត់ជំនាញទិន្នន័យ Tabular។ បង្កើត វាស់ស្ទង់ និង Tune Classifiers និង Regressors ពិតប្រាកដ។ សប្តាហ៍ 5 — Linear Regression · Logistic Regression · Sigmoid Function · Decision Boundariesសប្តាហ៍ 6 — Decision Trees (Gini\u002FEntropy) · Random Forests · Bagging · Feature Importanceសប្តាហ៍ 7 — XGBoost & LightGBM · Metrics (Precision, Recall, F1, AUC-ROC) · Confusion Matricesសប្តាហ៍ 8 — K-Fold Cross-Validation · Grid & Bayesian Hyperparameter Search · Feature Engineering · ការការពារ Data Leakageសប្តាហ៍ 9 — Kaggle Competition Strategy · sklearn Pipeline · Model Serialization 🎯 គម្រោងបញ្ចប់ Module: ចូលរួម Kaggle Tabular-Data Challenge ហើយបង្កើត sklearn Pipeline ពេញលេញ។",{"id":18735,"title":18736,"titles":18737,"content":18738,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-3-deep-learning-ជាមួយ-pytorch-៤-សប្តាហ៍","កម្មវិធី​ទី 3 · Deep Learning ជាមួយ PyTorch (៤ សប្តាហ៍)",[18301,18722],"Neural Networks ពីគ្រឹះដល់ CNNs ដំណើរការ GPU។ ស្វែងយល់ Layer ជ្រៅជ្រះ Gradient Update និងល្បិចហ្វឹកហ្វឺន។ សប្តាហ៍ 10 — Perceptrons · Multi-Layer Networks · Forward Propagation · Backpropagation & Chain Ruleសប្តាហ៍ 11 — Activation Functions (ReLU, Softmax) · PyTorch Tensors · Custom Dataset & DataLoader · Data Augmentationសប្តាហ៍ 12 — Training Loops · Adam\u002FSGD Optimizers · Early Stopping · Model Checkpointingសប្តាហ៍ 13 — Convolutional Layers & Pooling · ResNet\u002FVGG · Transfer Learning · Fine-Tuning Strategies 🎯 គម្រោងបញ្ចប់ Module: បង្កើត Image Classifier ដោយប្រើ Transfer Learning ជាមួយ CNN 
ដែលបានហ្វឹកហ្វឺនមុន។",{"id":18740,"title":18741,"titles":18742,"content":18743,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-4-transformers-hugging-face-៣-សប្តាហ៍","កម្មវិធី​ទី 4 · Transformers & Hugging Face (៣ សប្តាហ៍)",[18301,18722],"យន្តការ Attention ដែលជំរុញ AI សម័យទំនើប។ Fine-tune BERT និង GPT-class Models សម្រាប់ NLP Tasks ពិតប្រាកដ។ សប្តាហ៍ 14 — Self-Attention · Multi-Head Attention · Transformer Architecture · Tokenization (BPE\u002FWordPiece) · Positional Encodingសប្តាហ៍ 15 — Hugging Face Hub & Pipeline API · Fine-Tuning ជាមួយ Trainer API · BERT សម្រាប់ Text Classification & NERសប្តាហ៍ 16 — NLP Competition Strategy · ផ្ទុក Models ទៅ Hugging Face Hub · បង្កើត Text Classification Service 🎯 គម្រោងបញ្ចប់ Module: Fine-tune Transformer លើ Kaggle NLP Challenge ហើយបោះផ្សាយ Model ទៅ Hugging Face Hub។",{"id":18745,"title":18746,"titles":18747,"content":18748,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-5-mlops-deployment-៣-សប្តាហ៍","កម្មវិធី​ទី 5 · MLOps & Deployment (៣ សប្តាហ៍)",[18301,18722],"ពី Jupyter Notebook ទៅ Production API។ រៀន Tools និងការអនុវត្ត ដែល ML Engineers ទាំងអស់ត្រូវការក្នុងឧស្សាហកម្ម។ សប្តាហ៍ 17 — Model Serialization (pickle \u002F joblib \u002F ONNX) · DVC Versioning · REST Prediction APIs ជាមួយ FastAPIសប្តាហ៍ 18 — Docker Images & Containers · Dockerfile Best Practices · MLflow Experiment Tracking & Model Registryសប្តាហ៍ 19 — GitHub Actions CI\u002FCD · Automated Testing · Data Drift Detection · Model Monitoring & Alerting 🎯 គម្រោងបញ្ចប់ Module: Deploy ML Model ពេញលេញជាមួយ FastAPI, Docker, និង CI\u002FCD Pipeline។",{"id":18750,"title":18751,"titles":18752,"content":18753,"level":10753},"\u002Fkm\u002Fbootcamp#កម្មវិធីទី-6-គម្រោងបញ្ចប់-១-សប្តាហ៍","កម្មវិធី​ទី 6 · គម្រោងបញ្ចប់ (១ សប្តាហ៍)",[18301,18722],"បង្កើត និង Deploy ប្រព័ន្ធ ML Production ពេញលេញ — ពី Raw Data ទៅ Live API។ ស្វែងរក Dataset ពិតប្រាកដ (Kaggle ឬបញ្ហាពិតប្រាកដ)EDA, Preprocessing, និង Feature Engineering ពេញលេញTrain និងប្រៀបធៀប Models ច្រើន ជាមួយ Hyperparameter Tuning 
ដែលបានចារDeploy REST API (FastAPI) ក្នុង Docker Containerរៀបចំ CI\u002FCD Pipeline ជាមួយ GitHub ActionsDemo Presentation 10–15 នាទី គំនិតគម្រោង: Sentiment Analysis · Medical Image Classification · Real-Estate Price Prediction · Fake News Detection · Customer Churn · Text Summarization API",{"id":18755,"title":18756,"titles":18757,"content":18758,"level":10719},"\u002Fkm\u002Fbootcamp#កម្មវិធីនេះសម្រាប់អ្នកណា","កម្មវិធីនេះសម្រាប់អ្នកណា?",[18301],"Developers ដែលស្គាល់ Python ហើយចង់ចូលក្នុងវិស័យ ML Engineeringនិស្សិតដែលចង់បានការអនុវត្ត ML Projects ពិតប្រាកដអ្នកណាក៏ដោយដែលចង់ស្ទាប់ស្ទង់ ML ដោយការអនុវត្ត",{"id":18760,"title":18761,"titles":18762,"content":18763,"level":10719},"\u002Fkm\u002Fbootcamp#អ្វីដែលអ្នកនឹងបង្កើត","អ្វីដែលអ្នកនឹងបង្កើត",[18301],"នៅចុងបញ្ចប់នៃ Bootcamp អ្នកនឹងបាន Train និង Deploy ម៉ូដែលពិតប្រាកដ ចូលរួមប្រកួត Kaggle ហើយបន្ហាញ Capstone Project ដែលបង្ហាញ Full-Stack ML Skills។",{"id":18310,"title":18309,"titles":18765,"content":18766,"level":10699},[],"ពីការកំណត់ល្បឿនថេរ ទៅកាន់ការផ្លាស់ប្តូរតាមស្ថានភាព — ស្វែងយល់ពីបច្ចេកទេសនៅពីក្រោយ AI សម័យថ្មី រូបភាពយកមកពី: DL Notes: Advanced Gradient Descent ការប្រើ Gradient descent គឺពិតជាមានប្រសិទ្ធភាព ប៉ុន្តែវាមានបញ្ហាមួយ៖ រាល់គ្រប់ Parameter (ប៉ារ៉ាម៉ែត្រ) ទាំងអស់នៅក្នុងម៉ូឌែលរបស់អ្នក ប្រើប្រាស់ Learning Rate (ទំហំជំហាន) តែមួយដូចគ្នា។ ហើយការកំណត់លេខនោះឱ្យបានត្រឹមត្រូវ? 
វាដូចជាការទស្សន៍ទាយច្រើនជាងវិទ្យាសាស្ត្រ។ Adam (មកពីពាក្យថា Adaptive Moment Estimation) ត្រូវបានណែនាំដោយលោក Diederik Kingma និង Jimmy Ba ក្នុងឆ្នាំ ២០១៥ [1] ហើយវាបានក្លាយជា \"Optimizer\" ដ៏ពេញនិយមបំផុតក្នុងវិស័យ Deep Learning។ អត្ថបទនេះនឹងពន្យល់ថា ហេតុអ្វី បានជាការប្រើ Learning Rate ថេរតែមួយមិនសូវល្អ, តើ Adam ធ្វើអ្វីខ្លះខុសពីគេ, និង របៀប ដែលវាដំណើរការ — ចាប់ពីទ្រឹស្តីរហូតដល់កូដជាក់ស្តែង។",{"id":18768,"title":18769,"titles":18770,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#បញ្ហានៃការប្រើ-learning-rate-ថេរ-fixed-learning-rate","បញ្ហានៃការប្រើ Learning Rate ថេរ (Fixed Learning Rate)",[18309],{"id":18772,"title":18773,"titles":18774,"content":18775,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#វិបត្តិ-ទំហំជំហានមួយ-ប្រើគ្រប់កន្លែង","វិបត្តិ \"ទំហំជំហានមួយ ប្រើគ្រប់កន្លែង\"",[18309,18769],"សាកស្រមៃថាអ្នកកំពុងដើរភ្នំដោយមានច្បាប់ដ៏តឹងរឹងមួយ៖ រាល់ជំហានដែលអ្នកបោះ ត្រូវតែមានប្រវែងស្មើៗគ្នាជានិច្ច — មិនឱ្យលើស មិនឱ្យខ្វះ។ នៅពេលអ្នកនៅលើច្រាំងថ្មចោត ការបោះជំហានវែងពេកអាចឱ្យអ្នកធ្លាក់ជ្រោះ។ ប៉ុន្តែនៅពេលអ្នកនៅលើវាលទំនាបដែលមានជម្រាលតិចតួច ការបោះជំហានដដែលនោះមានអារម្មណ៍ថាយឺតខ្លាំងណាស់ — វាអាចនឹងចំណាយពេលរាប់ឆ្នាំទើបទៅដល់បាតភ្នំ។ នេះគឺជាបញ្ហាពិតប្រាកដនៃ Learning Rate ថេរ (α\\alphaα) នៅក្នុង Gradient Descent៖ θnew=θold−α∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\nabla J(\\theta)θnew​=θold​−α∇J(θ) តម្លៃ α\\alphaα តែមួយនេះ គ្រប់គ្រងទំហំជំហានសម្រាប់ គ្រប់ Parameter ទាំងអស់ — ទោះបីជា Parameter ខ្លះត្រូវការបោះជំហានធំ ឬខ្លះត្រូវការបោះជំហានតូចក៏ដោយ។",{"id":18777,"title":18778,"titles":18779,"content":10692,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ផលវិបាក-៣-យ៉ាងនៃ-learning-rate-ថេរ","ផលវិបាក ៣ យ៉ាងនៃ Learning Rate ថេរ",[18309,18769],{"id":18781,"title":18782,"titles":18783,"content":18784,"level":10760},"\u002Fkm\u002Frooms\u002Fadam-optimizer#១-ធំពេក-រំលងគោលដៅ-overshooting","១. 
ធំពេក — រំលងគោលដៅ (Overshooting)",[18309,18769,18778],"នៅពេល α\\alphaα ធំពេក វានឹងធ្វើឱ្យយើងបោះជំហានរំលងចំណុចទាបបំផុត (Minimum) ហើយលោតទៅលោតមក៖\nការបាត់បង់ (Loss) មិនដែលថយចុះឡើយ — វានឹងលោតចុះឡើងជុំវិញគោលដៅរហូត។",{"id":18786,"title":18787,"titles":18788,"content":18789,"level":10760},"\u002Fkm\u002Frooms\u002Fadam-optimizer#២-តូចពេក-យឺតដូចអណ្តើក-crawling","២. តូចពេក — យឺតដូចអណ្តើក (Crawling)",[18309,18769,18778],"នៅពេល α\\alphaα តូចពេក ការរៀនដំណើរការទៅមុខមែន ប៉ុន្តែវាយឺតខ្លាំងណាស់។ ក្នុងម៉ូឌែលដែលមាន Parameter រាប់លាន នេះគឺជាមហន្តរាយខាងពេលវេលា និងកម្លាំងម៉ាស៊ីន។",{"id":18791,"title":18792,"titles":18793,"content":18794,"level":10760},"\u002Fkm\u002Frooms\u002Fadam-optimizer#៣-បញ្ហា-ផ្លូវតូចចង្អៀត-ravine-problem","៣. បញ្ហា \"ផ្លូវតូចចង្អៀត\" (Ravine Problem)",[18309,18769,18778],"នៅក្នុងលំហវិមាត្រខ្ពស់ ក្រាហ្វជម្រាលជារឿយៗមើលទៅដូចជា ជ្រលងភ្នំដ៏តូចចង្អៀត — ចោតខ្លាំងក្នុងទិសដៅម្ខាង និងរាបស្មើក្នុងទិសដៅម្ខាងទៀត។ ទិសដៅដែលចោត ត្រូវការ α\\alphaα តូច ដើម្បីកុំឱ្យបោះជំហានបុកជញ្ជាំងជ្រលងភ្នំ។ទិសដៅដែលរាបស្មើ ត្រូវការ α\\alphaα ធំ ដើម្បីដើរឱ្យទៅមុខឆាប់ដល់។\nមិនមាន α\\alphaα ថេរណាមួយ អាចបំពេញចិត្តទិសដៅទាំងពីរក្នុងពេលតែមួយបានទេ។ LeCun et al. 
[2] បានធ្វើការវិភាគលម្អិតពីបទប្បញ្ញត្តិ Loss Landscape ទាំងនេះ និងផលប៉ះពាល់របស់ពួកវាទៅលើការ Convergence។ ចំណុចខ្សោយសំខាន់\n  Parameter ផ្សេងគ្នា ត្រូវការទំហំជំហានផ្សេងគ្នា។ Learning rate ថេរចាត់ទុកពួកវាដូចគ្នាទាំងអស់ — ហើយនេះគឺជាបញ្ហាកកស្ទះ។",{"id":18796,"title":18797,"titles":18798,"content":18799,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ស្គាល់-adam-ប្រព័ន្ធ-gps-នៃ-optimizer","ស្គាល់ Adam: ប្រព័ន្ធ GPS នៃ Optimizer",[18309],"បើ Gradient Descent ធម្មតាគឺជាការដើរភ្នំដោយបោះជំហានថេរ Adam គឺជាការប្រើ GPS ដែលមានការណែនាំផ្លូវដោយវៃឆ្លាត៖ វាបង្កើនល្បឿននៅលើផ្លូវហាយវេ បន្ថយល្បឿននៅផ្លូវបត់ចង្អៀត និងចងចាំផ្លូវដែលធ្លាប់បានដើរកន្លងមក។ អាថ៌កំបាំងរបស់ Adam គឺការតាមដាន រឿងពីរយ៉ាង សម្រាប់រាល់ Parameter នីមួយៗ៖ បរិមាណនិមិត្តសញ្ញាអត្ថន័យងាយៗ1st Moment (Momentum)mtm_tmt​តើទិសដៅណាខ្លះដែលជម្រាល (Gradients) ធ្លាប់ចង្អុលទៅនាពេលថ្មីៗនេះ?2nd Moment (Adaptive Scale)vtv_tvt​តើជម្រាល (Gradients) មានទំហំ ធំប៉ុនណា នាពេលថ្មីៗនេះ? តាមរយៈការចែកទំហំជំហាននឹងឫសការ៉េនៃ 2nd moment, Adam នឹងបន្ថយទំហំជំហានដោយស្វ័យប្រវត្តិសម្រាប់ Parameter ណាដែលមានជម្រាលធំៗខ្លាំងពេក និងបង្កើនទំហំជំហានសម្រាប់ Parameter ណាដែលមានជម្រាលតូចៗ។",{"id":18801,"title":18802,"titles":18803,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#របៀបបង្កើត-adam-ជំហានម្តងៗ","របៀបបង្កើត Adam (ជំហានម្តងៗ)",[18309],{"id":18805,"title":18806,"titles":18807,"content":18808,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ជំហានទី-១-momentum-រក្សាល្បឿន-និងទិសដៅ","ជំហានទី ១ — Momentum: រក្សាល្បឿន និងទិសដៅ",[18309,18802],"បញ្ហាដែលវាដោះស្រាយ: ជម្រាល (Gradients) ជារឿយៗមានភាពរំខាន (Noisy)។ ការរត់តាមជម្រាលដែលរំខានទាំងនោះ ធ្វើឱ្យផ្លូវដើរមិនរលូន។ គំនិត: រក្សាមធ្យមភាគនៃជម្រាលពីមុនៗ ដូចជាការរមៀលបាល់ចុះពីលើភ្នំ — វានឹងបង្កើនល្បឿនក្នុងទិសដៅដែលស្របគ្នា និងមិនងាយងាករេដោយសារដុំថ្មតូចៗតាមផ្លូវ។ mt=β1⋅mt−1+(1−β1)⋅gtm_t = \\beta_1 \\cdot m_{t-1} + (1 - \\beta_1) \\cdot g_tmt​=β1​⋅mt−1​+(1−β1​)⋅gt​ β1\\beta_1β1​ ជាមេគុណ (ជាទូទៅគឺ 0.9)៖ មានន័យថាឱ្យតម្លៃ ៩០% លើអតីតកាល និង ១០% លើជម្រាលថ្មី។ Sutskever et al. 
[3] បានបង្ហាញថា Momentum term នេះ មានសារៈសំខាន់ខ្លាំងណាស់ក្នុងការ Converge យ៉ាងលឿន និងស្ថិតស្ថេរ នៅក្នុង Deep Networks។",{"id":18810,"title":18811,"titles":18812,"content":18813,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ជំហានទី-២-adaptive-scale-ការបត់បែនតាមប្រវត្តិ","ជំហានទី ២ — Adaptive Scale: ការបត់បែនតាមប្រវត្តិ",[18309,18802],"បញ្ហាដែលវាដោះស្រាយ: Parameter ខ្លះមានជម្រាលធំ ខ្លះមានជម្រាលតូច។ យើងចង់ឱ្យអាធំដើរតិចៗ និងអាតូចដើរឱ្យបានច្រើន។ គំនិត: តាមដានមធ្យមភាគនៃ \"ការ៉េ\" នៃជម្រាល៖ vt=β2⋅vt−1+(1−β2)⋅gt2v_t = \\beta_2 \\cdot v_{t-1} + (1 - \\beta_2) \\cdot g_t^2vt​=β2​⋅vt−1​+(1−β2​)⋅gt2​ Parameter ណាដែលទទួលបានជម្រាលធំៗជាបន្តបន្ទាប់ នឹងមានតម្លៃ vtv_tvt​ ធំ។ នៅពេលយើងយកជំហានទៅចែកនឹង vt\\sqrt{v_t}vt​​ វានឹងធ្វើឱ្យការ Update ថយចុះមកតូចវិញ។ នេះហើយជា Learning Rate ផ្ទាល់ខ្លួន សម្រាប់ Parameter នីមួយៗ។",{"id":18815,"title":18816,"titles":18817,"content":18818,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ជំហានទី-៣-bias-correction-ការកែតម្រូវពេលចាប់ផ្តើម","ជំហានទី ៣ — Bias Correction: ការកែតម្រូវពេលចាប់ផ្តើម",[18309,18802],"បញ្ហាដែលវាដោះស្រាយ: ដោយសារនៅពេលចាប់ផ្តើម m0=0m_0 = 0m0​=0 និង v0=0v_0 = 0v0​=0 នោះការប៉ាន់ស្មានដំបូងៗនឹងខិតទៅជិតសូន្យខ្លាំងពេក (វាមិនទាន់មានប្រវត្តិគ្រប់គ្រាន់)។ ដំណោះស្រាយ: ចែកវាជាមួយ (1−βt)(1 - \\beta^t)(1−βt) ដើម្បីកែតម្រូវឱ្យមានតុល្យភាពវិញនៅជំហានដំបូងៗ៖ m^t=mt1−β1t,v^t=vt1−β2t\\hat{m}_t = \\frac{m_t}{1 - \\beta_1^t}, \\qquad \\hat{v}_t = \\frac{v_t}{1 - \\beta_2^t}m^t​=1−β1t​mt​​,v^t​=1−β2t​vt​​",{"id":18820,"title":18821,"titles":18822,"content":18823,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ជំហានទី-៤-រូបមន្តចុងក្រោយនៃការ-update","ជំហានទី ៤ — រូបមន្តចុងក្រោយនៃការ Update",[18309,18802],"θt+1=θt−αv^t+ϵ⋅m^t\\boxed{\\theta_{t+1} = \\theta_t - \\frac{\\alpha}{\\sqrt{\\hat{v}_t} + \\epsilon} \\cdot \\hat{m}_t}θt+1​=θt​−v^t​​+ϵα​⋅m^t​​ (ចំណាំ៖ ϵ≈10−8\\epsilon \\approx 10^{-8}ϵ≈10−8 ដើម្បីការពារកុំឱ្យមានការចែកនឹងសូន្យ) Hyperparameters លំនាំដើមតាមក្រដាសសំណើដើម [1]: Hyperparameterនិមិត្តសញ្ញាDefaultLearning 
rateα\\alphaα0.0011st moment decayβ1\\beta_1β1​0.92nd moment decayβ2\\beta_2β2​0.999Numerical stabilityϵ\\epsilonϵ10−810^{-8}10−8",{"id":18825,"title":18826,"titles":18827,"content":18828,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ឧទាហរណ៍ជាក់ស្តែង-adam-ដំណើរការ","ឧទាហរណ៍ជាក់ស្តែង: Adam ដំណើរការ",[18309],"សូមតាមដាន Adam ដោយដៃ លើអនុគមន៍សាមញ្ញដូចដែលយើងប្រើក្នុង Gradient Descent: J(θ)=θ2,∇J(θ)=2θJ(\\theta) = \\theta^2, \\qquad \\nabla J(\\theta) = 2\\thetaJ(θ)=θ2,∇J(θ)=2θ ចាប់ផ្តើមនៅ θ0=5\\theta_0 = 5θ0​=5 ជាមួយ hyperparameters លំនាំដើម (α=0.001\\alpha = 0.001α=0.001, β1=0.9\\beta_1 = 0.9β1​=0.9, β2=0.999\\beta_2 = 0.999β2​=0.999, ϵ=10−8\\epsilon = 10^{-8}ϵ=10−8)។ ចាប់ផ្តើម: m0=0m_0 = 0m0​=0, v0=0v_0 = 0v0​=0។ ជំហាន t=1t=1t=1: g1=2×5=10g_1 = 2 \\times 5 = 10g1​=2×5=10 m1=0.9×0+0.1×10=1.0m_1 = 0.9 \\times 0 + 0.1 \\times 10 = 1.0m1​=0.9×0+0.1×10=1.0 v1=0.999×0+0.001×100=0.1v_1 = 0.999 \\times 0 + 0.001 \\times 100 = 0.1v1​=0.999×0+0.001×100=0.1 m^1=1.01−0.91=1.00.1=10.0\\hat{m}_1 = \\frac{1.0}{1 - 0.9^1} = \\frac{1.0}{0.1} = 10.0m^1​=1−0.911.0​=0.11.0​=10.0 v^1=0.11−0.9991=0.10.001=100.0\\hat{v}_1 = \\frac{0.1}{1 - 0.999^1} = \\frac{0.1}{0.001} = 100.0v^1​=1−0.99910.1​=0.0010.1​=100.0 θ1=5−0.001100+10−8×10.0=5−0.00110×10.0=5−0.001=4.999\\theta_1 = 5 - \\frac{0.001}{\\sqrt{100} + 10^{-8}} \\times 10.0 = 5 - \\frac{0.001}{10} \\times 10.0 = 5 - 0.001 = 4.999θ1​=5−100​+10−80.001​×10.0=5−100.001​×10.0=5−0.001=4.999 ជំហាន t=2t=2t=2: g2=2×4.999=9.998g_2 = 2 \\times 4.999 = 9.998g2​=2×4.999=9.998 m2=0.9×1.0+0.1×9.998=1.8998m_2 = 0.9 \\times 1.0 + 0.1 \\times 9.998 = 1.8998m2​=0.9×1.0+0.1×9.998=1.8998 v2=0.999×0.1+0.001×9.9982=0.1999v_2 = 0.999 \\times 0.1 + 0.001 \\times 9.998^2 = 0.1999v2​=0.999×0.1+0.001×9.9982=0.1999 បន្ទាប់ពី Bias Correction និង Update, θ2≈4.998\\theta_2 \\approx 4.998θ2​≈4.998។ Adam ធ្វើ ជំហានស្ថិតស្ថេរ និងគ្រប់គ្រងបាន — មិនរហ័សហ្លើតដូច SGD ដែល α\\alphaα ធំ (ដែលនឹងបោះជំហានរំលងចំណុចទាប) ប៉ុន្តែលឿនជាងច្រើនពី SGD ដែល α\\alphaα តូចខ្លាំង 
(ដែលនឹងដើរយឺតបន្តិចម្ដងៗ)។ ការកែ Bias Correction ធ្វើឱ្យជំហានដំបូងៗនៅតែមានន័យ ទោះបីចាប់ផ្តើមពី Cold Start ក៏ដោយ។",{"id":18830,"title":18831,"titles":18832,"content":18833,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ការប្រៀបធៀប-optimizer","ការប្រៀបធៀប Optimizer",[18309],"Optimizerទំហំជំហានការចងចាំចំណុចខ្លាំងចំណុចខ្សោយSGDថេរ (α\\alphaα)គ្មានសាមញ្ញ ងាយយល់ពិបាកកំណត់ α\\alphaα, យឺតSGD + Momentumថេរ (α\\alphaα)ទិសដៅជម្រាលដើរលឿន និងរលូនជាងនៅតែត្រូវការ α\\alphaα ល្អRMSProp [4]បត់បែនទំហំជម្រាលល្អក្នុងករណីទិន្នន័យផ្លាស់ប្ដូរគ្មាន MomentumAdamបត់បែនទិសដៅ + ទំហំល្អបំផុតសឹងគ្រប់ការងារពេលខ្លះ Generalize បានមិនល្អប៉ុណ្ណឹង Adam បញ្ចូលរួម SGD + Momentum (1st moment) និង RMSProp (2nd moment) ក្នុងក្របខណ្ឌតែមួយ ជាមួយ Bias Correction ជាការបន្ថែម។",{"id":18835,"title":18836,"titles":18837,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ការអនុវត្តជាមួយ-python-កូដគំរូ","ការអនុវត្តជាមួយ Python (កូដគំរូ)",[18309],{"id":18839,"title":18840,"titles":18841,"content":18842,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#adam-យ៉ាងសាមញ្ញពីបាតដៃទទេ","Adam យ៉ាងសាមញ្ញពីបាតដៃទទេ",[18309,18836],"ខាងក្រោមនេះគឺជាការសរសេរ Adam Optimizer ដោយខ្លួនឯង (ពីបាតដៃទទេ)៖ import numpy as np\n\ndef adam(grad_fn, theta_init, alpha=0.001, beta1=0.9, beta2=0.999, eps=1e-8, max_iters=1000):\n    theta = theta_init\n    m = 0.0   # 1st moment (momentum)\n    v = 0.0   # 2nd moment (adaptive scale)\n\n    for t in range(1, max_iters + 1):\n        g = grad_fn(theta)            # ① គណនា gradient\n\n        m = beta1 * m + (1 - beta1) * g       # ② update 1st moment\n        v = beta2 * v + (1 - beta2) * g ** 2  # ③ update 2nd moment\n\n        m_hat = m \u002F (1 - beta1 ** t)          # ④ កែតម្រូវ bias សម្រាប់ m\n        v_hat = v \u002F (1 - beta2 ** t)          # ⑤ កែតម្រូវ bias សម្រាប់ v\n\n        # ⑥ ធ្វើការ Update parameter\n        theta = theta - alpha \u002F (np.sqrt(v_hat) + eps) * m_hat  \n\n        if abs(g) \u003C 1e-7:\n            print(f\"ជោគជ័យនៅជំហានទី 
{t}\")\n            break\n\n    return theta\n\n# សាកល្បងកាត់បន្ថយ J(θ) = θ²,  ∇J(θ) = 2θ\ntheta_min = adam(grad_fn=lambda th: 2 * th, theta_init=5.0)\nprint(f\"ចំណុចទាបបំផុតគឺ θ = {theta_min:.8f}\") លទ្ធផល: ជោគជ័យនៅជំហានទី 817\nចំណុចទាបបំផុតគឺ θ = 0.00000001",{"id":18844,"title":18845,"titles":18846,"content":18847,"level":10753},"\u002Fkm\u002Frooms\u002Fadam-optimizer#adam-លើ-linear-regression","Adam លើ Linear Regression",[18309,18836],"សូមសាកអនុវត្ត Adam លើករណីប្រើប្រាស់ជាក់ស្តែង — ការ Fit ខ្សែត្រង់ y^=w⋅x+b\\hat{y} = w \\cdot x + by^​=w⋅x+b ទៅទិន្នន័យ។ import numpy as np\n\ndef adam_linear_regression(X, y, alpha=0.01, beta1=0.9, beta2=0.999,\n                            eps=1e-8, epochs=200):\n    m = len(y)\n    w, b = 0.0, 0.0\n\n    # Adam state ដាច់ដោយឡែកសម្រាប់ parameter នីមួយៗ\n    mw, vw = 0.0, 0.0   # moments for w\n    mb, vb = 0.0, 0.0   # moments for b\n\n    for t in range(1, epochs + 1):\n        y_pred = w * X + b\n        error  = y_pred - y\n\n        # Gradients (រូបមន្តដូចគ្នានឹង Gradient Descent)\n        gw = (2 \u002F m) * np.dot(error, X)\n        gb = (2 \u002F m) * np.sum(error)\n\n        # 1st និង 2nd moment updates សម្រាប់ w\n        mw = beta1 * mw + (1 - beta1) * gw\n        vw = beta2 * vw + (1 - beta2) * gw ** 2\n        mw_hat = mw \u002F (1 - beta1 ** t)\n        vw_hat = vw \u002F (1 - beta2 ** t)\n\n        # 1st និង 2nd moment updates សម្រាប់ b\n        mb = beta1 * mb + (1 - beta1) * gb\n        vb = beta2 * vb + (1 - beta2) * gb ** 2\n        mb_hat = mb \u002F (1 - beta1 ** t)\n        vb_hat = vb \u002F (1 - beta2 ** t)\n\n        # Parameter updates\n        w = w - alpha \u002F (np.sqrt(vw_hat) + eps) * mw_hat\n        b = b - alpha \u002F (np.sqrt(vb_hat) + eps) * mb_hat\n\n        if t % 50 == 0:\n            loss = np.mean(error ** 2)\n            print(f\"Epoch {t:4d}: loss={loss:.6f}  w={w:.4f}  b={b:.4f}\")\n\n    return w, b\n\n# ទំនាក់ទំនងពិត: y = 2x + 1\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = 
np.array([3.0, 5.0, 7.0, 9.0, 11.0])\n\nw, b = adam_linear_regression(X, y)\nprint(f\"\\nFitted: ŷ = {w:.4f}·x + {b:.4f}\") លទ្ធផល: Epoch   50: loss=0.000042  w=1.9953  b=1.0044\nEpoch  100: loss=0.000000  w=2.0000  b=1.0000\nEpoch  150: loss=0.000000  w=2.0000  b=1.0000\nEpoch  200: loss=0.000000  w=2.0000  b=1.0000\n\nFitted: ŷ = 2.0000·x + 1.0000 Adam ស្ដារ w=2,b=1w=2, b=1w=2,b=1 បានច្បាស់លាស់ និងលឿន — ជាពិសេសបើប្រៀបនឹង Gradient Descent ធម្មតា ដែលត្រូវបន្ដ Tune Learning Rate ដោយប្រុងប្រយ័ត្ន។",{"id":18849,"title":18850,"titles":18851,"content":18852,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ពេលណាគួរប្រើ-adam","ពេលណាគួរប្រើ Adam?",[18309],"Adam គឺជាជម្រើសដ៏សុវត្ថិភាពបំផុតសម្រាប់កិច្ចការ Deep Learning ស្ទើរតែទាំងអស់៖ Neural networks: Training MLPs, CNNs, Transformers, RNNsទិន្នន័យដែលមានការរំខាន (Noisy gradients): ល្អសម្រាប់ Mini-batch training ដែលប្រើ Batch size តូចៗ។ទិន្នន័យរំដោចខ្ចាត (Sparse features): ល្អសម្រាប់ NLP ដែលពាក្យខ្លះបង្ហាញកម្រ (ជម្រាលធំ ប៉ុន្តែមិនសូវញឹកញាប់)។អ្នកទើបចាប់ផ្តើម: នៅពេលអ្នកមិនចង់ចំណាយពេលច្រើនក្នុងការ Tune Learning Rate។ ចំណាំមួយ\n  Wilson et al. 
[5] បង្ហាញថា Adaptive optimizer ដូចជា Adam អាចនឹង Generalize បានន ចុះបន្តិចបើប្រៀបនឹង SGD + Momentum ដែល Tune ល្អ សម្រាប់ Image Classification។ ក្នុងករណីនោះ SGD + Momentum ជាមួយ Learning Rate Scheduling អាចប្រសើរជាង Adam។ ប៉ុន្តែសម្រាប់កិច្ចការភាគច្រើន ភាពរឹងមាំ (Robustness) របស់ Adam នៅតែឈ្នះ។",{"id":18854,"title":18855,"titles":18856,"content":18857,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#សេចក្តីសង្ខេប","សេចក្តីសង្ខេប",[18309],"គំនិតចំណុចសំខាន់ចំណុចខ្សោយ Fixed LRα\\alphaα តែមួយសម្រាប់ Parameters ទាំងអស់ — លម្អិតពេកMomentum (mtm_tmt​)ធ្វើឱ្យទិសជម្រាលរលូន និងស្ថិតស្ថេរតាមពេលAdaptive scale (vtv_tvt​)Scale ជំហានតាមប្រវត្តិទំហំជម្រាលBias correctionកែ Cold-start bias នៅពេល m0=v0=0m_0 = v_0 = 0m0​=v0​=0Adam updateθ←θ−αv^t+ϵm^t\\theta \\leftarrow \\theta - \\frac{\\alpha}{\\sqrt{\\hat{v}_t} + \\epsilon} \\hat{m}_tθ←θ−v^t​​+ϵα​m^t​ Adam មិនមែនមកលុបបំបាត់ Learning Rate (α\\alphaα) នោះទេ — វានៅតែសំខាន់។ ប៉ុន្តែ Adam ធ្វើឱ្យការហ្វឹកហាត់ម៉ូឌែល មិនសូវរងឥទ្ធិពលខ្លាំង ពីការកំណត់លេខ α\\alphaα ខុស។ នេះជាមូលហេតុដែលតម្លៃ Default 0.001 របស់វា ដំណើរការបានយ៉ាងល្អលើម៉ូឌែលរាប់ពាន់ខុសៗគ្នា។ បើ Gradient Descent គឺជាការដើរភ្នំដោយបោះជំហានស្មើៗគ្នា Adam គឺជាការជួលអ្នកនាំផ្លូវដែលមាន GPS ជាប់ខ្លួន ដែលចេះកែសម្រួលល្បឿនតាមស្ថានភាពផ្លូវ និងធានាថាអ្នកនឹងមិនដើរវង្វេង ឬចំណាយពេលឥតប្រយោជន៍លើផ្លូវដែលធ្លាប់ដើររួចនោះទេ។",{"id":18859,"title":18860,"titles":18861,"content":18479,"level":10719},"\u002Fkm\u002Frooms\u002Fadam-optimizer#ឯកសារយោង","ឯកសារយោង",[18309],{"id":18314,"title":18313,"titles":18863,"content":18864,"level":10699},[],"ផ្តើមពី មេគុណប្រាប់ទិសនៃសមីការបន្ទាត់ រហូតដល់ calculus នៅពីក្រោយ machine learning រាល់ពេលដែល Neural Network រៀន វាតែងតែសួរខ្លួនឯងនូវសំណួរដដែលៗថា: \"ប្រសិនបើខ្ញុំកែប្រែ parameter នេះបន្តិច តើ error នឹងកើនឡើង ឬថយចុះ — ហើយប៉ុន្មាន?\" សំណួរនេះត្រូវបានឆ្លើយដោយ ដេរីវេ (Derivative)។ មុននឹងយើងនិយាយអំពី gradient ឬ optimizer យើងត្រូវយល់ derivative 
ពីដំបូងបង្អស់។",{"id":18866,"title":18867,"titles":18868,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-១-បន្ទាត់-និងមេគុណប្រាប់ទិស-lines-and-slopes","ផ្នែកទី ១ — បន្ទាត់ និងមេគុណប្រាប់ទិស (Lines and Slopes)",[18313],{"id":18870,"title":18871,"titles":18872,"content":18873,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#សមីការបន្ទាត់","សមីការបន្ទាត់",[18313,18867],"ទំនាក់ទំនងងាយបំផុតរវាងតម្លៃពីរ គឺបន្ទាត់ត្រង់៖ y=mx+by = mx + by=mx+b ក្នុងនោះ៖ xxx គឺជា ទិន្នន័យចូល (input)yyy គឺជា លទ្ធផល (output)mmm គឺជា មេគុណប្រាប់ទិស ឬចំណោត (slope) — បញ្ជាក់ថាបន្ទាត់នោះងើបឡើង ឬចុះក្រោមខ្លាំងកម្រិតណាbbb គឺជា ចំណុចប្រសព្វអ័ក្ស y — ជាកន្លែងដែលបន្ទាត់កាត់អ័ក្សឈរ ឧទាហរណ៍៖ y=2x+1y = 2x + 1y=2x+1 xxxy=2x+1y = 2x + 1y=2x+101132537 រាល់ពេលដែល xxx កើនឡើង ១ នោះ yyy នឹងកើនឡើង ២ ជានិច្ច។ មេគុណប្រាប់ទិស m=2m = 2m=2 គឺជាអ្នកកំណត់អត្រាកំណើនថេរនេះ។",{"id":18875,"title":18876,"titles":18877,"content":18878,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ការគណនាមេគុណប្រាប់ទិសរវាងចំណុចពីរ","ការគណនាមេគុណប្រាប់ទិសរវាងចំណុចពីរ",[18313,18867],"ប្រសិនបើយើងមានពីរចំណុច (x1,y1)(x_1, y_1)(x1​,y1​) និង (x2,y2)(x_2, y_2)(x2​,y2​) នៅលើបន្ទាត់ មេគុណប្រាប់ទិសគឺ៖ m=ΔyΔx=y2−y1x2−x1m = \\frac{\\Delta y}{\\Delta x} = \\frac{y_2 - y_1}{x_2 - x_1}m=ΔxΔy​=x2​−x1​y2​−y1​​ នេះគឺជាការគណនា បំរែបំរួលកម្ពស់ ធៀបនឹងបំរែបំរួលចម្ងាយដេក — ពោលគឺ yyy ប្រែប្រួលប៉ុន្មាន នៅពេល xxx ផ្លាស់ប្តូរមួយឯកតា។ ហេតុអ្វីបានជាមេគុណប្រាប់ទិសមានសារៈសំខាន់?\n  មេគុណប្រាប់ទិសប្រាប់អ្នកអំពី អត្រាបម្រែបម្រួល។ បើវាស្មើ ២ មានន័យថា \"រាល់ពេលដើរទៅមុខ ១ ជំហានក្នុងទិសដៅ x, តម្លៃ y នឹងឡើង ២\"។ បើស្មើ −3 មានន័យថា y នឹងចុះ ៣។ បើស្មើ ០ មានន័យថា y មិនប្រែប្រួលទេ (បន្ទាត់ដេករាបស្មើ)។",{"id":18880,"title":18881,"titles":18882,"content":18883,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-២-នៅពេលបន្ទាត់ក្លាយជាខ្សែកោង","ផ្នែកទី ២ — នៅពេលបន្ទាត់ក្លាយជាខ្សែកោង",[18313],"បន្ទាត់ត្រង់មានមេគុណប្រាប់ទិស ថេរ នៅគ្រប់កន្លែង។ ប៉ុន្តែក្នុងគណិតវិទ្យា និង machine learning ភាគច្រើនយើងជួបនឹង ខ្សែកោង 
ដែលមានចំណោតប្រែប្រួលនៅគ្រប់ចំណុច។ សូមពិនិត្យមើលអនុគមន៍ប៉ារ៉ាបូល៖ f(x)=x2f(x) = x^2f(x)=x2 xxxf(x)=x2f(x) = x^2f(x)=x2−39−11001139 នៅក្បែរ x=0x = 0x=0 ខ្សែកោងនេះស្ទើរតែរាបស្មើ។ ប៉ុន្តែនៅក្បែរ x=3x = 3x=3 វាហក់ឡើងយ៉ាងខ្លាំង។ ចំណោតរបស់វា ខុសៗគ្នានៅគ្រប់ចំណុច — នេះមានន័យថារូបមន្ត m=ΔyΔxm = \\frac{\\Delta y}{\\Delta x}m=ΔxΔy​ រវាងចំណុចពីរដែលនៅឆ្ងាយគ្នា អាចប្រាប់យើងបានត្រឹមតែ អត្រាបម្រែបម្រួលមធ្យម ប៉ុណ្ណោះ។",{"id":18885,"title":18886,"titles":18887,"content":18888,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#អត្រាបម្រែបម្រួលមធ្យម","អត្រាបម្រែបម្រួលមធ្យម",[18313,18881],"សម្រាប់ចំណុចពីរ xxx និង x+hx + hx+h នៅលើខ្សែកោង fff អត្រាបម្រែបម្រួលមធ្យមគឺ៖ ΔfΔx=f(x+h)−f(x)h\\frac{\\Delta f}{\\Delta x} = \\frac{f(x + h) - f(x)}{h}ΔxΔf​=hf(x+h)−f(x)​ នេះគឺជាចំណោតនៃ បន្ទាត់កាត់ (secant line) ដែលភ្ជាប់ចំណុចពីរនៅលើខ្សែកោង។ ឧទាហរណ៍ នៅលើ f(x)=x2f(x) = x^2f(x)=x2 ចន្លោះ x=1x = 1x=1 និង x=3x = 3x=3: f(3)−f(1)3−1=9−12=4\\frac{f(3) - f(1)}{3 - 1} = \\frac{9 - 1}{2} = 43−1f(3)−f(1)​=29−1​=4 នោះគឺជាភាពចោតមធ្យមចន្លោះ x=1x=1x=1 និង x=3x=3x=3 ប៉ុន្តែវាមិនប្រាប់យើងពីចំណោតនៅ ចំណុចជាក់លាក់ ណាមួយឡើយ។",{"id":18890,"title":18891,"titles":18892,"content":18893,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៣-លីមីត-ការពង្រីកមើលចំណុចតែមួយ","ផ្នែកទី ៣ — លីមីត៖ ការពង្រីកមើលចំណុចតែមួយ",[18313],"ដើម្បីរកចំណោត នៅត្រង់ចំណុចជាក់លាក់មួយ យើងត្រូវបង្រួមចម្ងាយ hhh ឱ្យខិតទៅជិតសូន្យបំផុត។ នៅពេល hhh កាន់តែតូចទៅៗ បន្ទាត់កាត់នឹងរំកិលខ្លួនរហូតក្លាយជា បន្ទាត់ប៉ះ (tangent line) — ដែលប៉ះខ្សែកោងត្រង់ចំណុចតែមួយគត់ និងបង្ហាញពីចំណោតពិតប្រាកដនៅត្រង់នោះ។ ជាផ្លូវការ អត្រាបម្រែបម្រួលខណៈ (ភ្លាមៗ) នៅត្រង់ xxx គឺជា លីមីត (limit)៖ lim⁡h→0f(x+h)−f(x)h\\lim_{h \\to 0} \\frac{f(x + h) - f(x)}{h}h→0lim​hf(x+h)−f(x)​ នេះគឺជាគំនិតស្នូលនៃ ដេរីវេ។",{"id":18895,"title":18896,"titles":18897,"content":18898,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#លីមីតតាមរយៈការយល់ដឹង","លីមីតតាមរយៈការយល់ដឹង",[18313,18891],"លីមីតសួរថា: \"តើប្រាតិបត្តិការណ៍នោះខិតទៅតម្លៃអ្វី នៅពេលអថេរខិតទៅជិតលេខណាមួយ — ទោះបីជាមិនដល់ចំណុចនោះក៏ដោយ?\" 
lim⁡h→0(x+h)2−x2h\\lim_{h \\to 0} \\frac{(x+h)^2 - x^2}{h}h→0lim​h(x+h)2−x2​ ពង្រីក numerator (ចំនួននៅខាងលើ): =lim⁡h→0x2+2xh+h2−x2h=lim⁡h→02xh+h2h=lim⁡h→0(2x+h)= \\lim_{h \\to 0} \\frac{x^2 + 2xh + h^2 - x^2}{h} = \\lim_{h \\to 0} \\frac{2xh + h^2}{h} = \\lim_{h \\to 0} (2x + h)=h→0lim​hx2+2xh+h2−x2​=h→0lim​h2xh+h2​=h→0lim​(2x+h) នៅពេល h→0h \\to 0h→0: =2x= 2x=2x ចំណោតនៃ f(x)=x2f(x) = x^2f(x)=x2 នៅចំណុចណាមួយ xxx គឺ 2x2x2x ពិតប្រាកដ។",{"id":18900,"title":18901,"titles":18902,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៤-ដេរីវេ-the-derivative","ផ្នែកទី ៤ — ដេរីវេ (The Derivative)",[18313],{"id":18904,"title":18905,"titles":18906,"content":18907,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#និយមន័យ","និយមន័យ",[18313,18901],"ដេរីវេ នៃអនុគមន៍ fff នៅត្រង់ចំណុច xxx សរសេរថា f′(x)f'(x)f′(x) ឬ dfdx\\frac{df}{dx}dxdf​ គឺ៖ f′(x)=lim⁡h→0f(x+h)−f(x)h\\boxed{f'(x) = \\lim_{h \\to 0} \\frac{f(x + h) - f(x)}{h}}f′(x)=h→0lim​hf(x+h)−f(x)​​ វាប្រាប់យើងអំពី អត្រាបម្រែបម្រួលភ្លាមៗ ឬចំណោតនៃបន្ទាត់ប៉ះនៅគ្រប់ចំណុច។",{"id":18909,"title":18910,"titles":18911,"content":18912,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#អត្ថន័យតាមរូបធរណីមាត្រ","អត្ថន័យតាមរូបធរណីមាត្រ",[18313,18901],"តម្លៃដេរីវេអត្ថន័យf′(x)>0f'(x) > 0f′(x)>0អនុគមន៍កំពុង កើនឡើង នៅត្រង់ xxxf′(x)\u003C0f'(x) \u003C 0f′(x)\u003C0អនុគមន៍កំពុង ថយចុះ នៅត្រង់ xxxf′(x)=0f'(x) = 0f′(x)=0អនុគមន៍ រាបស្មើ (អាចជាចំណុចទាបបំផុត ឬខ្ពស់បំផុត)∥f′(x)∥\\|f'(x)\\|∥f′(x)∥ ធំអនុគមន៍ប្រែប្រួល យ៉ាងលឿន∥f′(x)∥\\|f'(x)\\|∥f′(x)∥ តូចអនុគមន៍ប្រែប្រួល យឺតៗ",{"id":18914,"title":18915,"titles":18916,"content":18917,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៥-ច្បាប់នៃការគណនាដេរីវេ-rules","ផ្នែកទី ៥ — ច្បាប់នៃការគណនាដេរីវេ (Rules)",[18313],"ការគណនាលីមីតដោយដៃរាល់ពេលគឺហត់នឿយណាស់។ អ្នកគណិតវិទ្យាបានបង្កើត ច្បាប់កាត់ (shortcut rules) ជាច្រើន 
ដែលគ្របដណ្ដប់អនុគមន៍ស្ទើរតែទាំងអស់ដែលអ្នកនឹងជួប។",{"id":18919,"title":18920,"titles":18921,"content":18922,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ច្បាប់ស្វ័យគុណ-power-rule","ច្បាប់ស្វ័យគុណ (Power Rule)",[18313,18915],"សម្រាប់ f(x)=xnf(x) = x^nf(x)=xn: ddxxn=n⋅xn−1\\frac{d}{dx} x^n = n \\cdot x^{n-1}dxd​xn=n⋅xn−1 ឧទាហរណ៍: អនុគមន៍ដេរីវេx2x^2x22x2x2xx3x^3x33x23x^23x2x10x^{10}x1010x910x^910x9xxx (ពោលគឺ x1x^1x1)111555 (ថេរ, x0x^0x0)000",{"id":18924,"title":18925,"titles":18926,"content":18927,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ច្បាប់គុណនឹងថេរ-constant-multiple-rule","ច្បាប់គុណនឹងថេរ (Constant Multiple Rule)",[18313,18915],"ddx[c⋅f(x)]=c⋅f′(x)\\frac{d}{dx}[c \\cdot f(x)] = c \\cdot f'(x)dxd​[c⋅f(x)]=c⋅f′(x) ប្រសិនបើ f(x)=3x2f(x) = 3x^2f(x)=3x2 នោះ f′(x)=3⋅2x=6xf'(x) = 3 \\cdot 2x = 6xf′(x)=3⋅2x=6x។",{"id":18929,"title":18930,"titles":18931,"content":18932,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ច្បាប់ផលបូក-sum-rule","ច្បាប់ផលបូក (Sum Rule)",[18313,18915],"ddx[f(x)+g(x)]=f′(x)+g′(x)\\frac{d}{dx}[f(x) + g(x)] = f'(x) + g'(x)dxd​[f(x)+g(x)]=f′(x)+g′(x) ប្រសិនបើ f(x)=x3+5x2−2x+7f(x) = x^3 + 5x^2 - 2x + 7f(x)=x3+5x2−2x+7 គណនាដេរីវេម្តងមួយតេប: f′(x)=3x2+10x−2f'(x) = 3x^2 + 10x - 2f′(x)=3x2+10x−2",{"id":18934,"title":18935,"titles":18936,"content":18937,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ច្បាប់បណ្តាក់-chain-rule","ច្បាប់បណ្តាក់ (Chain Rule)",[18313,18915],"សម្រាប់ ការផ្សំ នៃអនុគមន៍ f(g(x))f(g(x))f(g(x)): ddxf(g(x))=f′(g(x))⋅g′(x)\\frac{d}{dx} f(g(x)) = f'(g(x)) \\cdot g'(x)dxd​f(g(x))=f′(g(x))⋅g′(x) អានថា: \"ដេរីវេនៃ outer function គណនានៅ inner function — គុណ (times) ដេរីវេនៃ inner function\" ឧទាហរណ៍: h(x)=(3x+1)4h(x) = (3x + 1)^4h(x)=(3x+1)4 ឱ្យ g(x)=3x+1g(x) = 3x + 1g(x)=3x+1 និង f(u)=u4f(u) = u^4f(u)=u4: h′(x)=4(3x+1)3⋅3=12(3x+1)3h'(x) = 4(3x+1)^3 \\cdot 3 = 12(3x+1)^3h′(x)=4(3x+1)3⋅3=12(3x+1)3 ច្បាប់បណ្តាក់ (Chain Rule) មាននៅគ្រប់ទីកន្លែងក្នុង machine learning — Backpropagation 
គឺជាការអនុវត្តច្បាប់នេះម្តងហើយម្តងទៀតឆ្លងកាត់ layer នៃ neural network។",{"id":18939,"title":18940,"titles":18941,"content":18942,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#តារាងសង្ខេបដេរីវេទូទៅ","តារាងសង្ខេបដេរីវេទូទៅ",[18313,18915],"អនុគមន៍ដេរីវេexe^xexexe^xexln⁡(x)\\ln(x)ln(x)1x\\frac{1}{x}x1​sin⁡(x)\\sin(x)sin(x)cos⁡(x)\\cos(x)cos(x)cos⁡(x)\\cos(x)cos(x)−sin⁡(x)-\\sin(x)−sin(x)σ(x)=11+e−x\\sigma(x) = \\frac{1}{1+e^{-x}}σ(x)=1+e−x1​ (sigmoid)σ(x)(1−σ(x))\\sigma(x)(1 - \\sigma(x))σ(x)(1−σ(x))",{"id":18944,"title":18945,"titles":18946,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៦-ការប្រើប្រាស់ដេរីវេក្នុងអនុវត្តជាក់ស្តែង","ផ្នែកទី ៦ — ការប្រើប្រាស់ដេរីវេក្នុងអនុវត្តជាក់ស្តែង",[18313],{"id":18948,"title":18949,"titles":18950,"content":18951,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ការស្វែងរកចំណុចទាបបំផុត-និងខ្ពស់បំផុត","ការស្វែងរកចំណុចទាបបំផុត និងខ្ពស់បំផុត",[18313,18945],"ប្រសិនបើ f′(x)=0f'(x) = 0f′(x)=0 នោះអនុគមន៍គឺរាបស្មើ។ ចំណុចនេះហៅថា ចំណុចវិបាក (critical point) ដែលអាចជា៖ ចំណុចអប្បបរមា (Local minimum): អនុគមន៍ចុះក្រោម ហើយឡើងវិញ → f′(x)f'(x)f′(x) ប្រែពីអវិជ្ជមានទៅជ្ជមានចំណុចអតិបរមា (Local maximum): អនុគមន៍ឡើង ហើយចុះក្រោម → f′(x)f'(x)f′(x) ប្រែពីជ្ជមានទៅអវិជ្ជមានចំណុចបន្ទោះ (Saddle point): អនុគមន៍រាបស្មើ ប៉ុន្តែបន្តដំណើរក្នុងទិសដៅទូទៅដដែល ឧទាហរណ៍: រករចំណុចអប្បបរមានៃ f(x)=x2−4x+5f(x) = x^2 - 4x + 5f(x)=x2−4x+5 f′(x)=2x−4=0  ⟹  x=2f'(x) = 2x - 4 = 0 \\implies x = 2f′(x)=2x−4=0⟹x=2 នៅ x=2x = 2x=2: f(2)=4−8+5=1f(2) = 4 - 8 + 5 = 1f(2)=4−8+5=1 — នេះជាចំណុចអប្បបរមា។ def f(x):\n    return x**2 - 4*x + 5\n\ndef f_prime(x):\n    return 2*x - 4\n\n# ស្វែងរកកន្លែងដែលដេរីវេ = 0\n# 2x - 4 = 0  =>  x = 2\nx_min = 2\nprint(f\"ចំណុចអប្បបរមានៅ x={x_min}, f(x)={f(x_min)}\")  # x=2, f(x)=1",{"id":18953,"title":18954,"titles":18955,"content":18956,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ដេរីវេជាសញ្ញាណទិស","ដេរីវេជាសញ្ញាណទិស",[18313,18945],"នេះគឺជាអត្ថន័យសំខាន់ដែលភ្ជាប់ calculus ទៅ machine learning: ប្រសិនបើ 
f′(x)>0f'(x) > 0f′(x)>0 នៅចំណុចណាមួយ ការដើរ xxx ទៅស្តាំធ្វើឱ្យ fff កើន។ ការដើរ xxx ទៅឆ្វេងធ្វើឱ្យ fff ចុះ។ប្រសិនបើ f′(x)\u003C0f'(x) \u003C 0f′(x)\u003C0 ផ្ទុយពីខាងលើ។ ដើម្បី កាត់បន្ថយ fff យើងគួរតែដើរ xxx ក្នុងទិសដៅ ផ្ទុយ ពីដេរីវេ: xnew=xold−α⋅f′(xold)x_{\\text{new}} = x_{\\text{old}} - \\alpha \\cdot f'(x_{\\text{old}})xnew​=xold​−α⋅f′(xold​) (ថ្មី = new, ចាស់ = old) ត្រង់ α\\alphaα គឺជាជំហានតូច (step size)។ ត្រូវចំណាំទេ? នេះគឺ ច្បាប់អាប់ដែតនៃ gradient descent ពិតប្រាកដ។",{"id":18958,"title":18959,"titles":18960,"content":18961,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៧-ពីអថេរមួយទៅអថេរច្រើន-ហ្ក្រាដ្យង់-the-gradient","ផ្នែកទី ៧ — ពីអថេរមួយទៅអថេរច្រើន៖ ហ្ក្រាដ្យង់ (The Gradient)",[18313],"ម៉ូឌែល Machine learning មិនមែនមានប៉ារ៉ាម៉ែត្រតែមួយទេ គឺវាមានរាប់លាន។ យើងត្រូវការរកដេរីវេធៀបនឹងប៉ារ៉ាម៉ែត្រ នីមួយៗ ក្នុងពេលតែមួយ។",{"id":18963,"title":18964,"titles":18965,"content":18966,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#ដេរីវេដោយផ្នែក-partial-derivatives","ដេរីវេដោយផ្នែក (Partial Derivatives)",[18313,18959],"ដេរីវេដោយផ្នែក គឺការគណនាដេរីវេធៀបនឹងអថេរមួយ ដោយទុកអថេរផ្សេងទៀតឱ្យនៅថេរ (មិនប្រែប្រួល)។ ∂J∂wi\\frac{\\partial J}{\\partial w_i}∂wi​∂J​ = \"តើ J ប្រែប្រួលប៉ុន្មាន ប្រសិនបើយើងកែ wiw_iwi​ តែម្នាក់ឯង?\" ឧទាហរណ៍: J(w1,w2)=w12+3w1w2+w22J(w_1, w_2) = w_1^2 + 3w_1 w_2 + w_2^2J(w1​,w2​)=w12​+3w1​w2​+w22​ ∂J∂w1=2w1+3w2∂J∂w2=3w1+2w2\\frac{\\partial J}{\\partial w_1} = 2w_1 + 3w_2 \\qquad \\frac{\\partial J}{\\partial w_2} = 3w_1 + 2w_2∂w1​∂J​=2w1​+3w2​∂w2​∂J​=3w1​+2w2​",{"id":18968,"title":18969,"titles":18970,"content":18971,"level":10753},"\u002Fkm\u002Frooms\u002Fderivatives#វ៉ិចទ័រហ្ក្រាដ្យង់-the-gradient-vector","វ៉ិចទ័រហ្ក្រាដ្យង់ (The Gradient Vector)",[18313,18959],"នៅពេលយើងប្រមូលដេរីវេដោយផ្នែកទាំងអស់មកដាក់ក្នុងវ៉ិចទ័រតែមួយ យើងហៅវាថា ហ្ក្រាដ្យង់ (∇J\\nabla J∇J): ∇J(w1,w2,…,wn)=[∂J∂w1∂J∂w2⋮∂J∂wn]\\nabla J(w_1, w_2, \\ldots, w_n) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial w_1} \\\\[4pt]\n\\frac{\\partial J}{\\partial w_2} \\\\\n\\vdots 
\\\\[4pt]\n\\frac{\\partial J}{\\partial w_n}\n\\end{bmatrix}∇J(w1​,w2​,…,wn​)=​∂w1​∂J​∂w2​∂J​⋮∂wn​∂J​​​ ហ្ក្រាដ្យង់គឺជាដេរីវេច្រើនឯករណ៍ (multi-dimensional equivalent of the derivative)។ វាចង្អុលបង្ហាញទិសដៅ ឡើងខ្លាំងបំផុត ក្នុង loss landscape។ ដើម្បីកាត់បន្ថយ error យើងត្រូវដើរក្នុង ទិសដៅផ្ទុយ ពីហ្ក្រាដ្យង់ — នេះជាមូលដ្ឋាននៃ gradient descent ពិតប្រាកដ។ ស្ពានទៅកាន់ Machine Learning\n  \n    ក្នុង ML, អនុគមន៍ខាតបង់ (loss function) វាស់វែងថា តើម៉ូឌែលទស្សន៍ទាយខុសកម្រិតណា។ ហ្ក្រាដ្យង់ប្រាប់យើងថា តើត្រូវកែតម្រូវទម្ងន់ (weights) ទៅទិសដៅណាដើម្បីឱ្យកំហុសនោះថយចុះ។",{"id":18973,"title":18974,"titles":18975,"content":18976,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#ផ្នែកទី-៨-ឧទាហរណ៍ពេញលេញ-linear-regression","ផ្នែកទី ៨ — ឧទាហរណ៍ពេញលេញ៖ Linear Regression",[18313],"សូមមើលការអនុវត្តជាក់ស្ដែងនៃគំនិតទាំងអស់នេះ។ ការតំរុង: យើងមានទិន្នន័យ (x(i),y(i))(x^{(i)}, y^{(i)})(x(i),y(i)) ហើយចង់ Fit បន្ទាត់ y^=wx+b\\hat{y} = wx + by^​=wx+b។ អនុគមន៍ខាតបង់ (Loss function) (Mean Squared Error): J(w,b)=1m∑i=1m(y^(i)−y(i))2=1m∑i=1m(wx(i)+b−y(i))2J(w, b) = \\frac{1}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)^2 = \\frac{1}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right)^2J(w,b)=m1​i=1∑m​(y^​(i)−y(i))2=m1​i=1∑m​(wx(i)+b−y(i))2 ដេរីវេដោយផ្នែក ធៀបនឹង www (ប្រើ chain rule — ដេរីវេនៃ squared term ខាងក្រៅ គុណ ដេរីវេនៃ wx+bwx+bwx+b ខាងក្នុង): ∂J∂w=2m∑i=1m(wx(i)+b−y(i))⋅x(i)\\frac{\\partial J}{\\partial w} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right) \\cdot x^{(i)}∂w∂J​=m2​i=1∑m​(wx(i)+b−y(i))⋅x(i) ដេរីវេដោយផ្នែក ធៀបនឹង bbb: ∂J∂b=2m∑i=1m(wx(i)+b−y(i))\\frac{\\partial J}{\\partial b} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(wx^{(i)} + b - y^{(i)}\\right)∂b∂J​=m2​i=1∑m​(wx(i)+b−y(i)) ការអាប់ដែត gradient descent — ដើរក្នុងទិសដៅផ្ទុយពី gradient: w←w−α⋅∂J∂w,b←b−α⋅∂J∂bw \\leftarrow w - \\alpha \\cdot \\frac{\\partial J}{\\partial w}, \\qquad b \\leftarrow b - \\alpha \\cdot \\frac{\\partial J}{\\partial b}w←w−α⋅∂w∂J​,b←b−α⋅∂b∂J​ import numpy as np\n\n# ទិន្នន័យ: 
ទំនាក់ទំនងពិត y = 3x + 2\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([5.0, 8.0, 11.0, 14.0, 17.0])\n\nw, b = 0.0, 0.0   # ចាប់ផ្ដើមពីសូន្យ\nalpha = 0.01\nm = len(y)\n\nfor epoch in range(500):\n    y_pred = w * X + b              # forward pass\n    error  = y_pred - y             # residuals: ŷ - y\n\n    # ដេរីវេដោយផ្នែក (the gradient)\n    dw = (2 \u002F m) * np.dot(error, X) # ∂J\u002F∂w\n    db = (2 \u002F m) * np.sum(error)    # ∂J\u002F∂b\n\n    # ជំហាន gradient descent\n    w = w - alpha * dw\n    b = b - alpha * db\n\nprint(f\"Fitted: ŷ = {w:.4f}·x + {b:.4f}\")\n# Output: ŷ = 3.0000·x + 2.0000 ដេរីវេ — គណនាតាមការវិភាគ calculus ហើយប្រើប្រាស់ម្ដងហើយម្ដងទៀត — គឺជាអ្វីដែលដំណើរការដំណើររៀនទាំងមូល។",{"id":18978,"title":18855,"titles":18979,"content":18980,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#សេចក្តីសង្ខេប",[18313],"គំនិតនិយមន័យខ្លីៗចំណោត (Slope)m=ΔyΔxm = \\frac{\\Delta y}{\\Delta x}m=ΔxΔy​ — អត្រាបម្រែបម្រួលថេរអត្រាបម្រែបម្រួលមធ្យមf(x+h)−f(x)h\\frac{f(x+h)-f(x)}{h}hf(x+h)−f(x)​ — ចំណោតនៃ secant ចន្លោះ hhhលីមីត (Limit)តម្លៃដែលប្រាតិបត្តិការណ៍ខិតទៅ នៅពេល h→0h \\to 0h→0ដេរីវេ (Derivative)f′(x)=lim⁡h→0f(x+h)−f(x)hf'(x) = \\lim_{h\\to 0}\\frac{f(x+h)-f(x)}{h}f′(x)=limh→0​hf(x+h)−f(x)​ — អត្រាបម្រែបម្រួលភ្លាមៗច្បាប់ស្វ័យគុណ (Power rule)ddxxn=nxn−1\\frac{d}{dx} x^n = nx^{n-1}dxd​xn=nxn−1ច្បាប់បណ្តាក់ (Chain rule)ddxf(g(x))=f′(g(x))⋅g′(x)\\frac{d}{dx}f(g(x)) = f'(g(x))\\cdot g'(x)dxd​f(g(x))=f′(g(x))⋅g′(x) — ចំបាច់សម្រាប់ backpropដេរីវេដោយផ្នែក (Partial derivative)ដេរីវេ ដោយទុកអថេរផ្សេងទៀតនៅថេរហ្ក្រាដ្យង់ (Gradient)វ៉ិចទ័រនៃដេរីវេដោយផ្នែកទាំងអស់ — ចង្អុលទៅទិសដៅឡើងខ្លាំងបំផុត ដេរីវេគឺជាចម្លើយគណិតវិទ្យាចំពោះសំណួរថា \"តើផ្លូវណាជាផ្លូវឡើងទួល?\"។ ក្នុង machine learning យើងប្រើសញ្ញាផ្ទុយរបស់វាដើម្បីរក \"ផ្លូវចុះទួល\" ដើម្បីបង្ហាត់ម៉ូឌែលឱ្យកាន់តែឆ្លាតវៃ។",{"id":18982,"title":18983,"titles":18984,"content":18985,"level":10719},"\u002Fkm\u002Frooms\u002Fderivatives#តើត្រូវរៀនអ្វីបន្ត","តើត្រូវរៀនអ្វីបន្ត?",[18313],"ឥឡូវនេះអ្នកមានមូលដ្ឋានគ្រឹះ calculus 
ហើយ។ ជំហានបន្ទាប់គឺ Gradient Descent ដែលជាអាល់ហ្គោរីតយកគំនិតដេរីវេនេះ មកបង្កើតជាម៉ាស៊ីនសម្រាប់រៀនដោយស្វ័យប្រវត្តិ។ បន្ទប់បន្ទាប់: Gradient Descent\n    See how the derivative becomes an optimization algorithm — with interactive experiments, full Python code, and a walk through every step of the math.\n    \n      ចូលបន្ទប់ Gradient Descent → html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki .sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .s7zQu, html code.shiki .s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html pre.shiki code .sHwdD, html code.shiki .sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: 
var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}",{"id":18318,"title":18317,"titles":18987,"content":18988,"level":10699},[],"ស្វែងយល់ពីរ Algorithm ដែលជាមូលដ្ឋានគ្រឹះនៃ Machine Learning។ យល់ដឹងពីរបៀបដែលវាស្វែងរកតម្លៃអប្បបរមានៃអនុគមន៍ម្តងមួយជំហានៗ។ រូបភាពយកមកពី: Creating a Gradient Descent Animation in Python Gradient Descent គឺជា optimization algorithm មូលដ្ឋានមួយ ក្នុងចំណោម algorithms ដែលមាននៅក្នុង machine learning។ វាជាវិធីសាស្រ្តសម្រាប់ស្វែងរកតម្លៃអប្បបរមានៃអនុគមន៏ ដោយដើរម្តងមួយជំហាន (iteration) ជាបន្តបន្ទាប់ក្នុងទិសដៅ ដែលធ្វើឲ្យអនុគមន៏កាន់តែតូចទៅៗ។",{"id":18990,"title":18991,"titles":18992,"content":18993,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#ប្រៀបធៀបលេងៗ","ប្រៀបធៀបលេងៗ",[18317],"ស្រមៃថា អ្នកកំពុងឈរនៅលើភ្នំមួយដែលមានអ័ព្ទក្រាស់ ហើយអ្នកចង់ទៅជ្រលងខាងក្រោម។ អ្នកមិនអាចមើលឃើញឆ្ងាយទេដោយសារមានអ័ព្ទក្រាស់ពេក ប៉ុន្តែអ្នកនៅបាតជើងរបស់អ្នក អាចដឹងថាកំពុងចុះជ្រៅទៅៗ រឺ ឡើងខ្ពង់ទៅៗ តាមរយៈជម្រោលចោត ។ Gradient descent ធ្វើរការដូចគ្នានេះដែរ: 
វាធ្វើម្តងមួយជំហានតូចៗចុះក្រោម តាមផ្លូវចម្រោងបំផុត រហូតដល់វាទៅដល់ចំណុចអប្បបរមា ទាបបំផុត។",{"id":18995,"title":18996,"titles":18997,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#គណិតវិទ្យា","គណិតវិទ្យា",[18317],{"id":18999,"title":19000,"titles":19001,"content":19002,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#រូបមន្តមូលដ្ឋាន","រូបមន្តមូលដ្ឋាន",[18317,18996],"Gradient descent ធ្វើបច្ចុប្បន្នភាព parameters ដោយប្រើរូបមន្តសាមញ្ញនេះ: θnew=θold−α∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\nabla J(\\theta)θnew​=θold​−α∇J(θ) ដែល: θ\\thetaθ តំណាងឱ្យ parameters ដែលយើងកំពុង optimize (តើតម្លៃ Parameter ណាមួយដែលយើងកំពុងស្វែងរក ដែលធ្វើឲ្យអនុគមន៍ JJJ មានតម្លៃតិចបំផុត)α\\alphaα គឺ learning rate (អត្រាបោះជំហាន ឬ ទំហំជំហាន)J(θ)J(\\theta)J(θ) គឺ cost function ឬ objective function ដែលយើងចង់ រកតម្លៃ θ\\thetaθ ណាដែលធ្វើឲ្យ JJJ មានតម្លៃតូចបំផុត∇J(θ)\\nabla J(\\theta)∇J(θ) គឺ Gradient(ដេរីវេ | Derivative) នៃ JJJ ជាអនុគមន៍នៃ θ\\thetaθ",{"id":19004,"title":19005,"titles":19006,"content":19007,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ការយល់ដឹងពី-gradient-ពីសាមញ្ញទៅ-កំរិតខ្ពស់","ការយល់ដឹងពី Gradient: ពីសាមញ្ញទៅ កំរិតខ្ពស់",[18317,18996],"តោះបកស្រាយនិមិត្តសញ្ញា gradient ∇\\nabla∇ (ហៅថា \"nabla\" ឬ \"del\") ដោយបង្កើតពីករណីសាមញ្ញបំផុត។",{"id":19009,"title":19010,"titles":19011,"content":19012,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#ករណី-1-អថេរតែមួយ-parameter-មួយ","ករណី 1: អថេរតែមួយ (Parameter មួយ)",[18317,18996,19005],"នៅពេលយើងមាន parameter តែមួយ, gradient គឺគ្រាន់តែជា ដេរីវេ: ∇J(θ)=dJdθ\\nabla J(\\theta) = \\frac{dJ}{d\\theta}∇J(θ)=dθdJ​ Derivative ប្រាប់យើងថា: \"ប្រសិនបើខ្ញុំបង្កើន θ\\thetaθ បន្តិចបន្តួច តើ JJJ ផ្លាស់ប្តូរប៉ុន្មាន?\" ឧទាហរណ៍: សម្រាប់ J(θ)=θ2J(\\theta) = \\theta^2J(θ)=θ2: ∇J(θ)=dJdθ=2θ\\nabla J(\\theta) = \\frac{dJ}{d\\theta} = 2\\theta∇J(θ)=dθdJ​=2θ ប្រសិនបើ θ=5\\theta = 5θ=5, នោះ ∇J(5)=10\\nabla J(5) = 10∇J(5)=10 → អនុគមន៍កំពុងកើនឡើង, ទៅខាងឆ្វេង (បន្ថយ θ\\thetaθ)ប្រសិនបើ θ=−3\\theta = -3θ=−3, នោះ 
∇J(−3)=−6\\nabla J(-3) = -6∇J(−3)=−6 → អនុគមន៍កំពុងថយចុះ, ទៅខាងស្តាំ (បង្កើន θ\\thetaθ)ប្រសិនបើ θ=0\\theta = 0θ=0, នោះ ∇J(0)=0\\nabla J(0) = 0∇J(0)=0 → យើងស្ថិតនៅចំណុចអប្បបរមា!",{"id":19014,"title":19015,"titles":19016,"content":19017,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#ករណី-2-អថេរពីរ-parameters-ពីរ","ករណី 2: អថេរពីរ (Parameters ពីរ)",[18317,18996,19005],"នៅពេលយើងមាន parameters ពីរ θ1\\theta_1θ1​ និង θ2\\theta_2θ2​, gradient ក្លាយជា vector មួយមានធាតុផ្សំពីរ: ∇J(θ1,θ2)=[∂J∂θ1∂J∂θ2]\\nabla J(\\theta_1, \\theta_2) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial \\theta_1} \\\\\n\\frac{\\partial J}{\\partial \\theta_2}\n\\end{bmatrix}∇J(θ1​,θ2​)=[∂θ1​∂J​∂θ2​∂J​​] Partial derivative នីមួយៗ ∂J∂θi\\frac{\\partial J}{\\partial \\theta_i}∂θi​∂J​ សួរថា: \"ប្រសិនបើខ្ញុំផ្លាស់ប្តូរតែ θi\\theta_iθi​ (រក្សា អថេរផ្សេងទៀតថេរ), តើ JJJ ផ្លាស់ប្តូរប៉ុន្មាន?\" ឧទាហរណ៍: សម្រាប់ J(θ1,θ2)=θ12+θ22J(\\theta_1, \\theta_2) = \\theta_1^2 + \\theta_2^2J(θ1​,θ2​)=θ12​+θ22​: ∇J=[2θ12θ2]\\nabla J = \\begin{bmatrix}\n2\\theta_1 \\\\\n2\\theta_2\n\\end{bmatrix}∇J=[2θ1​2θ2​​] នៅចំណុច (θ1=3,θ2=4)(\\theta_1=3, \\theta_2=4)(θ1​=3,θ2​=4): ∇J=[68]\\nabla J = \\begin{bmatrix}\n6 \\\\\n8\n\\end{bmatrix}∇J=[68​] Vector នេះចង្អុលទៅទិសនៃការឡើងចម្រោងបំផុត។ យើងទៅក្នុងទិសផ្ទុយ (ដក វា) ដើម្បីចុះក្រោម!",{"id":19019,"title":19020,"titles":19021,"content":19022,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#ករណី-3-អថេរច្រើន-ករណីទូទៅ","ករណី 3: អថេរច្រើន (ករណីទូទៅ)",[18317,18996,19005],"សម្រាប់ n parameters θ1,θ2,…,θn\\theta_1, \\theta_2, \\ldots, \\theta_nθ1​,θ2​,…,θn​, gradient គឺ n-dimensional vector: ∇J(θ)=[∂J∂θ1∂J∂θ2⋮∂J∂θn]\\nabla J(\\theta) = \\begin{bmatrix}\n\\frac{\\partial J}{\\partial \\theta_1} \\\\\n\\frac{\\partial J}{\\partial \\theta_2} \\\\\n\\vdots \\\\\n\\frac{\\partial J}{\\partial \\theta_n}\n\\end{bmatrix}∇J(θ)=​∂θ1​∂J​∂θ2​∂J​⋮∂θn​∂J​​​ ធាតុផ្សំនីមួយៗប្រាប់យើងថា តើ JJJ ប្រែប្រួលប៉ុន្មាន ចំពោះការផ្លាស់ប្តូរនៃ parameter ក្នុងចំណោមណាមួយនោះ។ 
នេះគឺជាអ្វីដែលយើងត្រូវដឹង ដើម្បីកំណត់ថា តើយើងគួរកែ parameter នីមួយៗទៅទិសណា! ចំណុចសំខាន់: មិនថាអ្នកមាន parameter 1 ឬ 1 លាន, គំនិតគឺដូចគ្នាតេ: គណនាថា តើ parameter នីមួយៗប៉ះពាល់ដល់ cost ប៉ុន្មាន, បន្ទាប់មកកែសម្រួលវាក្នុងទិសផ្ទុយ។",{"id":19024,"title":19025,"titles":19026,"content":19027,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#ឧទាហរណ៍ពេញលេញ","ឧទាហរណ៍ពេញលេញ",[18317],"តោះមើល gradient descent ក្នុងការដំណើរការជាមួយករណីសាមញ្ញបំផុត: អថេរតែមួយ។ សូមគិតរកការបន្ថយទៅប្រកដដែលអប្បបរមាសម្រាប់អនុគមន៍ quadratic: J(θ)=θ2J(\\theta) = \\theta^2J(θ)=θ2 Gradient (ដេរីវេ) គឺ: ∇J(θ)=dJdθ=2θ\\nabla J(\\theta) = \\frac{dJ}{d\\theta} = 2\\theta∇J(θ)=dθdJ​=2θ យើងអាចសរសេរ Gradient descent algorithm ជា: θnew=θold−α⋅2θold\\theta_{new} = \\theta_{old} - \\alpha \\cdot 2\\theta_{old}θnew​=θold​−α⋅2θold​ ចាប់ផ្ដើមនៅ θ0=10\\theta_0 = 10θ0​=10 ជាមួយ learning rate α=0.1\\alpha = 0.1α=0.1: Iteration 1 (ជំហានទី 1): θ1=10−0.1×(2×10)=10−2=8\\theta_1 = 10 - 0.1 \\times (2 \\times 10) = 10 - 2 = 8θ1​=10−0.1×(2×10)=10−2=8 Gradient វិជ្ជមាន (10 ជំរាលឡើងខាងលើ), ដូច្នេះយើងបានធ្វើចលនាទៅខាងឆ្វេង (បន្ថយ θ\\thetaθ) Iteration 2 (ជំហានទី 2): θ2=8−0.1×(2×8)=8−1.6=6.4\\theta_2 = 8 - 0.1 \\times (2 \\times 8) = 8 - 1.6 = 6.4θ2​=8−0.1×(2×8)=8−1.6=6.4 នៅតែជា gradient វិជ្ជមាន, កំពុងតូចទៅ, ដូច្នេះជំហានតូចជាង Iteration 3 (ជំហានទី 3): θ3=6.4−0.1×(2×6.4)=6.4−1.28=5.12\\theta_3 = 6.4 - 0.1 \\times (2 \\times 6.4) = 6.4 - 1.28 = 5.12θ3​=6.4−0.1×(2×6.4)=6.4−1.28=5.12 Pattern បន្ត: នៅពេលយើងចូលទៅកាន់ចំណុចអប្បបរមា, gradient កាន់តែតូចទៅៗ, ដូច្នេះជំហានរបស់យើងតូចជាងដោយស្វ័យប្រវត្តិ! 
ជាមួយនឹងជំហាននីមួយៗ, យើងចូលទៅកាន់ជិតនូវចំណុចអប្បបរមានៅ θ=0\\theta = 0θ=0។ សូមកត់សម្គាល់ថា ជំហានតូចជាងដោយធម្មជាតិ នៅពេល gradient ថយចុះក្បែរនូវចំណុចអប្បបរមា!",{"id":19029,"title":19030,"titles":19031,"content":10692,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#គំនិតសំខាន់ៗ","គំនិតសំខាន់ៗ",[18317],{"id":19033,"title":19034,"titles":19035,"content":19036,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#learning-rate-អត្រាបោះជំហាន-ឬ-ទំហំជំហាន","Learning Rate | អត្រាបោះជំហាន ឬ ទំហំជំហាន",[18317,19030],"Learning rate α\\alphaα មានសំខាន់សំខាន់: ទំហំពេក: យើងអាចរំលងចំណុចអប្បបរមា ឬ បង្កើតការវិលជុំមិនចប់ (មិនដល់គោលដៅ)ទំហំតូច: ចំណាយពេលច្រើនហើយ កម្រដល់គោលដៅសមស្រប: ទៅដល់គោលដៅបានយ៉ាងមានប្រសិទ្ធភាព",{"id":19038,"title":19039,"titles":19040,"content":10692,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#តោះធ្វើតេស្តជាមួយ-learning-rates-ផ្សេងៗ-ហើយមើលថាវាអាចប៉ះពាល់ដល់ការចូលរួមគ្នា-convergence-យ៉ាងដូចម្តេច","តោះធ្វើតេស្តជាមួយ learning rates ផ្សេងៗ ហើយមើលថាវាអាចប៉ះពាល់ដល់ការចូលរួមគ្នា (convergence) យ៉ាងដូចម្តេច!",[18317,19030],{"id":19042,"title":19043,"titles":19044,"content":10692,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ប្រភេទនៃ-gradient-descent","ប្រភេទនៃ Gradient Descent",[18317,19030],{"id":19046,"title":18641,"titles":19047,"content":19048,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#_1-batch-gradient-descent",[18317,19030,19043],"ប្រើទិន្នន័យទាំងអស់ ដើម្បីគណនា gradient: θ=θ−α∇θJ(θ)\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta)θ=θ−α∇θ​J(θ) ដែល J(θ)J(\\theta)J(θ) ត្រូវបានគណនាលើឧទាហរណ៍បន្តុបកយសិក្សាទាំងអស់។",{"id":19050,"title":18646,"titles":19051,"content":19052,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#_2-stochastic-gradient-descent-sgd",[18317,19030,19043],"Update parameters ដោយប្រើឧទាហរណ៍បន្តុបកយសិក្សាមួយម្តងមួយ ក្នុងមួយពេល: θ=θ−α∇θJ(θ;x(i),y(i))\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta; x^{(i)}, 
y^{(i)})θ=θ−α∇θ​J(θ;x(i),y(i))",{"id":19054,"title":18651,"titles":19055,"content":19056,"level":10760},"\u002Fkm\u002Frooms\u002Fgradient-descent#_3-mini-batch-gradient-descent",[18317,19030,19043],"ជាការប្រទាក់ចូលគ្នា: ប្រើbatch តូចមួយ នៃឧទាហរណ៍: θ=θ−α∇θJ(θ;x(i:i+b),y(i:i+b))\\theta = \\theta - \\alpha \\nabla_\\theta J(\\theta; x^{(i:i+b)}, y^{(i:i+b)})θ=θ−α∇θ​J(θ;x(i:i+b),y(i:i+b)) ដែល bbb គឺ batch size។",{"id":19058,"title":19059,"titles":19060,"content":19061,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#ចំណុចរួមតូច-convergence","ចំណុចរួមតូច (Convergence)",[18317],"Gradient descent ចូលរួមគ្នា នៅពេល gradient ក្លាយជាតូចបំផុត: ∣∇J(θ)∣\u003Cϵ|\\nabla J(\\theta)| \u003C \\epsilon∣∇J(θ)∣\u003Cϵ ដែល ϵ\\epsilonϵ គឺតម្លៃ threshold តូចមួយ។",{"id":19063,"title":19064,"titles":19065,"content":19066,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#បញ្ហាប្រឈម","បញ្ហាប្រឈម",[18317],"Local Minima: Algorithm អាចជាប់គាំងនៅ local minima ជំនួសឱ្យការស្វែងរក global minimumSaddle Points: ចំណុចដែល gradient រកឃើញថាសូន្យ ប៉ុន្តែមិនមែនជាចំណុចអប្បបរមាPlateau Regions: តំបន់ដែល gradient មានទំហំតូចបំផុត ធ្វើឱ្យការសិក្សាយឺត",{"id":19068,"title":19069,"titles":19070,"content":19071,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#ការអនុវត្តន៍ក្នុងពិភពលោកជាក់ស្តែង","ការអនុវត្តន៍ក្នុងពិភពលោកជាក់ស្តែង",[18317],"Gradient descent ត្រូវបានប្រើដើម្បីបណ្តុះបណ្តាល: Neural Networks: ការ Optimize parameters រាប់លានLinear Regression: ការស្វែងរក best-fit lineLogistic Regression: បញ្ហា classificationSupport Vector Machines: ការស្វែងរក optimal hyperplanes",{"id":19073,"title":19074,"titles":19075,"content":19076,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#gradient-descent-ក្នុង-deep-learning","Gradient Descent ក្នុង Deep Learning",[18317,19069],"Deep Neural Network ប្រើ Gradient Descent ដើម្បីបណ្តុះបណ្តាលទម្ងន់ (weights) ក្នុង layers ផ្សេងៗ រូបភាពខាងលើបង្ហាញពី Deep Neural Network — ប្រភេទ Neural Network ពេញនិយមមួយ ដែលប្រើ Gradient Descent ដើម្បី optimize cost 
function។ ក្នុង Deep Learning: Input Layer ទទួល data (ឧ. pixels នៃរូបភាព, ពាក្យ,  លេខ)Hidden Layers ជ្រើរើសធ្វើ feature extraction — ស្វែងរកគំរូ (patterns) ស្មុគស្មាញ ពីទិន្នន័យOutput Layer ផ្តល់ការទស្សន៍ទាយ (prediction) ចុងក្រោយWeights www (ទម្ងន់) ក្នុង connections នីមួយៗ គឺជា parameters θ\\thetaθ ដែល Gradient Descent ត្រូវ optimize ក្នុងអំឡុងពេល Training: Forward Pass→Compute Loss J(θ)→Backpropagation→Gradient Descent Update\\begin{aligned}\n&\\text{Forward Pass} \\\\\n&\\rightarrow \\text{Compute Loss } J(\\theta) \\\\\n&\\rightarrow \\text{Backpropagation} \\\\\n&\\rightarrow \\text{Gradient Descent Update}\n\\end{aligned}​Forward Pass→Compute Loss J(θ)→Backpropagation→Gradient Descent Update​ Network មួយ មាន neurons រាប់លាន → weights រាប់លាន → gradient vector មាន រាប់លាន dimensions — ប៉ុន្តែ Gradient Descent ដំណើរការដូចគ្នានឹង 1D ដែរ: update ក្នុងទិស opposite នៃ gradient!",{"id":19078,"title":19079,"titles":19080,"content":19081,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#អនុវត្តក្នុង-python","អនុវត្តក្នុង Python",[18317],"ខាងក្រោមជាការសរសេរកូដ Python ដោយមិនប្រើ ML libraries ណាមួយ។ Code block នីមួយៗ ត្រូវតទៅនឹងរូបមន្ត math ខាងលើ — ជួរ highlighted ជា formula ចម្បង។",{"id":19083,"title":19084,"titles":19085,"content":19086,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ជំហានទី-1-cost-function-និង-gradient","ជំហានទី 1 — Cost Function និង Gradient",[18317,19079],"J(θ)=θ2,∇J(θ)=2θJ(\\theta) = \\theta^2, \\qquad \\nabla J(\\theta) = 2\\thetaJ(θ)=θ2,∇J(θ)=2θ # J(θ) = θ²  →  អនុគមន៍ដែលយើងចង់ minimize\ndef cost(theta):\n    return theta ** 2\n\n# ∇J(θ) = dJ\u002Fdθ = 2θ  →  derivative (gradient) របស់វា\ndef gradient(theta):\n    return 2 * theta",{"id":19088,"title":19089,"titles":19090,"content":19091,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ជំហានទី-2-update-rule","ជំហានទី 2 — Update Rule",[18317,19079],"θnew=θold−α⋅∇J(θ)\\theta_{new} = \\theta_{old} - \\alpha \\cdot \\nabla J(\\theta)θnew​=θold​−α⋅∇J(θ) def update(theta, 
alpha):\n    grad = gradient(theta)           # ① គណនា  ∇J(θ)\n    return theta - alpha * grad      # ② អនុវត្ត  θ_new = θ_old − α·∇J(θ) ជួរទី 3 គឺ formula update rule ខាងលើ សរសេរដោយផ្ទាល់ជា Python។",{"id":19093,"title":19094,"titles":19095,"content":19096,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ជំហានទី-3-loop-រហូតដល់-convergence","ជំហានទី 3 — Loop រហូតដល់ Convergence",[18317,19079],"Run updates រហូតដល់ ∣∇J(θ)∣\u003Cε|\\nabla J(\\theta)| \u003C \\varepsilon∣∇J(θ)∣\u003Cε — នៅពេល gradient ស្ទើររកឃើញថាសូន្យ: def gradient_descent(theta_init, alpha, epsilon=1e-6, max_iters=1000):\n    theta = theta_init                           # θ₀ — ចំណុចចាប់ផ្ដើម\n    for i in range(max_iters):\n        grad = gradient(theta)                   # ∇J(θ) = 2θ\n        if abs(grad) \u003C epsilon:                  # ឈប់: |∇J(θ)| \u003C ε\n            print(f\"Converged at iteration {i}\")\n            break\n        theta = theta - alpha * grad             # θ_new = θ_old − α·∇J(θ)\n        if i \u003C 5:\n            print(f\"  iter {i+1:2d}: θ={theta:.5f}  J={cost(theta):.5f}  ∇J={grad:.5f}\")\n    return theta\n# ស្របតាម ការគណនា manual ខាងលើ: θ₀ = 10, α = 0.1\ntheta_min = gradient_descent(theta_init=10.0, alpha=0.1)\nprint(f\"\\nMinimum at θ = {theta_min:.8f}\") Output — ស្របតាម iterations manual ខាងលើ: iter  1: θ= 8.00000  J=64.00000  ∇J=20.00000\n  iter  2: θ= 6.40000  J=40.96000  ∇J=16.00000\n  iter  3: θ= 5.12000  J=26.21440  ∇J=12.80000\n  iter  4: θ= 4.09600  J=16.77722  ∇J=10.24000\n  iter  5: θ= 3.27680  J=10.73742  ∇J= 8.19200\nMinimum at θ = 0.00000001",{"id":19098,"title":19099,"titles":19100,"content":19101,"level":10753},"\u002Fkm\u002Frooms\u002Fgradient-descent#ជំហានទី-4-linear-regression-parameters-ពីរ","ជំហានទី 4 — Linear Regression: Parameters ពីរ",[18317,19079],"សម្រាប់ model y^=wX+b\\hat{y} = wX + by^​=wX+b, cost function ប្រើ mean squared error: J(w,b)=1m∑i=1m(y^(i)−y(i))2J(w, b) = \\frac{1}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - 
y^{(i)}\\right)^2J(w,b)=m1​∑i=1m​(y^​(i)−y(i))2 ជាមួយ partial derivatives: ∂J∂w=2m∑i=1m(y^(i)−y(i))x(i),∂J∂b=2m∑i=1m(y^(i)−y(i))\\frac{\\partial J}{\\partial w} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right) x^{(i)}, \\qquad \\frac{\\partial J}{\\partial b} = \\frac{2}{m} \\sum_{i=1}^{m} \\left(\\hat{y}^{(i)} - y^{(i)}\\right)∂w∂J​=m2​∑i=1m​(y^​(i)−y(i))x(i),∂b∂J​=m2​∑i=1m​(y^​(i)−y(i)) import numpy as np\n\ndef linear_regression_gd(X, y, alpha=0.01, epochs=500):\n    m = len(y)\n    w, b = 0.0, 0.0                        # θ = [w, b] — initialize ទៅ zero\n    for epoch in range(epochs):\n        y_pred = w * X + b                 # forward pass:  ŷ = w·X + b\n        error  = y_pred - y                # residuals:     ŷ − y\n        dw = (2 \u002F m) * np.dot(error, X)   # ∂J\u002F∂w = (2\u002Fm) Σ (ŷ−y)·x\n        db = (2 \u002F m) * np.sum(error)       # ∂J\u002F∂b = (2\u002Fm) Σ (ŷ−y)\n        w = w - alpha * dw                 # w_new = w_old − α·∂J\u002F∂w\n        b = b - alpha * db                 # b_new = b_old − α·∂J\u002F∂b\n        if epoch % 100 == 0:\n            loss = np.mean(error ** 2)     # J(w,b) = (1\u002Fm) Σ (ŷ−y)²\n            print(f\"Epoch {epoch:4d}: loss={loss:.4f}  w={w:.4f}  b={b:.4f}\")\n    return w, b\n\n# y = 2·x  →  model គួរ converge ទៅ w≈2, b≈0\nX = np.array([1.0, 2.0, 3.0, 4.0, 5.0])\ny = np.array([2.0, 4.0, 6.0, 8.0, 10.0])\nw, b = linear_regression_gd(X, y)\nprint(f\"\\nFitted:  ŷ = {w:.4f}·x + {b:.4f}\") ជួរ highlighted 7–12 ទំនាក់ទំនងដោយផ្ទាល់ទៅ formulas: ជួរ 7–8: forward pass y^=wX+b\\hat{y} = wX + by^​=wX+b និង residualsជួរ 9–10: partial derivatives ∂J∂w\\frac{\\partial J}{\\partial w}∂w∂J​ និង ∂J∂b\\frac{\\partial J}{\\partial b}∂b∂J​ជួរ 11–12: gradient descent update rule θnew=θold−α∇J\\theta_{new} = \\theta_{old} - \\alpha \\nabla 
Jθnew​=θold​−α∇J",{"id":19103,"title":19104,"titles":19105,"content":19106,"level":10719},"\u002Fkm\u002Frooms\u002Fgradient-descent#ជំហានបន្ទាប់","ជំហានបន្ទាប់",[18317],"នៅពេលអ្នកយល់ដឹង gradient descent, អ្នកអាចស្រាវជ្រាវ Algorithms បន្ថែមដូចជា: Momentum: បន្ថែមល្បឿនទៅក្នុងការ updateAdam: Adaptive learning rates សម្រាប់ parameter នីមួយៗRMSprop: គ្រប់គ្រង sparse gradients បានល្អជាង html pre.shiki code .sHwdD, html code.shiki .sHwdD{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#546E7A;--shiki-default-font-style:italic;--shiki-dark:#676E95;--shiki-dark-font-style:italic}html pre.shiki code .spNyl, html code.shiki .spNyl{--shiki-light:#9C3EDA;--shiki-default:#C792EA;--shiki-dark:#C792EA}html pre.shiki code .s2Zo4, html code.shiki .s2Zo4{--shiki-light:#6182B8;--shiki-default:#82AAFF;--shiki-dark:#82AAFF}html pre.shiki code .sMK4o, html code.shiki .sMK4o{--shiki-light:#39ADB5;--shiki-default:#89DDFF;--shiki-dark:#89DDFF}html pre.shiki code .sHdIc, html code.shiki .sHdIc{--shiki-light:#90A4AE;--shiki-light-font-style:italic;--shiki-default:#EEFFFF;--shiki-default-font-style:italic;--shiki-dark:#BABED8;--shiki-dark-font-style:italic}html pre.shiki code .s7zQu, html code.shiki .s7zQu{--shiki-light:#39ADB5;--shiki-light-font-style:italic;--shiki-default:#89DDFF;--shiki-default-font-style:italic;--shiki-dark:#89DDFF;--shiki-dark-font-style:italic}html pre.shiki code .sTEyZ, html code.shiki .sTEyZ{--shiki-light:#90A4AE;--shiki-default:#EEFFFF;--shiki-dark:#BABED8}html pre.shiki code .sbssI, html code.shiki .sbssI{--shiki-light:#F76D47;--shiki-default:#F78C6C;--shiki-dark:#F78C6C}html .light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html.light .shiki span {color: var(--shiki-light);background: var(--shiki-light-bg);font-style: var(--shiki-light-font-style);font-weight: 
var(--shiki-light-font-weight);text-decoration: var(--shiki-light-text-decoration);}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sfazB, html code.shiki .sfazB{--shiki-light:#91B859;--shiki-default:#C3E88D;--shiki-dark:#C3E88D}html pre.shiki code .sBMFI, html code.shiki .sBMFI{--shiki-light:#E2931D;--shiki-default:#FFCB6B;--shiki-dark:#FFCB6B}",1776142915370]