Commit 454f165

update week 37
1 parent 6631356 commit 454f165

57 files changed

Lines changed: 5924 additions & 1043 deletions

doc/pub/week37/html/._week37-bs000.html

Lines changed: 31 additions & 9 deletions
@@ -148,10 +148,10 @@
  None,
  'adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html'),
 ('Practical tips', 2, None, 'practical-tips'),
-('Sneaking in auotmatic differentiation using Autograd',
+('Sneaking in automatic differentiation using Autograd',
  2,
  None,
- 'sneaking-in-auotmatic-differentiation-using-autograd'),
+ 'sneaking-in-automatic-differentiation-using-autograd'),
 ('Same code but now with momentum gradient descent',
  2,
  None,
@@ -164,6 +164,10 @@
  2,
  None,
  'same-code-but-now-with-momentum-gradient-descent'),
+("But none of these can compete with Newton's method",
+ 2,
+ None,
+ 'but-none-of-these-can-compete-with-newton-s-method'),
 ('Similar (second order function now) problem but now with '
  'AdaGrad',
  2,
@@ -181,7 +185,20 @@
 ('Material for the lab sessions',
  2,
  None,
- 'material-for-the-lab-sessions')]}
+ 'material-for-the-lab-sessions'),
+('Reminder on different scaling methods',
+ 2,
+ None,
+ 'reminder-on-different-scaling-methods'),
+('Functionality in Scikit-Learn',
+ 2,
+ None,
+ 'functionality-in-scikit-learn'),
+('More preprocessing', 2, None, 'more-preprocessing'),
+('Frequently used scaling functions',
+ 2,
+ None,
+ 'frequently-used-scaling-functions')]}
 end of tocinfo -->

 <body>
@@ -258,14 +275,19 @@
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#rmsprop-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">RMSProp algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">ADAM algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs039.html#practical-tips" style="font-size: 80%;">Practical tips</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-auotmatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in auotmatic differentiation using Autograd</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-automatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in automatic differentiation using Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs042.html#including-stochastic-gradient-descent-with-autograd" style="font-size: 80%;">Including Stochastic Gradient Descent with Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs044.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs045.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs046.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs047.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs044.html#but-none-of-these-can-compete-with-newton-s-method" style="font-size: 80%;">But none of these can compete with Newton's method</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs045.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs046.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs047.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs048.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs049.html#reminder-on-different-scaling-methods" style="font-size: 80%;">Reminder on different scaling methods</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs050.html#functionality-in-scikit-learn" style="font-size: 80%;">Functionality in Scikit-Learn</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs051.html#more-preprocessing" style="font-size: 80%;">More preprocessing</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs052.html#frequently-used-scaling-functions" style="font-size: 80%;">Frequently used scaling functions</a></li>

 </ul>
 </li>
@@ -319,7 +341,7 @@ <h4>September 8-12, 2025</h4>
 <li><a href="._week37-bs008.html">9</a></li>
 <li><a href="._week37-bs009.html">10</a></li>
 <li><a href="">...</a></li>
-<li><a href="._week37-bs047.html">48</a></li>
+<li><a href="._week37-bs052.html">53</a></li>
 <li><a href="._week37-bs001.html">&raquo;</a></li>
 </ul>
 <!-- ------------------- end of main content --------------- -->

doc/pub/week37/html/._week37-bs001.html

Lines changed: 31 additions & 9 deletions
@@ -148,10 +148,10 @@
  None,
  'adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html'),
 ('Practical tips', 2, None, 'practical-tips'),
-('Sneaking in auotmatic differentiation using Autograd',
+('Sneaking in automatic differentiation using Autograd',
  2,
  None,
- 'sneaking-in-auotmatic-differentiation-using-autograd'),
+ 'sneaking-in-automatic-differentiation-using-autograd'),
 ('Same code but now with momentum gradient descent',
  2,
  None,
@@ -164,6 +164,10 @@
  2,
  None,
  'same-code-but-now-with-momentum-gradient-descent'),
+("But none of these can compete with Newton's method",
+ 2,
+ None,
+ 'but-none-of-these-can-compete-with-newton-s-method'),
 ('Similar (second order function now) problem but now with '
  'AdaGrad',
  2,
@@ -181,7 +185,20 @@
 ('Material for the lab sessions',
  2,
  None,
- 'material-for-the-lab-sessions')]}
+ 'material-for-the-lab-sessions'),
+('Reminder on different scaling methods',
+ 2,
+ None,
+ 'reminder-on-different-scaling-methods'),
+('Functionality in Scikit-Learn',
+ 2,
+ None,
+ 'functionality-in-scikit-learn'),
+('More preprocessing', 2, None, 'more-preprocessing'),
+('Frequently used scaling functions',
+ 2,
+ None,
+ 'frequently-used-scaling-functions')]}
 end of tocinfo -->

 <body>
@@ -258,14 +275,19 @@
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#rmsprop-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">RMSProp algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">ADAM algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs039.html#practical-tips" style="font-size: 80%;">Practical tips</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-auotmatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in auotmatic differentiation using Autograd</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-automatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in automatic differentiation using Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs042.html#including-stochastic-gradient-descent-with-autograd" style="font-size: 80%;">Including Stochastic Gradient Descent with Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs044.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs045.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs046.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs047.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs044.html#but-none-of-these-can-compete-with-newton-s-method" style="font-size: 80%;">But none of these can compete with Newton's method</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs045.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs046.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs047.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs048.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs049.html#reminder-on-different-scaling-methods" style="font-size: 80%;">Reminder on different scaling methods</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs050.html#functionality-in-scikit-learn" style="font-size: 80%;">Functionality in Scikit-Learn</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs051.html#more-preprocessing" style="font-size: 80%;">More preprocessing</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs052.html#frequently-used-scaling-functions" style="font-size: 80%;">Frequently used scaling functions</a></li>

 </ul>
 </li>
@@ -310,7 +332,7 @@ <h2 id="plans-for-week-37-lecture-monday" class="anchor">Plans for week 37, lect
 <li><a href="._week37-bs009.html">10</a></li>
 <li><a href="._week37-bs010.html">11</a></li>
 <li><a href="">...</a></li>
-<li><a href="._week37-bs047.html">48</a></li>
+<li><a href="._week37-bs052.html">53</a></li>
 <li><a href="._week37-bs002.html">&raquo;</a></li>
 </ul>
 <!-- ------------------- end of main content --------------- -->

doc/pub/week37/html/._week37-bs002.html

Lines changed: 31 additions & 9 deletions
@@ -148,10 +148,10 @@
  None,
  'adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html'),
 ('Practical tips', 2, None, 'practical-tips'),
-('Sneaking in auotmatic differentiation using Autograd',
+('Sneaking in automatic differentiation using Autograd',
  2,
  None,
- 'sneaking-in-auotmatic-differentiation-using-autograd'),
+ 'sneaking-in-automatic-differentiation-using-autograd'),
 ('Same code but now with momentum gradient descent',
  2,
  None,
@@ -164,6 +164,10 @@
  2,
  None,
  'same-code-but-now-with-momentum-gradient-descent'),
+("But none of these can compete with Newton's method",
+ 2,
+ None,
+ 'but-none-of-these-can-compete-with-newton-s-method'),
 ('Similar (second order function now) problem but now with '
  'AdaGrad',
  2,
@@ -181,7 +185,20 @@
 ('Material for the lab sessions',
  2,
  None,
- 'material-for-the-lab-sessions')]}
+ 'material-for-the-lab-sessions'),
+('Reminder on different scaling methods',
+ 2,
+ None,
+ 'reminder-on-different-scaling-methods'),
+('Functionality in Scikit-Learn',
+ 2,
+ None,
+ 'functionality-in-scikit-learn'),
+('More preprocessing', 2, None, 'more-preprocessing'),
+('Frequently used scaling functions',
+ 2,
+ None,
+ 'frequently-used-scaling-functions')]}
 end of tocinfo -->

 <body>
@@ -258,14 +275,19 @@
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#rmsprop-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">RMSProp algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs038.html#adam-algorithm-taken-from-goodfellow-et-al-https-www-deeplearningbook-org-contents-optimization-html" style="font-size: 80%;">ADAM algorithm, taken from "Goodfellow et al":"https://www.deeplearningbook.org/contents/optimization.html"</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs039.html#practical-tips" style="font-size: 80%;">Practical tips</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-auotmatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in auotmatic differentiation using Autograd</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs040.html#sneaking-in-automatic-differentiation-using-autograd" style="font-size: 80%;">Sneaking in automatic differentiation using Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs042.html#including-stochastic-gradient-descent-with-autograd" style="font-size: 80%;">Including Stochastic Gradient Descent with Autograd</a></li>
 <!-- navigation toc: --> <li><a href="._week37-bs043.html#same-code-but-now-with-momentum-gradient-descent" style="font-size: 80%;">Same code but now with momentum gradient descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs044.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs045.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs046.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
-<!-- navigation toc: --> <li><a href="._week37-bs047.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs044.html#but-none-of-these-can-compete-with-newton-s-method" style="font-size: 80%;">But none of these can compete with Newton's method</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs045.html#similar-second-order-function-now-problem-but-now-with-adagrad" style="font-size: 80%;">Similar (second order function now) problem but now with AdaGrad</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs046.html#rmsprop-for-adaptive-learning-rate-with-stochastic-gradient-descent" style="font-size: 80%;">RMSprop for adaptive learning rate with Stochastic Gradient Descent</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs047.html#and-finally-adam-https-arxiv-org-pdf-1412-6980-pdf" style="font-size: 80%;">And finally "ADAM":"https://arxiv.org/pdf/1412.6980.pdf"</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs048.html#material-for-the-lab-sessions" style="font-size: 80%;">Material for the lab sessions</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs049.html#reminder-on-different-scaling-methods" style="font-size: 80%;">Reminder on different scaling methods</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs050.html#functionality-in-scikit-learn" style="font-size: 80%;">Functionality in Scikit-Learn</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs051.html#more-preprocessing" style="font-size: 80%;">More preprocessing</a></li>
+<!-- navigation toc: --> <li><a href="._week37-bs052.html#frequently-used-scaling-functions" style="font-size: 80%;">Frequently used scaling functions</a></li>

 </ul>
 </li>
@@ -308,7 +330,7 @@ <h2 id="readings-and-videos" class="anchor">Readings and Videos: </h2>
 <li><a href="._week37-bs010.html">11</a></li>
 <li><a href="._week37-bs011.html">12</a></li>
 <li><a href="">...</a></li>
-<li><a href="._week37-bs047.html">48</a></li>
+<li><a href="._week37-bs052.html">53</a></li>
 <li><a href="._week37-bs003.html">&raquo;</a></li>
 </ul>
 <!-- ------------------- end of main content --------------- -->
