From 45a24e1d116084986f1e387c3c7c4b6efb75234b Mon Sep 17 00:00:00 2001
From: Vadim Pushtaev
Date: Wed, 1 Mar 2023 20:56:08 +0200
Subject: [PATCH] intro-to-pytorch: part 3, links fix

---
 .../Part 3 - Training Neural Networks (Exercises).ipynb | 6 +++---
 .../Part 3 - Training Neural Networks (Solution).ipynb  | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/intro-to-pytorch/Part 3 - Training Neural Networks (Exercises).ipynb b/intro-to-pytorch/Part 3 - Training Neural Networks (Exercises).ipynb
index 3e837eac5c..9a3709fd7a 100644
--- a/intro-to-pytorch/Part 3 - Training Neural Networks (Exercises).ipynb
+++ b/intro-to-pytorch/Part 3 - Training Neural Networks (Exercises).ipynb
@@ -64,7 +64,7 @@
     "\n",
     "Let's start by seeing how we calculate the loss with PyTorch. Through the `nn` module, PyTorch provides losses such as the cross-entropy loss (`nn.CrossEntropyLoss`). You'll usually see the loss assigned to `criterion`. As noted in the last part, with a classification problem such as MNIST, we're using the softmax function to predict class probabilities. With a softmax output, you want to use cross-entropy as the loss. To actually calculate the loss, you first define the criterion then pass in the output of your network and the correct labels.\n",
     "\n",
-    "Something really important to note here. Looking at [the documentation for `nn.CrossEntropyLoss`](https://pytorch.org/docs/stable/nn.html#torch.nn.CrossEntropyLoss),\n",
+    "Something really important to note here. Looking at [the documentation for `nn.CrossEntropyLoss`](https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html#torch.nn.CrossEntropyLoss),\n",
     "\n",
     "> This criterion combines `nn.LogSoftmax()` and `nn.NLLLoss()` in one single class.\n",
     ">\n",
@@ -505,7 +505,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -519,7 +519,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.1"
+   "version": "3.8.10"
   }
  },
  "nbformat": 4,
diff --git a/intro-to-pytorch/Part 3 - Training Neural Networks (Solution).ipynb b/intro-to-pytorch/Part 3 - Training Neural Networks (Solution).ipynb
index 20f6525171..3f38a15f71 100644
--- a/intro-to-pytorch/Part 3 - Training Neural Networks (Solution).ipynb
+++ b/intro-to-pytorch/Part 3 - Training Neural Networks (Solution).ipynb
@@ -64,7 +64,7 @@
     "\n",
     "Let's start by seeing how we calculate the loss with PyTorch. Through the `nn` module, PyTorch provides losses such as the cross-entropy loss (`nn.CrossEntropyLoss`). You'll usually see the loss assigned to `criterion`. As noted in the last part, with a classification problem such as MNIST, we're using the softmax function to predict class probabilities. With a softmax output, you want to use cross-entropy as the loss. To actually calculate the loss, you first define the criterion then pass in the output of your network and the correct labels.\n",
     "\n",
-    "Something really important to note here. Looking at [the documentation for `nn.CrossEntropyLoss`](https://pytorch.org/docs/stable/nn.html#torch.nn.CrossEntropyLoss),\n",
+    "Something really important to note here. Looking at [the documentation for `nn.CrossEntropyLoss`](https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html#torch.nn.CrossEntropyLoss),\n",
     "\n",
     "> This criterion combines `nn.LogSoftmax()` and `nn.NLLLoss()` in one single class.\n",
     ">\n",
@@ -658,7 +658,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -672,7 +672,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.1"
+   "version": "3.8.10"
   }
  },
  "nbformat": 4,
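
For reference, the notebook cell touched by the first hunk of both files describes defining a criterion and passing it the network output and labels, and the linked `nn.CrossEntropyLoss` documentation states that it combines `nn.LogSoftmax()` and `nn.NLLLoss()`. Below is a minimal sketch (not part of the patch) illustrating that equivalence; the tensor shapes and label values are illustrative assumptions, not taken from the notebooks.

```python
# Sketch only: nn.CrossEntropyLoss on raw logits should match
# nn.LogSoftmax followed by nn.NLLLoss, as the linked docs state.
import torch
from torch import nn

logits = torch.randn(4, 10)           # raw network output (no softmax): batch of 4, 10 classes (illustrative)
labels = torch.tensor([1, 0, 7, 3])   # correct class indices (illustrative)

criterion = nn.CrossEntropyLoss()
loss = criterion(logits, labels)

# Equivalent two-step computation
log_softmax = nn.LogSoftmax(dim=1)
nll = nn.NLLLoss()
loss_two_step = nll(log_softmax(logits), labels)

print(torch.isclose(loss, loss_two_step))  # expected: tensor(True)
```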