From 75ccc75126f10ffa7e83383defb9ba4541b6d831 Mon Sep 17 00:00:00 2001
From: Zoltan
Date: Tue, 9 Apr 2024 10:21:44 -0400
Subject: [PATCH 1/4] Update notebook-pr.yaml

---
 .github/workflows/notebook-pr.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/notebook-pr.yaml b/.github/workflows/notebook-pr.yaml
index 5c1b72339..62e344662 100644
--- a/.github/workflows/notebook-pr.yaml
+++ b/.github/workflows/notebook-pr.yaml
@@ -86,7 +86,7 @@ jobs:
           python ci/verify_exercises.py $nbs --c "$COMMIT_MESSAGE"
           python ci/make_pr_comment.py $nbs --branch $branch --o comment.txt
 
-      # This package is outdated and no longer maintained 
+      # This package is outdated and no longer maintained
       # - name: Add PR comment
       #   if: "!contains(env.COMMIT_MESSAGE, 'skip ci')"
       #   uses: machine-learning-apps/pr-comment@1.0.0

From 8e3c27b65fa3078bf94b983d93a7bfb0aee7e3bf Mon Sep 17 00:00:00 2001
From: Zoltan
Date: Tue, 9 Apr 2024 10:25:15 -0400
Subject: [PATCH 2/4] Update W3D1_Tutorial2.ipynb

---
 .../W3D1_Tutorial2.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/W3D1_Tutorial2.ipynb b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/W3D1_Tutorial2.ipynb
index a8f3f481e..cb1582b9a 100644
--- a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/W3D1_Tutorial2.ipynb
+++ b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/W3D1_Tutorial2.ipynb
@@ -301,7 +301,7 @@
     "\n",
     "In classical transformer systems, a core principle is encoding and decoding. We can encode an input sequence as a vector (that implicitly codes what we just read). And we can then take this vector and decode it, e.g., as a new sentence. So a sequence-to-sequence (e.g., sentence translation) system may read a sentence (made out of words embedded in a relevant space) and encode it as an overall vector. It then takes the resulting encoding of the sentence and decodes it into a translated sentence.\n",
     "\n",
-    "In modern transformer systems, such as GPT, all words are used parallelly. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
+    "In modern transformer systems, such as GPT, all words are used in parallel. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
    ]
   },
   {

From 3b647960e8e49570f2fd63b00c494b9693a9393b Mon Sep 17 00:00:00 2001
From: Zoltan
Date: Tue, 9 Apr 2024 10:46:31 -0400
Subject: [PATCH 3/4] Update notebook-pr.yaml ci:execute

---
 .github/workflows/notebook-pr.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/notebook-pr.yaml b/.github/workflows/notebook-pr.yaml
index 62e344662..5d3a3f64c 100644
--- a/.github/workflows/notebook-pr.yaml
+++ b/.github/workflows/notebook-pr.yaml
@@ -86,7 +86,7 @@ jobs:
           python ci/verify_exercises.py $nbs --c "$COMMIT_MESSAGE"
           python ci/make_pr_comment.py $nbs --branch $branch --o comment.txt
 
-      # This package is outdated and no longer maintained
+      # This package is outdated and no longer maintained.
       # - name: Add PR comment
       #   if: "!contains(env.COMMIT_MESSAGE, 'skip ci')"
       #   uses: machine-learning-apps/pr-comment@1.0.0

From 2b49fb2071ccd2d6d47c392cb9c236dadce969ad Mon Sep 17 00:00:00 2001
From: GitHub Action
Date: Tue, 9 Apr 2024 15:05:44 +0000
Subject: [PATCH 4/4] Process tutorial notebooks

---
 .../instructor/W3D1_Tutorial2.ipynb | 2 +-
 .../student/W3D1_Tutorial2.ipynb    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/instructor/W3D1_Tutorial2.ipynb b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/instructor/W3D1_Tutorial2.ipynb
index 547e0add7..deefbfd34 100644
--- a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/instructor/W3D1_Tutorial2.ipynb
+++ b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/instructor/W3D1_Tutorial2.ipynb
@@ -301,7 +301,7 @@
     "\n",
     "In classical transformer systems, a core principle is encoding and decoding. We can encode an input sequence as a vector (that implicitly codes what we just read). And we can then take this vector and decode it, e.g., as a new sentence. So a sequence-to-sequence (e.g., sentence translation) system may read a sentence (made out of words embedded in a relevant space) and encode it as an overall vector. It then takes the resulting encoding of the sentence and decodes it into a translated sentence.\n",
     "\n",
-    "In modern transformer systems, such as GPT, all words are used parallelly. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
+    "In modern transformer systems, such as GPT, all words are used in parallel. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
    ]
   },
   {
diff --git a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/student/W3D1_Tutorial2.ipynb b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/student/W3D1_Tutorial2.ipynb
index 170e78797..453db45fa 100644
--- a/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/student/W3D1_Tutorial2.ipynb
+++ b/tutorials/W3D1_TimeSeriesAndNaturalLanguageProcessing/student/W3D1_Tutorial2.ipynb
@@ -301,7 +301,7 @@
     "\n",
     "In classical transformer systems, a core principle is encoding and decoding. We can encode an input sequence as a vector (that implicitly codes what we just read). And we can then take this vector and decode it, e.g., as a new sentence. So a sequence-to-sequence (e.g., sentence translation) system may read a sentence (made out of words embedded in a relevant space) and encode it as an overall vector. It then takes the resulting encoding of the sentence and decodes it into a translated sentence.\n",
     "\n",
-    "In modern transformer systems, such as GPT, all words are used parallelly. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
+    "In modern transformer systems, such as GPT, all words are used in parallel. In that sense, the transformers generalize the encoding/decoding idea. Examples of this strategy include all the modern large language models (such as GPT)."
    ]
   },
   {