Course: attention-in-transformers-concepts-and-code-in-pytorch
Lesson: introduction
Slug: han2t
[Dev Only] Loading...
Slug: han2t
[Dev Only] Loading...
[Dev Only] Debug Info
{
"courseVersionId": null
}
{
"courseId": 1040,
"name": "Attention in Transformers: Concepts and Code in PyTorch",
"slug": "attention-in-transformers-concepts-and-code-in-pytorch",
"type": "short_course",
"progress": -1,
"maintenanceMode": false,
"releasedAt": "2025-02-12T08:00:00+00:00",
"nextCourseSlug": null,
"wpData": {
"courseName": "Attention in Transformers: Concepts and Code in PyTorch",
"courseDescription": "Understand and implement the attention mechanism, a key element of transformer-based LLMs, using PyTorch.",
"coursePartner": [
{
"title": "StatQuest",
"logo": "https://home-wordpress.deeplearning.ai/wp-content/uploads/2025/02/aOs7Ec0M_400x400.jpg"
}
],
"courseTopic": [
"Deep Learning",
"Embeddings",
"GenAI Applications",
"Machine Learning",
"NLP",
"Transformers"
],
"courseLevel": "Beginner",
"courseDuration": null,
"marketingSlug": "attention-in-transformers-concepts-and-code-in-pytorch",
"videoThumbnail": "https://home-wordpress.deeplearning.ai/wp-content/uploads/2025/02/1040_attention-in-transformers-concepts-and-code-in-pytorch.jpg"
},
"certProgress": 0,
"accomplishmentProgress": 0,
"lessons": {
"han2t": {
"index": 1,
"slug": "han2t",
"name": "Introduction",
"type": "video",
"videoId": 671,
"time": 396,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"ym7dj": {
"index": 2,
"slug": "ym7dj",
"name": "The Main Ideas Behind Transformers and Attention",
"type": "video",
"videoId": 672,
"time": 265,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"gb20l": {
"index": 3,
"slug": "gb20l",
"name": "The Matrix Math for Calculating Self-Attention",
"type": "video",
"videoId": 673,
"time": 690,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"kxluu": {
"index": 4,
"slug": "kxluu",
"name": "Coding Self-Attention in PyTorch",
"type": "video_notebook",
"videoId": 674,
"time": 533,
"programId": 47001,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"xy1tc": {
"index": 5,
"slug": "xy1tc",
"name": "Self-Attention vs Masked Self-Attention",
"type": "video",
"videoId": 675,
"time": 870,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"pid2l": {
"index": 6,
"slug": "pid2l",
"name": "The Matrix Math for Calculating Masked Self-Attention",
"type": "video",
"videoId": 676,
"time": 218,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"uheue": {
"index": 7,
"slug": "uheue",
"name": "Coding Masked Self-Attention in PyTorch",
"type": "video_notebook",
"videoId": 677,
"time": 325,
"programId": 47002,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"ugekb": {
"index": 8,
"slug": "ugekb",
"name": "Encoder-Decoder Attention",
"type": "video",
"videoId": 678,
"time": 244,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"h6tni": {
"index": 9,
"slug": "h6tni",
"name": "Multi-Head Attention",
"type": "video",
"videoId": 679,
"time": 157,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"bn91t": {
"index": 10,
"slug": "bn91t",
"name": "Coding Encoder-Decoder Attention and Multi-Head Attention in PyTorch",
"type": "video_notebook",
"videoId": 680,
"time": 287,
"programId": 47003,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"j6fo8": {
"index": 11,
"slug": "j6fo8",
"name": "Conclusion",
"type": "video",
"videoId": 670,
"time": 30,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": null,
"progress": 0,
"readingMaterialId": null,
"accessControl": "full",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": true,
"saveNotebookCustomjsEnabled": false
}
},
"gxy4p": {
"index": 12,
"slug": "gxy4p",
"name": "Quiz",
"type": "quiz",
"videoId": null,
"time": 0,
"programId": null,
"chatbotId": null,
"iframeUrl": null,
"quizId": "5FF623EB2EFE4166B2F1E015",
"progress": 0,
"readingMaterialId": null,
"accessControl": "locked",
"requiredUserTier": "pro",
"features": {
"saveNotebookToolbar": false,
"saveNotebookCustomjsEnabled": false
}
}
},
"subtopics": {},
"listing": [
{
"timeFrame": null,
"moduleLabel": "Module 1",
"name": null,
"content": [
{
"key": "han2t",
"type": "lesson"
},
{
"key": "ym7dj",
"type": "lesson"
},
{
"key": "gb20l",
"type": "lesson"
},
{
"key": "kxluu",
"type": "lesson"
},
{
"key": "xy1tc",
"type": "lesson"
},
{
"key": "pid2l",
"type": "lesson"
},
{
"key": "uheue",
"type": "lesson"
},
{
"key": "ugekb",
"type": "lesson"
},
{
"key": "h6tni",
"type": "lesson"
},
{
"key": "bn91t",
"type": "lesson"
},
{
"key": "j6fo8",
"type": "lesson"
},
{
"key": "gxy4p",
"type": "lesson"
}
],
"progress": 0
}
],
"totalDurationSeconds": 4015,
"lastAccessTime": null,
"reviewsCnt": 0,
"activeVersionIds": []
}