
Commit cfefbb2

Author: Svetlana Karslioglu
Commit message: Add Distributed tutorials landing page
1 parent 1125546

File tree

4 files changed: +159 −8 lines changed

_static/css/custom.css

+68
@@ -0,0 +1,68 @@
+/* sphinx-design styles for cards/tabs
+*/
+
+:root {
+  --sd-color-primary: #6c6c6d;
+  --sd-color-primary-highlight: #f3f4f7;
+  --sd-color-card-border-hover: #ee4c2c;
+  --sd-color-card-border: #f3f4f7;
+  --sd-color-card-background: #f3f4f7;
+  --sd-color-card-text: inherit;
+  --sd-color-card-header: transparent;
+  --sd-color-card-footer: transparent;
+  --sd-color-tabs-label-active: hsla(231, 99%, 66%, 1);
+  --sd-color-tabs-label-hover: hsla(231, 99%, 66%, 1);
+  --sd-color-tabs-label-inactive: hsl(0, 0%, 66%);
+  --sd-color-tabs-underline-active: hsla(231, 99%, 66%, 1);
+  --sd-color-tabs-underline-hover: rgba(178, 206, 245, 0.62);
+  --sd-color-tabs-underline-inactive: transparent;
+  --sd-color-tabs-overline: rgb(222, 222, 222);
+  --sd-color-tabs-underline: rgb(222, 222, 222);
+}
+
+.sd-card {
+  position: relative;
+  background-color: #f3f4f7;
+  opacity: 0.5;
+  border-radius: 0px;
+  width: 30%;
+}
+
+.sd-card-img {
+  opacity: 0.5;
+  width: 200px;
+  padding: 0px;
+}
+
+.sd-card-img:hover {
+  opacity: 1.0;
+  background-color: #f3f4f7;
+}
+
+.sd-card:after {
+  display: block;
+  opacity: 1;
+  content: '';
+  background-color: #fff;
+  border: none;
+  border-bottom: solid 1px #ee4c2c;
+  transform: scaleX(0);
+  transition: transform .250s ease-in-out;
+}
+
+.sd-card:hover {
+  background-color: #fff;
+  opacity: 1;
+  border: none;
+  border-bottom: solid 1px #ee4c2c;
+  transition: transform .250s ease-in-out;
+}
+
+.sd-card:hover:after {
+  border: none;
+  transform: scaleX(1);
+}
+
+.sd-card:after {
+  transform-origin: 0% 50%;
+}

conf.py

+13-8
@@ -71,6 +71,7 @@
     'sphinx.ext.intersphinx',
     'sphinx_copybutton',
     'sphinx_gallery.gen_gallery',
+    'sphinx_design'
 ]

 intersphinx_mapping = {
@@ -263,22 +264,26 @@
                      'Miscellaneous'),
 ]

+html_css_files = [
+    'https://cdn.jsdelivr.net/npm/[email protected]/dist/katex.min.css',
+    'css/custom.css'
+]

 def setup(app):
     # NOTE: in Sphinx 1.8+ `html_css_files` is an official configuration value
     # and can be moved outside of this function (and the setup(app) function
     # can be deleted).
-    html_css_files = [
-        'https://cdn.jsdelivr.net/npm/[email protected]/dist/katex.min.css'
-    ]
+    #html_css_files = [
+    #    'https://cdn.jsdelivr.net/npm/[email protected]/dist/katex.min.css',
+    #    'css/custom.css'
+    #]
     # In Sphinx 1.8 it was renamed to `add_css_file`, 1.7 and prior it is
     # `add_stylesheet` (deprecated in 1.8).
-    add_css = getattr(app, 'add_css_file', app.add_stylesheet)
-    for css_file in html_css_files:
-        add_css(css_file)
-
+    #add_css = getattr(app, 'add_css_file', app.add_stylesheet)
+    #for css_file in html_css_files:
+    #    add_css(css_file)
     # Custom CSS
-    # app.add_stylesheet('css/pytorch_theme.css')
+    #app.add_stylesheet('css/pytorch_theme.css')
     # app.add_stylesheet('https://fonts.googleapis.com/css?family=Lato')
     # Custom directives
     app.add_directive('includenodoc', IncludeDirective)
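
Note: per the NOTE comment inside setup(app), `html_css_files` has been an official top-level configuration value since Sphinx 1.8, which is exactly the move this hunk makes; once the commented-out registration code is deleted, setup(app) only needs to keep the custom directives. A minimal sketch of the resulting conf.py shape (illustrative, not the full file; the katex version is elided in the diff above, so it stays elided here):

    # conf.py -- sketch of the post-commit shape (Sphinx 1.8+)
    extensions = [
        # ...existing extensions...
        'sphinx_gallery.gen_gallery',
        'sphinx_design',  # provides the grid/card directives used in distributed/home.rst
    ]

    # Official config value since Sphinx 1.8; no per-file add_css_file() calls needed.
    html_css_files = [
        'https://cdn.jsdelivr.net/npm/[email protected]/dist/katex.min.css',  # version elided above
        'css/custom.css',  # the card/tab styles added by this commit
    ]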

distributed/home.rst

+77
@@ -0,0 +1,77 @@
+Distributed and Parallel Training Tutorials
+===========================================
+
+This page lists all distributed and parallel training tutorials
+available on the pytorch.org website.
+
+Getting Started with Distributed Data-Parallel Training (DDP)
+-------------------------------------------------------------
+
+.. grid:: 3
+
+   .. grid-item-card:: Getting Started with PyTorch Distributed
+      :shadow: none
+      :link: https://example.com
+      :link-type: url
+
+      This tutorial provides a gentle introduction to PyTorch
+      DistributedDataParallel.
+
+   .. grid-item-card:: Single-Machine Model Parallel Best Practices
+      :shadow: none
+      :link: https://example.com
+      :link-type: url
+
+      In this tutorial you will learn about best practices for
+      using model parallelism.
+
+   .. grid-item-card:: Writing Distributed Applications with PyTorch
+      :shadow: none
+      :link: https://example.com
+      :link-type: url
+
+      This tutorial demonstrates how to write a distributed
+      application with PyTorch.
+
+Learn FSDP
+----------
+
+Fully Sharded Data Parallel (FSDP) distributes model parameters
+across multiple workers, enabling you to train larger models.
+
+.. grid:: 3
+
+   .. grid-item-card:: Getting Started with FSDP
+      :shadow: none
+      :img-top: ../_static/img/thumbnails/cropped/pytorch-logo.png
+      :link: https://example.com
+      :link-type: url
+
+      This tutorial provides a gentle introduction to PyTorch
+      Fully Sharded Data Parallel (FSDP).
+
+   .. grid-item-card:: Single-Machine Model Parallel Best Practices
+      :shadow: none
+      :img-top: ../_static/img/thumbnails/cropped/pytorch-logo.png
+      :link: https://example.com
+      :link-type: url
+
+      In this tutorial you will learn about best practices for
+      using model parallelism.
+
+   .. grid-item-card:: Writing Distributed Applications with PyTorch
+      :shadow: none
+      :img-top: ../_static/img/thumbnails/cropped/pytorch-logo.png
+      :link: https://example.com
+      :link-type: url
+
+      This tutorial demonstrates how to write a distributed
+      application with PyTorch.
+
+Learn RPC
+---------
+
+The Distributed Remote Procedure Call (RPC) framework provides
+mechanisms for multi-machine model training.
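
A note on the FSDP blurb above: the landing page only links out to tutorials, but the mechanism it names (sharding model parameters across workers) boils down to wrapping a model in FullyShardedDataParallel. Below is a minimal sketch, not part of this commit, assuming a torchrun-style launch that sets RANK/LOCAL_RANK/WORLD_SIZE and a CUDA machine (use the "gloo" backend on CPU-only hosts):

    # fsdp_sketch.py -- illustrative only; launch with:
    #   torchrun --nproc_per_node=2 fsdp_sketch.py
    import os
    import torch
    import torch.distributed as dist
    import torch.nn as nn
    from torch.distributed.fsdp import FullyShardedDataParallel as FSDP

    def main():
        dist.init_process_group(backend="nccl")  # reads RANK/WORLD_SIZE set by torchrun
        torch.cuda.set_device(int(os.environ["LOCAL_RANK"]))

        model = nn.Sequential(nn.Linear(1024, 1024), nn.ReLU(), nn.Linear(1024, 10))
        # FSDP shards this model's parameters across all ranks, so each worker
        # holds only a slice of them -- the property that lets larger models fit.
        model = FSDP(model.cuda())

        optim = torch.optim.AdamW(model.parameters(), lr=1e-3)
        loss = model(torch.randn(8, 1024, device="cuda")).sum()
        loss.backward()
        optim.step()

        dist.destroy_process_group()

    if __name__ == "__main__":
        main()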
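
Likewise, the Learn RPC teaser names the framework without showing it. A minimal sketch of a synchronous remote call with torch.distributed.rpc, assuming two local worker processes (the worker names, address, and port are illustrative):

    # rpc_sketch.py -- illustrative only; spawns two local RPC workers
    import os
    import torch
    import torch.distributed.rpc as rpc
    import torch.multiprocessing as mp

    def run(rank, world_size):
        os.environ["MASTER_ADDR"] = "localhost"  # illustrative rendezvous settings
        os.environ["MASTER_PORT"] = "29500"
        rpc.init_rpc(f"worker{rank}", rank=rank, world_size=world_size)
        if rank == 0:
            # Run torch.add on worker1 and block until the result comes back.
            result = rpc.rpc_sync("worker1", torch.add, args=(torch.ones(2), 1))
            print(result)  # tensor([2., 2.])
        rpc.shutdown()  # waits for all outstanding work before tearing down

    if __name__ == "__main__":
        mp.spawn(run, args=(2,), nprocs=2)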

index.rst

+1
@@ -886,6 +886,7 @@ Additional Resources
    :hidden:
    :caption: Parallel and Distributed Training

+   distributed/home
    beginner/dist_overview
    intermediate/model_parallel_tutorial
    intermediate/ddp_tutorial
