
Commit 01e07e5

Generate Python docs from pytorch/pytorch@d51ca38
1 parent 2eb9421 commit 01e07e5

2,200 files changed: +21,202 −3,310 lines


docs/master/_dynamo.html (+2 −1)

@@ -237,7 +237,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -330,6 +330,7 @@
 <li class="toctree-l1"><a class="reference internal" href="autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

docs/master/_images/RReLU.png (62 Bytes)

docs/master/_modules/index.html (+3 −1)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

@@ -662,6 +663,7 @@ <h1>All modules for which code is available</h1>
 <li><a href="torch/jit/_trace.html">torch.jit._trace</a></li>
 </ul><li><a href="torch/library.html">torch.library</a></li>
 <li><a href="torch/monitor.html">torch.monitor</a></li>
+<li><a href="torch/mps.html">torch.mps</a></li>
 <li><a href="torch/multiprocessing.html">torch.multiprocessing</a></li>
 <ul><li><a href="torch/multiprocessing/spawn.html">torch.multiprocessing.spawn</a></li>
 </ul><li><a href="torch/nested.html">torch.nested</a></li>

docs/master/_modules/torch.html (+21 −25)

Large diffs are not rendered by default.

docs/master/_modules/torch/__config__.html (+2 −1)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

docs/master/_modules/torch/_dynamo.html (+2 −1)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

docs/master/_modules/torch/_dynamo/backends/registry.html (+2 −1)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../../../../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

docs/master/_modules/torch/_dynamo/eval_frame.html (+24 −12)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../../../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

@@ -510,7 +511,7 @@ <h1>Source code for torch._dynamo.eval_frame</h1>
 from .exc import ResetRequired
 from .mutation_guard import install_generation_tagging_init
 from .types import DynamoCallback
-from .utils import compile_times
+from .utils import compile_times, fake_mode_from_tensors

 log = logging.getLogger(__name__)

@@ -991,6 +992,7 @@
     f = innermost_fn(f)

     graph = None
+    compile_time_inputs = None
     out_guards = None
     graph_captured_input = None
     graph_captured_result: Optional[Tuple[torch.Tensor, ...]] = None

@@ -1033,9 +1035,11 @@
         gm: torch.fx.GraphModule, example_inputs
     ):
         nonlocal graph
+        nonlocal compile_time_inputs

         assert graph is None, "whole graph export entails exactly one graph"
         graph = gm
+        compile_time_inputs = example_inputs

     def result_capturing_wrapper(*graph_inputs):
         nonlocal graph_captured_result

@@ -1092,6 +1096,8 @@
             arg = next(self.old_args_gen)
             if "val" in self.current_node.meta:
                 arg.node.meta["val"] = self.current_node.meta["val"]
+            if "tensor_dict" in self.current_node.meta:
+                arg.node.meta["tensor_dict"] = self.current_node.meta["tensor_dict"]
             return arg

         def output(self, target, args, kwargs):

@@ -1100,22 +1106,28 @@
             new_result_flat = [lookup[i] for i in matched_output_elements_positions]
             return super().output(target, (new_result_flat,), {})

-        def run_node(self, n):
-            self.current_node = n
-            return super().run_node(n)
-
     if aten_graph:
         # Running graph with interpreter is needed for propagating the stack_trace
         def graph_with_interpreter(*args):
             with torch.fx.traceback.preserve_node_meta():
                 return torch.fx.Interpreter(graph).run(*args)

-        graph = make_fx(
-            graph_with_interpreter,
-            decomposition_table=decomposition_table,
-            tracing_mode=tracing_mode,
-            _allow_non_fake_inputs=True,
-        )(*graph_captured_input)
+        if tracing_mode == "real":
+            graph = make_fx(
+                graph_with_interpreter,
+                decomposition_table=decomposition_table,
+            )(*graph_captured_input)
+        elif tracing_mode == "symbolic":
+            # For dynamic shape, we need to make_fx through the graph with fake tensors under FakeTensorMode
+            # The fake tensors may contain the fine grain dynamic shape passed down from dynamo
+            fake_mode = fake_mode_from_tensors(compile_time_inputs)
+            with fake_mode:
+                graph = make_fx(
+                    graph_with_interpreter,
+                    decomposition_table=decomposition_table,
+                )(*compile_time_inputs)
+        else:
+            raise AssertionError(f"Unknown tracing mode {tracing_mode}")

     new_graph = ChangeInputOutputSignature(
         graph,

docs/master/_modules/torch/_jit_internal.html (+3 −2)

@@ -235,7 +235,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+git299ada9 ) &#x25BC</a>
+  <a href='https://pytorch.org/docs/versions.html'>master (2.0.0a0+gitd51ca38 ) &#x25BC</a>
 </div>


@@ -328,6 +328,7 @@
 <li class="toctree-l1"><a class="reference internal" href="../../autograd.html">torch.autograd</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../library.html">torch.library</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../cuda.html">torch.cuda</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../mps.html">torch.mps</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../backends.html">torch.backends</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.html">torch.distributed</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../../distributed.algorithms.join.html">torch.distributed.algorithms.join</a></li>

@@ -1054,7 +1055,7 @@ <h1>Source code for torch._jit_internal</h1>

     class MyModule(nn.Module):
         def __init__(self, use_memory_efficient):
-            super(MyModule, self).__init__()
+            super().__init__()
             self.use_memory_efficient = use_memory_efficient

         @torch.jit.unused
