From 72fe5b293697fc046c2fc1ab8cc31befb1a200f4 Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 14:37:15 -0700 Subject: [PATCH 01/10] move criticism docs to __init__; move ds docs to tutorial --- docs/generate_api_navbar_and_symbols.py | 31 ------------- docs/tex/api/criticism.tex | 44 ------------------- .../data-subsampling.tex} | 0 docs/tex/tutorials/index.tex | 3 ++ edward/criticisms/__init__.py | 13 +++++- 5 files changed, 15 insertions(+), 76 deletions(-) delete mode 100644 docs/tex/api/criticism.tex rename docs/tex/{api/inference-data-subsampling.tex => tutorials/data-subsampling.tex} (100%) diff --git a/docs/generate_api_navbar_and_symbols.py b/docs/generate_api_navbar_and_symbols.py index 18fe10f8d..38c168ff8 100644 --- a/docs/generate_api_navbar_and_symbols.py +++ b/docs/generate_api_navbar_and_symbols.py @@ -67,7 +67,6 @@ 'child_pages': [ 'inference-classes.tex', 'inference-compositionality.tex', - 'inference-data-subsampling.tex', 'inference-development.tex', ], }, @@ -87,14 +86,6 @@ ], 'child_pages': [], }, - { - 'page': 'inference-data-subsampling.tex', - 'title': 'Data Subsampling', - 'parent_pages': [ - 'inference.tex' - ], - 'child_pages': [], - }, { 'page': 'inference-development.tex', 'title': 'Development', @@ -103,12 +94,6 @@ ], 'child_pages': [], }, - { - 'page': 'criticism.tex', - 'title': 'Criticism', - 'parent_pages': [], - 'child_pages': [], - }, { 'page': 'reference.tex', 'title': 'Reference', @@ -207,19 +192,6 @@ def generate_models(): return '\n\item'.join(links) -def generate_criticisms(): - import edward.criticisms as module - objs = [getattr(module, name) for name in dir(module)] - objs = [obj for obj in objs - if (hasattr(obj, '__call__') or - isinstance(obj, type)) - ] - objs = sorted(objs, key=lambda cls: cls.__name__) - - links = [('@{{ed.criticisms.{}}}').format(cls.__name__) for cls in objs] - return '\n\item'.join(links) - - def generate_util(): import edward.util as module objs = [getattr(module, name) for name in dir(module)] @@ -260,9 +232,6 @@ def get_tensorflow_version(): if '{{models}}' in document: document = document.replace('{{models}}', generate_models()) - if '{{criticisms}}' in document: - document = document.replace('{{criticisms}}', generate_criticisms()) - if '{{util}}' in document: document = document.replace('{{util}}', generate_util()) diff --git a/docs/tex/api/criticism.tex b/docs/tex/api/criticism.tex deleted file mode 100644 index 32fd286ac..000000000 --- a/docs/tex/api/criticism.tex +++ /dev/null @@ -1,44 +0,0 @@ -\title{Criticism} - -{{navbar}} - -\subsubsection{Criticism} - -We can never validate whether a model is true. In practice, ``all -models are wrong'' \citep{box1976science}. However, we can try to -uncover where the model goes wrong. Model criticism helps justify the -model as an approximation or point to good directions for revising the -model. -For background, see the criticism \href{/tutorials/criticism}{tutorial}. - -Edward explores model criticism using -\begin{itemize} - \item point evaluations, such as mean squared error or - classification accuracy; - \item posterior predictive checks, for making probabilistic - assessments of the model fit using discrepancy functions. -\end{itemize} - -% \subsubsection{Developing new criticism techniques} - -% Criticism is defined simply with utility functions. They take random -% variables as input and output NumPy arrays. 
-% Criticism techniques are simply functions which take as input data, -% the probability model and variational model (binded through a latent -% variable dictionary), and any additional inputs. - -% \begin{lstlisting}[language=Python] -% def criticize(data, latent_vars, ...) -% ... -% \end{lstlisting} - -% Developing new criticism techniques is easy. They can be derived from -% the current techniques or built as a standalone function. - -\begin{center}\rule{3in}{0.4pt}\end{center} - -\begin{itemize} - \item {{criticisms}} -\end{itemize} - -\subsubsection{References}\label{references} diff --git a/docs/tex/api/inference-data-subsampling.tex b/docs/tex/tutorials/data-subsampling.tex similarity index 100% rename from docs/tex/api/inference-data-subsampling.tex rename to docs/tex/tutorials/data-subsampling.tex diff --git a/docs/tex/tutorials/index.tex b/docs/tex/tutorials/index.tex index bee8a8d25..7267af082 100644 --- a/docs/tex/tutorials/index.tex +++ b/docs/tex/tutorials/index.tex @@ -12,6 +12,9 @@ \subsection{Tutorials} \href{batch-training}{Batch training} \\ How to train a model using only minibatches of data at a time. +\href{data-subsampling}{Data subsampling} \\ +More advanced data subsampling such as for hierarchical models. + \href{tensorboard}{TensorBoard} \\ Visualize learning, explore the computational graph, and diagnose training problems. diff --git a/edward/criticisms/__init__.py b/edward/criticisms/__init__.py index 5a9aff3d6..a112ce780 100644 --- a/edward/criticisms/__init__.py +++ b/edward/criticisms/__init__.py @@ -1,4 +1,15 @@ -""" +"""We can never validate whether a model is true. In practice, ``all +models are wrong'' [@box1976science]. However, we can try to +uncover where the model goes wrong. Model criticism helps justify the +model as an approximation or point to good directions for revising the +model. For background, see the criticism [tutorial](/tutorials/criticism). + +Edward explores model criticism using + ++ point evaluations, such as mean squared error or + classification accuracy; ++ posterior predictive checks, for making probabilistic + assessments of the model fit using discrepancy functions. 
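+
+As a minimal sketch of both techniques, assuming a posterior predictive
+copy `x_post` of an observed random variable (e.g., built with `ed.copy`)
+and data `x_train`:
+
+```python
+ed.evaluate('log_likelihood', data={x_post: x_train})
+
+ed.ppc(lambda xs, zs: tf.reduce_mean(xs[x_post]),
+       data={x_post: x_train})
+```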
""" from __future__ import absolute_import from __future__ import division From 186971dce57c1d3ec36935f66b32b2ac5c6549c2 Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 14:42:44 -0700 Subject: [PATCH 02/10] move all api docs to tutorials/ --- docs/generate_api_navbar_and_symbols.py | 65 ------------------- docs/tex/{api => tutorials}/data.tex | 0 docs/tex/tutorials/index.tex | 17 +++++ .../inference-api.tex} | 0 .../{api => tutorials}/inference-classes.tex | 0 .../inference-compositionality.tex | 0 .../inference-development.tex | 0 .../model.tex => tutorials/model-api.tex} | 0 .../model-compositionality.tex | 0 .../{api => tutorials}/model-development.tex | 0 10 files changed, 17 insertions(+), 65 deletions(-) rename docs/tex/{api => tutorials}/data.tex (100%) rename docs/tex/{api/inference.tex => tutorials/inference-api.tex} (100%) rename docs/tex/{api => tutorials}/inference-classes.tex (100%) rename docs/tex/{api => tutorials}/inference-compositionality.tex (100%) rename docs/tex/{api => tutorials}/inference-development.tex (100%) rename docs/tex/{api/model.tex => tutorials/model-api.tex} (100%) rename docs/tex/{api => tutorials}/model-compositionality.tex (100%) rename docs/tex/{api => tutorials}/model-development.tex (100%) diff --git a/docs/generate_api_navbar_and_symbols.py b/docs/generate_api_navbar_and_symbols.py index 38c168ff8..26afa0146 100644 --- a/docs/generate_api_navbar_and_symbols.py +++ b/docs/generate_api_navbar_and_symbols.py @@ -29,71 +29,6 @@ 'parent_pages': [], 'child_pages': [], }, - { - 'page': 'data.tex', - 'title': 'Data', - 'parent_pages': [], - 'child_pages': [], - }, - { - 'page': 'model.tex', - 'title': 'Model', - 'parent_pages': [], - 'child_pages': [ - 'model-compositionality.tex', - 'model-development.tex', - ], - }, - { - 'page': 'model-compositionality.tex', - 'title': 'Compositionality', - 'parent_pages': [ - 'model.tex' - ], - 'child_pages': [], - }, - { - 'page': 'model-development.tex', - 'title': 'Development', - 'parent_pages': [ - 'model.tex' - ], - 'child_pages': [], - }, - { - 'page': 'inference.tex', - 'title': 'Inference', - 'parent_pages': [], - 'child_pages': [ - 'inference-classes.tex', - 'inference-compositionality.tex', - 'inference-development.tex', - ], - }, - { - 'page': 'inference-classes.tex', - 'title': 'Classes', - 'parent_pages': [ - 'inference.tex' - ], - 'child_pages': [], - }, - { - 'page': 'inference-compositionality.tex', - 'title': 'Compositionality', - 'parent_pages': [ - 'inference.tex' - ], - 'child_pages': [], - }, - { - 'page': 'inference-development.tex', - 'title': 'Development', - 'parent_pages': [ - 'inference.tex' - ], - 'child_pages': [], - }, { 'page': 'reference.tex', 'title': 'Reference', diff --git a/docs/tex/api/data.tex b/docs/tex/tutorials/data.tex similarity index 100% rename from docs/tex/api/data.tex rename to docs/tex/tutorials/data.tex diff --git a/docs/tex/tutorials/index.tex b/docs/tex/tutorials/index.tex index 7267af082..ea53ce6ca 100644 --- a/docs/tex/tutorials/index.tex +++ b/docs/tex/tutorials/index.tex @@ -105,3 +105,20 @@ \subsubsection{Background} \item \href{/iclr2017}{"Deep probabilistic programming" at ICLR 2017} \end{itemize} + + +todo +\begin{itemize} +\item \href{data}{Data} +\item \href{model-api}{Model} + \begin{itemize} + \item \href{model-compositionality}{Model Compositionality} + \item \href{model-development}{Model Development} + \end{itemize} +\item \href{inference-api}{Inference} + \begin{itemize} + \item \href{inference-classes}{Inference Classes} + \item 
\href{inference-compositionality}{Inference Compositionality} + \item \href{inference-development}{Inference Development} + \end{itemize} +\end{itemize} diff --git a/docs/tex/api/inference.tex b/docs/tex/tutorials/inference-api.tex similarity index 100% rename from docs/tex/api/inference.tex rename to docs/tex/tutorials/inference-api.tex diff --git a/docs/tex/api/inference-classes.tex b/docs/tex/tutorials/inference-classes.tex similarity index 100% rename from docs/tex/api/inference-classes.tex rename to docs/tex/tutorials/inference-classes.tex diff --git a/docs/tex/api/inference-compositionality.tex b/docs/tex/tutorials/inference-compositionality.tex similarity index 100% rename from docs/tex/api/inference-compositionality.tex rename to docs/tex/tutorials/inference-compositionality.tex diff --git a/docs/tex/api/inference-development.tex b/docs/tex/tutorials/inference-development.tex similarity index 100% rename from docs/tex/api/inference-development.tex rename to docs/tex/tutorials/inference-development.tex diff --git a/docs/tex/api/model.tex b/docs/tex/tutorials/model-api.tex similarity index 100% rename from docs/tex/api/model.tex rename to docs/tex/tutorials/model-api.tex diff --git a/docs/tex/api/model-compositionality.tex b/docs/tex/tutorials/model-compositionality.tex similarity index 100% rename from docs/tex/api/model-compositionality.tex rename to docs/tex/tutorials/model-compositionality.tex diff --git a/docs/tex/api/model-development.tex b/docs/tex/tutorials/model-development.tex similarity index 100% rename from docs/tex/api/model-development.tex rename to docs/tex/tutorials/model-development.tex From 1c95393e1f9f0efae6e0be1687a577bf93db0b8a Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 14:44:06 -0700 Subject: [PATCH 03/10] remove api links in doc --- docs/generate_api_navbar_and_symbols.py | 35 ----------------------- docs/tex/tutorials/inference-api.tex | 38 ------------------------- docs/tex/tutorials/model-api.tex | 7 ----- 3 files changed, 80 deletions(-) diff --git a/docs/generate_api_navbar_and_symbols.py b/docs/generate_api_navbar_and_symbols.py index 26afa0146..1baeb6fb0 100644 --- a/docs/generate_api_navbar_and_symbols.py +++ b/docs/generate_api_navbar_and_symbols.py @@ -111,35 +111,6 @@ def generate_top_navbar(): return navbar -def generate_models(): - import edward.models as module - from edward.models import RandomVariable - objs = [getattr(module, name) for name in dir(module)] - objs = [obj for obj in objs - if (isinstance(obj, type) and - issubclass(obj, RandomVariable) and - obj != RandomVariable - ) - ] - objs = sorted(objs, key=lambda cls: cls.__name__) - - links = [('@{{ed.models.{}}}').format(cls.__name__) for cls in objs] - return '\n\item'.join(links) - - -def generate_util(): - import edward.util as module - objs = [getattr(module, name) for name in dir(module)] - objs = [obj for obj in objs - if (hasattr(obj, '__call__') or - isinstance(obj, type)) - ] - objs = sorted(objs, key=lambda cls: cls.__name__) - - links = [('@{{ed.util.{}}}').format(cls.__name__) for cls in objs] - return '\n\item'.join(links) - - def get_tensorflow_version(): import tensorflow return str(getattr(tensorflow, '__version__', '')) @@ -164,12 +135,6 @@ def get_tensorflow_version(): ("File found for " + path + " but missing {{navbar}} tag.") document = document.replace('{{navbar}}', navbar) - if '{{models}}' in document: - document = document.replace('{{models}}', generate_models()) - - if '{{util}}' in document: - document = document.replace('{{util}}', 
generate_util()) - if '{{tensorflow_version}}' in document: document = document.replace('{{tensorflow_version}}', get_tensorflow_version()) diff --git a/docs/tex/tutorials/inference-api.tex b/docs/tex/tutorials/inference-api.tex index 56f3abf18..a4056bada 100644 --- a/docs/tex/tutorials/inference-api.tex +++ b/docs/tex/tutorials/inference-api.tex @@ -146,41 +146,3 @@ \subsubsection{Other Settings} For example, implicit prior samples are useful for generative adversarial networks. Their inference problem does not require any inference over the latent variables; it uses samples from the prior. - -\begin{center}\rule{3in}{0.4pt}\end{center} - -\begin{itemize} - \item @{ed.inferences.Inference} - \item @{ed.inferences.VariationalInference} - \begin{itemize} - \item @{ed.inferences.KLqp} - \begin{itemize} - \item @{ed.inferences.ReparameterizationKLqp} - \item @{ed.inferences.ReparameterizationKLKLqp} - \item @{ed.inferences.ReparameterizationEntropyKLqp} - \item @{ed.inferences.ScoreKLqp} - \item @{ed.inferences.ScoreKLKLqp} - \item @{ed.inferences.ScoreEntropyKLqp} - \end{itemize} - \item @{ed.inferences.KLpq} - \item @{ed.inferences.GANInference} - \begin{itemize} - \item @{ed.inferences.BiGANInference} - \item @{ed.inferences.ImplicitKLqp} - \item @{ed.inferences.WGANInference} - \end{itemize} - \item @{ed.inferences.MAP} - \begin{itemize} - \item @{ed.inferences.Laplace} - \end{itemize} - \end{itemize} - \item @{ed.inferences.MonteCarlo} - \begin{itemize} - \item @{ed.inferences.Gibbs} - \item @{ed.inferences.MetropolisHastings} - \item @{ed.inferences.HMC} - \item @{ed.inferences.SGLD} - \item @{ed.inferences.SGHMC} - \end{itemize} - \item @{ed.inferences.complete_conditional} -\end{itemize} diff --git a/docs/tex/tutorials/model-api.tex b/docs/tex/tutorials/model-api.tex index 9c66d5d6e..0742b743b 100644 --- a/docs/tex/tutorials/model-api.tex +++ b/docs/tex/tutorials/model-api.tex @@ -69,10 +69,3 @@ \subsubsection{Model} In the \href{/api/model-compositionality}{compositionality page}, we describe how to build models by composing random variables. - -\begin{center}\rule{3in}{0.4pt}\end{center} - -\begin{itemize} - \item @{ed.models.RandomVariable} - \item {{models}} -\end{itemize} From 2dcc71188cd10be3ac65b96885c837105feeedae Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 14:48:22 -0700 Subject: [PATCH 04/10] remove navbar --- docs/generate_api_navbar_and_symbols.py | 80 ------------------- docs/tex/api/index.tex | 2 +- docs/tex/api/reference.tex | 2 +- docs/tex/tutorials/data-subsampling.tex | 2 - docs/tex/tutorials/data.tex | 2 - docs/tex/tutorials/inference-api.tex | 2 - docs/tex/tutorials/inference-classes.tex | 2 - .../tutorials/inference-compositionality.tex | 2 - docs/tex/tutorials/inference-development.tex | 2 - docs/tex/tutorials/model-api.tex | 2 - docs/tex/tutorials/model-compositionality.tex | 2 - docs/tex/tutorials/model-development.tex | 2 - 12 files changed, 2 insertions(+), 100 deletions(-) diff --git a/docs/generate_api_navbar_and_symbols.py b/docs/generate_api_navbar_and_symbols.py index 1baeb6fb0..137662860 100644 --- a/docs/generate_api_navbar_and_symbols.py +++ b/docs/generate_api_navbar_and_symbols.py @@ -38,79 +38,6 @@ ] -def generate_navbar(page_data): - """Return a string. It is the navigation bar for ``page_data``.""" - def generate_top_navbar(): - # Create top of navbar. (Note this can be cached and not run within a loop.) - top_navbar = """\\begin{abstract} -\section{API and Documentation} -\\begin{lstlisting}[raw=html] -
-<div class="row">
-"""
-    for page_data in PAGES:
-      title = page_data['title']
-      page_name = page_data['page']
-      parent_pages = page_data['parent_pages']
-      if len(parent_pages) == 0 and \
-         page_name not in ['index.tex', 'reference.tex']:
-        top_navbar += '\n'
-        top_navbar += '<a class="button" href="/api/' + page_name.replace('.tex', '') + '">'
-        top_navbar += title
-        top_navbar += '</a>'
-
-    top_navbar += '\n'
-    top_navbar += '</div>'
-    return top_navbar
-
-  page_name = page_data['page']
-  title = page_data['title']
-  parent_pages = page_data['parent_pages']
-  child_pages = page_data['child_pages']
-
-  navbar = generate_top_navbar()
-  # Create bottom of navbar if there are child pages for that section.
-  if len(child_pages) > 0 or len(parent_pages) > 0:
-    if len(parent_pages) > 0:
-      parent = parent_pages[0]
-      parent_page = [page_data for page_data in PAGES
-                     if page_data['page'] == parent][0]
-      pgs = parent_page['child_pages']
-    else:
-      pgs = child_pages
-
-    navbar += '\n'
-    navbar += '<div class="row">'
-    for child_page in pgs:
-      navbar += '\n'
-      navbar += '<a class="button" href="/api/' + child_page.replace('.tex', '') + '">'
-      navbar += [page_data for page_data in PAGES
-                 if page_data['page'] == child_page][0]['title']
-      navbar += '</a>'
-
-    navbar += '\n'
-    navbar += '</div>'
-
-  navbar += '\n'
-  navbar += """</div>
-\end{lstlisting} -\end{abstract}""" - - # Set primary button in navbar. If a child page, set primary buttons - # for both top and bottom of navbar. - search_term = '" href="/api/' + page_name.replace('.tex', '') + '">' - navbar = navbar.replace(search_term, ' button-primary' + search_term) - if len(parent_pages) > 0: - parent = parent_pages[0] - search_term = '" href="/api/' + parent.replace('.tex', '') + '">' - navbar = navbar.replace(search_term, ' button-primary' + search_term) - - return navbar - - def get_tensorflow_version(): import tensorflow return str(getattr(tensorflow, '__version__', '')) @@ -126,15 +53,8 @@ def get_tensorflow_version(): path = os.path.join(out_dir, 'api', page_name) print(path) - # Generate navigation bar. - navbar = generate_navbar(page_data) - # Insert autogenerated content into page. document = open(path).read() - assert '{{navbar}}' in document, \ - ("File found for " + path + " but missing {{navbar}} tag.") - document = document.replace('{{navbar}}', navbar) - if '{{tensorflow_version}}' in document: document = document.replace('{{tensorflow_version}}', get_tensorflow_version()) diff --git a/docs/tex/api/index.tex b/docs/tex/api/index.tex index 971f01845..d44022bce 100644 --- a/docs/tex/api/index.tex +++ b/docs/tex/api/index.tex @@ -1,6 +1,6 @@ \title{API} -{{navbar}} +\section{API and Documentation} Edward's design reflects the building blocks for probabilistic modeling. It defines interchangeable components, enabling rapid diff --git a/docs/tex/api/reference.tex b/docs/tex/api/reference.tex index 18db85f23..8abcec42d 100644 --- a/docs/tex/api/reference.tex +++ b/docs/tex/api/reference.tex @@ -1,6 +1,6 @@ \title{Reference} -{{navbar}} +\section{API and Documentation} There are four modules in Edward: \texttt{ed.criticisms}, diff --git a/docs/tex/tutorials/data-subsampling.tex b/docs/tex/tutorials/data-subsampling.tex index 3e4009aca..2df7e6341 100644 --- a/docs/tex/tutorials/data-subsampling.tex +++ b/docs/tex/tutorials/data-subsampling.tex @@ -1,7 +1,5 @@ \title{Data Subsampling} -{{navbar}} - \subsubsection{Data Subsampling} Running algorithms which require the full data set for each update diff --git a/docs/tex/tutorials/data.tex b/docs/tex/tutorials/data.tex index c41cb8ab6..3cf2f5018 100644 --- a/docs/tex/tutorials/data.tex +++ b/docs/tex/tutorials/data.tex @@ -1,7 +1,5 @@ \title{Data} -{{navbar}} - \subsubsection{Data} Data defines a set of observations. There are three ways diff --git a/docs/tex/tutorials/inference-api.tex b/docs/tex/tutorials/inference-api.tex index a4056bada..eaf83c63c 100644 --- a/docs/tex/tutorials/inference-api.tex +++ b/docs/tex/tutorials/inference-api.tex @@ -1,7 +1,5 @@ \title{Inference} -{{navbar}} - \subsubsection{Inference} We describe how to perform inference in probabilistic models. 
diff --git a/docs/tex/tutorials/inference-classes.tex b/docs/tex/tutorials/inference-classes.tex index 487f5a0eb..7e67f0b09 100644 --- a/docs/tex/tutorials/inference-classes.tex +++ b/docs/tex/tutorials/inference-classes.tex @@ -1,7 +1,5 @@ \title{Classes of Inference} -{{navbar}} - \subsubsection{Classes of Inference} Inference is broadly classified under three classes: variational diff --git a/docs/tex/tutorials/inference-compositionality.tex b/docs/tex/tutorials/inference-compositionality.tex index f8a981e83..fdd62bceb 100644 --- a/docs/tex/tutorials/inference-compositionality.tex +++ b/docs/tex/tutorials/inference-compositionality.tex @@ -1,7 +1,5 @@ \title{Composing Inferences} -{{navbar}} - \subsubsection{Composing Inferences} Core to Edward's design is compositionality. Compositionality enables diff --git a/docs/tex/tutorials/inference-development.tex b/docs/tex/tutorials/inference-development.tex index ff73dbf99..867728306 100644 --- a/docs/tex/tutorials/inference-development.tex +++ b/docs/tex/tutorials/inference-development.tex @@ -1,7 +1,5 @@ \title{Developing Inference Algorithms} -{{navbar}} - \subsubsection{Developing Inference Algorithms} Edward uses class inheritance to provide a hierarchy of inference diff --git a/docs/tex/tutorials/model-api.tex b/docs/tex/tutorials/model-api.tex index 0742b743b..6e0ec8e1f 100644 --- a/docs/tex/tutorials/model-api.tex +++ b/docs/tex/tutorials/model-api.tex @@ -1,7 +1,5 @@ \title{Model} -{{navbar}} - \subsubsection{Model} A probabilistic model is a joint distribution $p(\mathbf{x}, diff --git a/docs/tex/tutorials/model-compositionality.tex b/docs/tex/tutorials/model-compositionality.tex index a28ae68ff..a4312f31e 100644 --- a/docs/tex/tutorials/model-compositionality.tex +++ b/docs/tex/tutorials/model-compositionality.tex @@ -1,7 +1,5 @@ \title{Composing Random Variables} -{{navbar}} - \subsubsection{Composing Random Variables} Core to Edward's design is compositionality. Compositionality enables diff --git a/docs/tex/tutorials/model-development.tex b/docs/tex/tutorials/model-development.tex index 10364e859..becabca15 100644 --- a/docs/tex/tutorials/model-development.tex +++ b/docs/tex/tutorials/model-development.tex @@ -1,7 +1,5 @@ \title{Developing Custom Random Variables} -{{navbar}} - \subsubsection{Developing Custom Random Variables} Oftentimes we'd like to implement our own random variables. 
From ad96b317ef2e2549566ff451973a4ac3056b59b5 Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 15:11:44 -0700 Subject: [PATCH 05/10] move inference/model page to __init__ --- docs/tex/tutorials/index.tex | 4 +- docs/tex/tutorials/inference-api.tex | 146 -------------------------- docs/tex/tutorials/model-api.tex | 69 ------------- edward/inferences/__init__.py | 148 ++++++++++++++++++++++++++- edward/models/__init__.py | 66 +++++++++++- 5 files changed, 214 insertions(+), 219 deletions(-) delete mode 100644 docs/tex/tutorials/inference-api.tex delete mode 100644 docs/tex/tutorials/model-api.tex diff --git a/docs/tex/tutorials/index.tex b/docs/tex/tutorials/index.tex index ea53ce6ca..2ca2f4c88 100644 --- a/docs/tex/tutorials/index.tex +++ b/docs/tex/tutorials/index.tex @@ -110,12 +110,12 @@ \subsubsection{Background} todo \begin{itemize} \item \href{data}{Data} -\item \href{model-api}{Model} +\item model api \begin{itemize} \item \href{model-compositionality}{Model Compositionality} \item \href{model-development}{Model Development} \end{itemize} -\item \href{inference-api}{Inference} +\item inference api \begin{itemize} \item \href{inference-classes}{Inference Classes} \item \href{inference-compositionality}{Inference Compositionality} diff --git a/docs/tex/tutorials/inference-api.tex b/docs/tex/tutorials/inference-api.tex deleted file mode 100644 index eaf83c63c..000000000 --- a/docs/tex/tutorials/inference-api.tex +++ /dev/null @@ -1,146 +0,0 @@ -\title{Inference} - -\subsubsection{Inference} - -We describe how to perform inference in probabilistic models. -For background, see the -\href{/tutorials/inference}{Inference tutorial}. - -Suppose we have a model $p(\mathbf{x}, \mathbf{z}, \beta)$ of data $\mathbf{x}_{\text{train}}$ with latent variables $(\mathbf{z}, \beta)$. -Consider the posterior inference problem, -\begin{equation*} -q(\mathbf{z}, \beta)\approx p(\mathbf{z}, \beta\mid \mathbf{x}_{\text{train}}), -\end{equation*} -in which the task is to approximate the posterior -$p(\mathbf{z}, \beta\mid \mathbf{x}_{\text{train}})$ -using a family of distributions, $q(\mathbf{z},\beta; \lambda)$, -indexed by parameters $\lambda$. - -In Edward, let \texttt{z} and \texttt{beta} be latent variables in the model, -where we observe the random variable \texttt{x} with -data \texttt{x_train}. -Let \texttt{qz} and \texttt{qbeta} be random variables defined to -approximate the posterior. -We write this problem as follows: - -\begin{lstlisting}[language=Python] -inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) -\end{lstlisting} - -\texttt{Inference} is an abstract class which takes two inputs. The -first is a collection of latent random variables \texttt{beta} and -\texttt{z}, along with ``posterior variables'' \texttt{qbeta} and -\texttt{qz}, which are associated to their respective latent -variables. The second is a collection of observed random variables -\texttt{x}, which is associated to the data \texttt{x_train}. - -Inference adjusts parameters of the distribution of \texttt{qbeta} -and \texttt{qz} to be close to the -posterior $p(\mathbf{z}, \beta\,|\,\mathbf{x}_{\text{train}})$. - -Running inference is as simple as running one method. - -\begin{lstlisting}[language=Python] -inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) -inference.run() -\end{lstlisting} - -Inference also supports fine control of the training procedure. 
- -\begin{lstlisting}[language=Python] -inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) -inference.initialize() - -tf.global_variables_initializer().run() - -for _ in range(inference.n_iter): - info_dict = inference.update() - inference.print_progress(info_dict) - -inference.finalize() -\end{lstlisting} - -\texttt{initialize()} builds the algorithm's update rules -(computational graph) for $\lambda$; -\texttt{tf.global_variables_initializer().run()} initializes $\lambda$ -(TensorFlow variables in the graph); -\texttt{update()} runs the graph once to update -$\lambda$, which is called in a loop until convergence; -\texttt{finalize()} runs any computation as the algorithm -terminates. - -The \texttt{run()} method is a simple wrapper for this procedure. - -\subsubsection{Other Settings} - -We highlight other settings during inference. - -\textbf{Model parameters}. -Model parameters are parameters in a model that we will always compute -point estimates for and not be uncertain about. -They are defined with \texttt{tf.Variable}s, where the inference -problem is -\begin{equation*} -\hat{\theta} \leftarrow^{\text{optimize}} -p(\mathbf{x}_{\text{train}}; \theta) -\end{equation*} - -\begin{lstlisting}[language=Python] -from edward.models import Normal - -theta = tf.Variable(0.0) -x = Normal(loc=tf.ones(10) * theta, scale=1.0) - -inference = ed.Inference({}, {x: x_train}) -\end{lstlisting} - -Only a subset of inference algorithms support estimation of model -parameters. -(Note also that this inference example does not have any latent -variables. It is only about estimating \texttt{theta} given that we -observe $\mathbf{x} = \mathbf{x}_{\text{train}}$. We can add them so -that inference is both posterior inference and parameter estimation.) - -For example, model parameters are useful when applying neural networks -from high-level libraries such as Keras and TensorFlow Slim. See -the \href{/api/model-compositionality}{model compositionality} page -for more details. - -\textbf{Conditional inference}. -In conditional inference, only a subset of the posterior is inferred -while the rest are fixed using other inferences. The inference -problem is -\begin{equation*} -q(\mathbf{z}\mid\beta)q(\beta)\approx -p(\mathbf{z}, \beta\mid\mathbf{x}_{\text{train}}) -\end{equation*} -where parameters in $q(\mathbf{z}\mid\beta)$ are estimated and -$q(\beta)$ is fixed. -% -In Edward, we enable conditioning by binding random variables to other -random variables in \texttt{data}. -\begin{lstlisting}[language=Python] -inference = ed.Inference({z: qz}, {x: x_train, beta: qbeta}) -\end{lstlisting} - -In the \href{/api/inference-compositionality}{compositionality page}, -we describe how to construct inference by composing -many conditional inference algorithms. - -\textbf{Implicit prior samples}. -Latent variables can be defined in the model without any posterior -inference over them. They are implicitly marginalized out with a -single sample. The inference problem is -\begin{equation*} -q(\beta)\approx -p(\beta\mid\mathbf{x}_{\text{train}}, \mathbf{z}^*) -\end{equation*} -where $\mathbf{z}^*\sim p(\mathbf{z}\mid\beta)$ is a prior sample. - -\begin{lstlisting}[language=Python] -inference = ed.Inference({beta: qbeta}, {x: x_train}) -\end{lstlisting} - -For example, implicit prior samples are useful for generative adversarial -networks. Their inference problem does not require any inference over -the latent variables; it uses samples from the prior. 
diff --git a/docs/tex/tutorials/model-api.tex b/docs/tex/tutorials/model-api.tex deleted file mode 100644 index 6e0ec8e1f..000000000 --- a/docs/tex/tutorials/model-api.tex +++ /dev/null @@ -1,69 +0,0 @@ -\title{Model} - -\subsubsection{Model} - -A probabilistic model is a joint distribution $p(\mathbf{x}, -\mathbf{z})$ of data $\mathbf{x}$ and latent variables $\mathbf{z}$. -For background, see the \href{/tutorials/model}{Probabilistic Models tutorial}. - -In Edward, we specify models using a simple language of random variables. -A random variable $\mathbf{x}$ is an object parameterized by -tensors $\theta^*$, where -the number of random variables in one object is determined by -the dimensions of its parameters. - -\begin{lstlisting}[language=Python] -from edward.models import Normal, Exponential - -# univariate normal -Normal(loc=tf.constant(0.0), scale=tf.constant(1.0)) -# vector of 5 univariate normals -Normal(loc=tf.zeros(5), scale=tf.ones(5)) -# 2 x 3 matrix of Exponentials -Exponential(rate=tf.ones([2, 3])) -\end{lstlisting} - -For multivariate distributions, the multivariate dimension is the -innermost (right-most) dimension of the parameters. - -\begin{lstlisting}[language=Python] -from edward.models import Dirichlet, MultivariateNormalTriL - -# K-dimensional Dirichlet -Dirichlet(concentration=tf.constant([0.1] * K)) -# vector of 5 K-dimensional multivariate normals with lower triangular cov -MultivariateNormalTriL(loc=tf.zeros([5, K]), scale_tril=tf.ones([5, K, K])) -# 2 x 5 matrix of K-dimensional multivariate normals -MultivariateNormalTriL(loc=tf.zeros([2, 5, K]), scale_tril=tf.ones([2, 5, K, K])) -\end{lstlisting} - -Random variables are equipped with methods such as -\texttt{log_prob()}, $\log p(\mathbf{x}\mid\theta^*)$, -\texttt{mean()}, $\mathbb{E}_{p(\mathbf{x}\mid\theta^*)}[\mathbf{x}]$, -and \texttt{sample()}, $\mathbf{x}^*\sim p(\mathbf{x}\mid\theta^*)$. -Further, each random variable is associated to a tensor $\mathbf{x}^*$ in the -computational graph, which represents a single sample $\mathbf{x}^*\sim -p(\mathbf{x}\mid\theta^*)$. - -This makes it easy to parameterize random variables with complex -deterministic structure, such as with deep neural networks, a diverse -set of math operations, and compatibility with third party libraries -which also build on TensorFlow. -The design also enables compositions of random variables -to capture complex stochastic structure. -They operate on $\mathbf{x}^*$. - -\includegraphics[width=375px]{/images/random_variable_ops.png} - -\begin{lstlisting}[language=Python] -from edward.models import Normal - -x = Normal(loc=tf.zeros(10), scale=tf.ones(10)) -y = tf.constant(5.0) -x + y, x - y, x * y, x / y -tf.tanh(x * y) -x[2] # 3rd normal rv in the vector -\end{lstlisting} - -In the \href{/api/model-compositionality}{compositionality page}, we -describe how to build models by composing random variables. diff --git a/edward/inferences/__init__.py b/edward/inferences/__init__.py index 38262fcb7..576a104ae 100644 --- a/edward/inferences/__init__.py +++ b/edward/inferences/__init__.py @@ -1,4 +1,150 @@ -""" +"""We describe how to perform inference in probabilistic models. +For background, see the +[Inference tutorial](/tutorials/inference). + +Suppose we have a model $p(\mathbf{x}, \mathbf{z}, \\beta)$ of data +$\mathbf{x}_{\\text{train}}$ with latent variables $(\mathbf{z}, \\beta)$. 
+Consider the posterior inference problem, + +$$ +q(\mathbf{z}, \\beta)\\approx p(\mathbf{z}, \\beta\mid \mathbf{x}_{\\text{train}}), +$$ + +in which the task is to approximate the posterior +$p(\mathbf{z}, \\beta\mid \mathbf{x}_{\\text{train}})$ +using a family of distributions, $q(\mathbf{z},\\beta; \lambda)$, +indexed by parameters $\lambda$. + +In Edward, let `z` and `beta` be latent variables in the model, +where we observe the random variable `x` with +data `x_train`. +Let `qz` and `qbeta` be random variables defined to +approximate the posterior. +We write this problem as follows: + +```python +inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) +``` + +`Inference` is an abstract class which takes two inputs. The +first is a collection of latent random variables `beta` and +`z`, along with "posterior variables" `qbeta` and +`qz`, which are associated to their respective latent +variables. The second is a collection of observed random variables +`x`, which is associated to the data `x_train`. + +Inference adjusts parameters of the distribution of `qbeta` +and `qz` to be close to the +posterior $p(\mathbf{z}, \\beta\,|\,\mathbf{x}_{\\text{train}})$. + +Running inference is as simple as running one method. + +```python +inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) +inference.run() +``` + +Inference also supports fine control of the training procedure. + +```python +inference = ed.Inference({z: qz, beta: qbeta}, {x: x_train}) +inference.initialize() + +tf.global_variables_initializer().run() + +for _ in range(inference.n_iter): + info_dict = inference.update() + inference.print_progress(info_dict) + +inference.finalize() +``` + +`initialize()` builds the algorithm's update rules +(computational graph) for $\lambda$; +`tf.global_variables_initializer().run()` initializes $\lambda$ +(TensorFlow variables in the graph); +`update()` runs the graph once to update +$\lambda$, which is called in a loop until convergence; +`finalize()` runs any computation as the algorithm +terminates. + +The `run()` method is a simple wrapper for this procedure. + +### Other Settings + +We highlight other settings during inference. + +__Model parameters__. +Model parameters are parameters in a model that we will always compute +point estimates for and not be uncertain about. +They are defined with `tf.Variable`s, where the inference +problem is + +$$ +\hat{\\theta} \leftarrow^{\\text{optimize}} +p(\mathbf{x}_{\\text{train}}; \\theta) +$$ + +```python +from edward.models import Normal + +theta = tf.Variable(0.0) +x = Normal(loc=tf.ones(10) * theta, scale=1.0) + +inference = ed.Inference({}, {x: x_train}) +``` + +Only a subset of inference algorithms support estimation of model +parameters. +(Note also that this inference example does not have any latent +variables. It is only about estimating `theta` given that we +observe $\mathbf{x} = \mathbf{x}_{\\text{train}}$. We can add them so +that inference is both posterior inference and parameter estimation.) + +For example, model parameters are useful when applying neural networks +from high-level libraries such as Keras and TensorFlow Slim. See +the [model compositionality](/tutorials/model-compositionality) page +for more details. + +__Conditional inference__. +In conditional inference, only a subset of the posterior is inferred +while the rest are fixed using other inferences. 
The inference
+problem is
+
+$$
+q(\mathbf{z}\mid\\beta)q(\\beta)\\approx
+p(\mathbf{z}, \\beta\mid\mathbf{x}_{\\text{train}})
+$$
+
+where parameters in $q(\mathbf{z}\mid\\beta)$ are estimated and
+$q(\\beta)$ is fixed.
+In Edward, we enable conditioning by binding random variables to other
+random variables in `data`.
+```python
+inference = ed.Inference({z: qz}, {x: x_train, beta: qbeta})
+```
+
+In the [compositionality tutorial](/tutorials/inference-compositionality),
+we describe how to construct inference by composing
+many conditional inference algorithms.
+
+__Implicit prior samples__.
+Latent variables can be defined in the model without any posterior
+inference over them. They are implicitly marginalized out with a
+single sample. The inference problem is
+
+$$
+q(\\beta)\\approx
+p(\\beta\mid\mathbf{x}_{\\text{train}}, \mathbf{z}^*)
+$$
+
+where $\mathbf{z}^*\sim p(\mathbf{z}\mid\\beta)$ is a prior sample.
+
+```python
+inference = ed.Inference({beta: qbeta}, {x: x_train})
+```
+
+For example, implicit prior samples are useful for generative adversarial
+networks. Their inference problem does not require any inference over
+the latent variables; it uses samples from the prior.
 """
 from __future__ import absolute_import
 from __future__ import division
diff --git a/edward/models/__init__.py b/edward/models/__init__.py
index 2b2eaa2cc..01218e1af 100644
--- a/edward/models/__init__.py
+++ b/edward/models/__init__.py
@@ -1,4 +1,68 @@
-"""
+"""A probabilistic model is a joint distribution $p(\mathbf{x},
+\mathbf{z})$ of data $\mathbf{x}$ and latent variables $\mathbf{z}$.
+For background, see the [Probabilistic Models tutorial](/tutorials/model).
+
+In Edward, we specify models using a simple language of random variables.
+A random variable $\mathbf{x}$ is an object parameterized by
+tensors $\\theta^*$, where
+the number of random variables in one object is determined by
+the dimensions of its parameters.
+
+```python
+from edward.models import Normal, Exponential
+
+# univariate normal
+Normal(loc=tf.constant(0.0), scale=tf.constant(1.0))
+# vector of 5 univariate normals
+Normal(loc=tf.zeros(5), scale=tf.ones(5))
+# 2 x 3 matrix of Exponentials
+Exponential(rate=tf.ones([2, 3]))
+```
+
+For multivariate distributions, the multivariate dimension is the
+innermost (right-most) dimension of the parameters.
+
+```python
+from edward.models import Dirichlet, MultivariateNormalTriL
+
+# K-dimensional Dirichlet
+Dirichlet(concentration=tf.constant([0.1] * K))
+# vector of 5 K-dimensional multivariate normals with lower triangular cov
+MultivariateNormalTriL(loc=tf.zeros([5, K]), scale_tril=tf.ones([5, K, K]))
+# 2 x 5 matrix of K-dimensional multivariate normals
+MultivariateNormalTriL(loc=tf.zeros([2, 5, K]), scale_tril=tf.ones([2, 5, K, K]))
+```
+
+Random variables are equipped with methods such as
+`log_prob()`, $\log p(\mathbf{x}\mid\\theta^*)$,
+`mean()`, $\mathbb{E}_{p(\mathbf{x}\mid\\theta^*)}[\mathbf{x}]$,
+and `sample()`, $\mathbf{x}^*\sim p(\mathbf{x}\mid\\theta^*)$.
+Further, each random variable is associated to a tensor $\mathbf{x}^*$ in the
+computational graph, which represents a single sample $\mathbf{x}^*\sim
+p(\mathbf{x}\mid\\theta^*)$.
+
+This makes it easy to parameterize random variables with complex
+deterministic structure, such as with deep neural networks, a diverse
+set of math operations, and compatibility with third party libraries
+which also build on TensorFlow.
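+
+As a small sketch of the neural network case (`x_data` below is a
+hypothetical input tensor; `tf.layers.dense` computes the normal's mean):
+
+```python
+from edward.models import Normal
+
+x_data = tf.random_normal([50, 10])
+y = Normal(loc=tf.layers.dense(x_data, 1), scale=1.0)
+```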
+The design also enables compositions of random variables +to capture complex stochastic structure. +They operate on $\mathbf{x}^*$. + + + +```python +from edward.models import Normal + +x = Normal(loc=tf.zeros(10), scale=tf.ones(10)) +y = tf.constant(5.0) +x + y, x - y, x * y, x / y +tf.tanh(x * y) +x[2] # 3rd normal rv in the vector +``` + +In the [compositionality tutorial](/tutorials/model-compositionality), we +describe how to build models by composing random variables. """ from __future__ import absolute_import from __future__ import division From 46287bb032099649fdf54ab32d50e7050d1bdb8f Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 15:17:23 -0700 Subject: [PATCH 06/10] fix formatting --- docs/tex/tutorials/data-subsampling.tex | 4 +- docs/tex/tutorials/data.tex | 2 +- docs/tex/tutorials/index.tex | 39 ++++++++++--------- docs/tex/tutorials/inference-classes.tex | 2 +- .../tutorials/inference-compositionality.tex | 2 +- docs/tex/tutorials/inference-development.tex | 2 +- docs/tex/tutorials/model-compositionality.tex | 2 +- docs/tex/tutorials/model-development.tex | 2 +- 8 files changed, 28 insertions(+), 27 deletions(-) diff --git a/docs/tex/tutorials/data-subsampling.tex b/docs/tex/tutorials/data-subsampling.tex index 2df7e6341..e51f498dd 100644 --- a/docs/tex/tutorials/data-subsampling.tex +++ b/docs/tex/tutorials/data-subsampling.tex @@ -1,6 +1,6 @@ \title{Data Subsampling} -\subsubsection{Data Subsampling} +\subsection{Data Subsampling} Running algorithms which require the full data set for each update can be expensive when the data is large. In order to scale inferences, @@ -91,7 +91,7 @@ \subsubsection{Subgraphs} $\mathbf{z}$ given $\beta$. We also pass in a TensorFlow placeholder \texttt{x_ph} for the data, so we can change the data at each step. (Alternatively, -\href{/api/data}{batch tensors} can be used.) +\href{/tutorials/data}{batch tensors} can be used.) \begin{lstlisting}[language=Python] x_ph = tf.placeholder(tf.float32, [M]) diff --git a/docs/tex/tutorials/data.tex b/docs/tex/tutorials/data.tex index 3cf2f5018..a578e0514 100644 --- a/docs/tex/tutorials/data.tex +++ b/docs/tex/tutorials/data.tex @@ -1,6 +1,6 @@ \title{Data} -\subsubsection{Data} +\subsection{Data} Data defines a set of observations. There are three ways to read data in Edward. They follow the diff --git a/docs/tex/tutorials/index.tex b/docs/tex/tutorials/index.tex index 2ca2f4c88..f0ec1cbac 100644 --- a/docs/tex/tutorials/index.tex +++ b/docs/tex/tutorials/index.tex @@ -54,6 +54,25 @@ \subsection{Tutorials} If you're interested in contributing a tutorial, checking out the \href{/contributing}{contributing page}. +\subsubsection{Advanced Tutorials} + +\href{data}{Data} + +model api + +\begin{itemize} + \item \href{model-compositionality}{Model Compositionality} + \item \href{model-development}{Model Development} +\end{itemize} + +inference api + +\begin{itemize} + \item \href{inference-classes}{Inference Classes} + \item \href{inference-compositionality}{Inference Compositionality} + \item \href{inference-development}{Inference Development} +\end{itemize} + \subsubsection{Videos} \begin{itemize} @@ -102,23 +121,5 @@ \subsubsection{Background} There are also companion webpages for several papers about Edward. 
\begin{itemize} -\item -\href{/iclr2017}{"Deep probabilistic programming" at ICLR 2017} -\end{itemize} - - -todo -\begin{itemize} -\item \href{data}{Data} -\item model api - \begin{itemize} - \item \href{model-compositionality}{Model Compositionality} - \item \href{model-development}{Model Development} - \end{itemize} -\item inference api - \begin{itemize} - \item \href{inference-classes}{Inference Classes} - \item \href{inference-compositionality}{Inference Compositionality} - \item \href{inference-development}{Inference Development} - \end{itemize} + \item \href{/iclr2017}{"Deep probabilistic programming" at ICLR 2017} \end{itemize} diff --git a/docs/tex/tutorials/inference-classes.tex b/docs/tex/tutorials/inference-classes.tex index 7e67f0b09..1edbe49b6 100644 --- a/docs/tex/tutorials/inference-classes.tex +++ b/docs/tex/tutorials/inference-classes.tex @@ -1,6 +1,6 @@ \title{Classes of Inference} -\subsubsection{Classes of Inference} +\subsection{Classes of Inference} Inference is broadly classified under three classes: variational inference, Monte Carlo, and exact inference. diff --git a/docs/tex/tutorials/inference-compositionality.tex b/docs/tex/tutorials/inference-compositionality.tex index fdd62bceb..585befaa9 100644 --- a/docs/tex/tutorials/inference-compositionality.tex +++ b/docs/tex/tutorials/inference-compositionality.tex @@ -1,6 +1,6 @@ \title{Composing Inferences} -\subsubsection{Composing Inferences} +\subsection{Composing Inferences} Core to Edward's design is compositionality. Compositionality enables fine control of inference, where we can write inference as a diff --git a/docs/tex/tutorials/inference-development.tex b/docs/tex/tutorials/inference-development.tex index 867728306..2cea034da 100644 --- a/docs/tex/tutorials/inference-development.tex +++ b/docs/tex/tutorials/inference-development.tex @@ -1,6 +1,6 @@ \title{Developing Inference Algorithms} -\subsubsection{Developing Inference Algorithms} +\subsection{Developing Inference Algorithms} Edward uses class inheritance to provide a hierarchy of inference methods. This enables fast experimentation on top of existing diff --git a/docs/tex/tutorials/model-compositionality.tex b/docs/tex/tutorials/model-compositionality.tex index a4312f31e..04a658d44 100644 --- a/docs/tex/tutorials/model-compositionality.tex +++ b/docs/tex/tutorials/model-compositionality.tex @@ -1,6 +1,6 @@ \title{Composing Random Variables} -\subsubsection{Composing Random Variables} +\subsection{Composing Random Variables} Core to Edward's design is compositionality. Compositionality enables fine control of modeling, where models are represented as a collection diff --git a/docs/tex/tutorials/model-development.tex b/docs/tex/tutorials/model-development.tex index becabca15..b07765e5b 100644 --- a/docs/tex/tutorials/model-development.tex +++ b/docs/tex/tutorials/model-development.tex @@ -1,6 +1,6 @@ \title{Developing Custom Random Variables} -\subsubsection{Developing Custom Random Variables} +\subsection{Developing Custom Random Variables} Oftentimes we'd like to implement our own random variables. 
To do so, write a class that inherits From 15b8c818bbdc8ba1b35d5277e91d6b230202f550 Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 15:30:53 -0700 Subject: [PATCH 07/10] move reference to /api/ed --- docs/generate_api_navbar_and_symbols.py | 7 +-- docs/generate_api_toc.py | 3 - docs/tex/api/index.tex | 2 +- docs/tex/api/reference.tex | 84 ------------------------- docs/tex/template-api.pandoc | 1 - edward/__init__.py | 18 ++++++ 6 files changed, 20 insertions(+), 95 deletions(-) delete mode 100644 docs/tex/api/reference.tex diff --git a/docs/generate_api_navbar_and_symbols.py b/docs/generate_api_navbar_and_symbols.py index 137662860..048ef52ef 100644 --- a/docs/generate_api_navbar_and_symbols.py +++ b/docs/generate_api_navbar_and_symbols.py @@ -29,12 +29,6 @@ 'parent_pages': [], 'child_pages': [], }, - { - 'page': 'reference.tex', - 'title': 'Reference', - 'parent_pages': [], - 'child_pages': [], - }, ] @@ -53,6 +47,7 @@ def get_tensorflow_version(): path = os.path.join(out_dir, 'api', page_name) print(path) + # TODO do for /api/ed # Insert autogenerated content into page. document = open(path).read() if '{{tensorflow_version}}' in document: diff --git a/docs/generate_api_toc.py b/docs/generate_api_toc.py index dfd00d90d..20d8902af 100644 --- a/docs/generate_api_toc.py +++ b/docs/generate_api_toc.py @@ -23,9 +23,6 @@ toc = '' for entry in data_map['toc']: title = entry['title'] - if title == 'ed': - continue - section = entry['section'] assert section[0]['title'] == 'Overview' path = section[0]['path'] diff --git a/docs/tex/api/index.tex b/docs/tex/api/index.tex index d44022bce..fe91289f4 100644 --- a/docs/tex/api/index.tex +++ b/docs/tex/api/index.tex @@ -36,6 +36,6 @@ \section{API and Documentation} Navigate modules enabling this analysis above. See the -\href{/api/reference}{reference page} for a list of the API. +\href{/api/ed}{reference page} for a list of the API. \subsubsection{References}\label{references} diff --git a/docs/tex/api/reference.tex b/docs/tex/api/reference.tex deleted file mode 100644 index 8abcec42d..000000000 --- a/docs/tex/api/reference.tex +++ /dev/null @@ -1,84 +0,0 @@ -\title{Reference} - -\section{API and Documentation} - -There are four modules in Edward: -\texttt{ed.criticisms}, -\texttt{ed.inferences}, -\texttt{ed.models}, -and -\texttt{ed.util}. - -\subsubsection{Criticism} - -\texttt{ed.criticisms} is comprised of functions. They operate on -random variables in a model or they operate on NumPy arrays -representing values drawn from the random variables. - -\begin{itemize} - \item {{criticisms}} -\end{itemize} - -\subsubsection{Inference} - -\texttt{ed.inferences} is mostly comprised of classes. They are -organized in a class hierarchy, where methods are shared via parent -classes and \texttt{Inference} is the top-most base class. 
- -\begin{itemize} - \item @{ed.inferences.Inference} - \item @{ed.inferences.VariationalInference} - \begin{itemize} - \item @{ed.inferences.KLqp} - \begin{itemize} - \item @{ed.inferences.ReparameterizationKLqp} - \item @{ed.inferences.ReparameterizationKLKLqp} - \item @{ed.inferences.ReparameterizationEntropyKLqp} - \item @{ed.inferences.ScoreKLqp} - \item @{ed.inferences.ScoreKLKLqp} - \item @{ed.inferences.ScoreEntropyKLqp} - \end{itemize} - \item @{ed.inferences.KLpq} - \item @{ed.inferences.GANInference} - \begin{itemize} - \item @{ed.inferences.BiGANInference} - \item @{ed.inferences.ImplicitKLqp} - \item @{ed.inferences.WGANInference} - \end{itemize} - \item @{ed.inferences.MAP} - \begin{itemize} - \item @{ed.inferences.Laplace} - \end{itemize} - \end{itemize} - \item @{ed.inferences.MonteCarlo} - \begin{itemize} - \item @{ed.inferences.Gibbs} - \item @{ed.inferences.MetropolisHastings} - \item @{ed.inferences.HMC} - \item @{ed.inferences.SGLD} - \item @{ed.inferences.SGHMC} - \end{itemize} - \item @{ed.inferences.complete_conditional} -\end{itemize} - -\subsubsection{Models} - -\texttt{ed.models} is comprised of random variables. -The list of available random variables depends on the TensorFlow -version installed. For TensorFlow {{tensorflow_version}}, the -following are available: - -\begin{itemize} - \item @{ed.models.RandomVariable} - \item {{models}} -\end{itemize} - -\subsubsection{Utilities} - -\texttt{ed.util} is comprised of functions for miscellaneous usage. - -\begin{itemize} - \item {{util}} - \item @{ed.VERSION} - \item @{ed.__version__} -\end{itemize} diff --git a/docs/tex/template-api.pandoc b/docs/tex/template-api.pandoc index ce9c6d584..e8a4cc3e2 100644 --- a/docs/tex/template-api.pandoc +++ b/docs/tex/template-api.pandoc @@ -114,7 +114,6 @@ $endfor$ API
-        <a class="button" href="/api/reference">Reference</a>
         {{toc}}
Date: Sun, 1 Oct 2017 15:41:51 -0700 Subject: [PATCH 08/10] add inline descriptions of modules --- edward/__init__.py | 18 +----------------- edward/criticisms/__init__.py | 4 +++- edward/inferences/__init__.py | 4 +++- edward/models/__init__.py | 4 +++- edward/util/__init__.py | 2 +- 5 files changed, 11 insertions(+), 21 deletions(-) diff --git a/edward/__init__.py b/edward/__init__.py index 15ffa7749..089a59c3e 100644 --- a/edward/__init__.py +++ b/edward/__init__.py @@ -1,20 +1,4 @@ -"""There are four modules in Edward: - -+ [`ed.criticisms`](/api/ed/criticisms) - is comprised of functions. They operate on - random variables in a model or they operate on NumPy arrays - representing values drawn from the random variables. -+ [`ed.inferences`](/api/ed/inferences) - is mostly comprised of classes. They are - organized in a class hierarchy, where methods are shared via parent - classes and `Inference` is the top-most base class. -+ [`ed.models`](/api/ed/models) - is comprised of random variables. - The list of available random variables depends on the TensorFlow - version installed. The module's API docs describe those available for - TensorFlow {{tensorflow_version}}. -+ [`ed.util`](/api/ed/util) - is comprised of functions for miscellaneous usage. +""" """ from __future__ import absolute_import from __future__ import division diff --git a/edward/criticisms/__init__.py b/edward/criticisms/__init__.py index a112ce780..7b11b536c 100644 --- a/edward/criticisms/__init__.py +++ b/edward/criticisms/__init__.py @@ -1,4 +1,6 @@ -"""We can never validate whether a model is true. In practice, ``all +"""Assessments for program and inference correctness. + +We can never validate whether a model is true. In practice, ``all models are wrong'' [@box1976science]. However, we can try to uncover where the model goes wrong. Model criticism helps justify the model as an approximation or point to good directions for revising the diff --git a/edward/inferences/__init__.py b/edward/inferences/__init__.py index 576a104ae..a81636fc9 100644 --- a/edward/inferences/__init__.py +++ b/edward/inferences/__init__.py @@ -1,4 +1,6 @@ -"""We describe how to perform inference in probabilistic models. +"""Algorithms for inferring parameters or latent variables. + +We describe how to perform inference in probabilistic models. For background, see the [Inference tutorial](/tutorials/inference). diff --git a/edward/models/__init__.py b/edward/models/__init__.py index 01218e1af..e44aeb0b6 100644 --- a/edward/models/__init__.py +++ b/edward/models/__init__.py @@ -1,4 +1,6 @@ -"""A probabilistic model is a joint distribution $p(\mathbf{x}, +"""Probabilistic program primitives. + +A probabilistic model is a joint distribution $p(\mathbf{x}, \mathbf{z})$ of data $\mathbf{x}$ and latent variables $\mathbf{z}$. For background, see the [Probabilistic Models tutorial](/tutorials/model). diff --git a/edward/util/__init__.py b/edward/util/__init__.py index dce454aed..e1e322a9a 100644 --- a/edward/util/__init__.py +++ b/edward/util/__init__.py @@ -1,4 +1,4 @@ -""" +"""Miscellaneous utility functions. 
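+
+For example, two utilities used throughout the documentation (`x`, `z`,
+`beta`, `qz`, and `qbeta` below are hypothetical random variables):
+
+```python
+ed.set_seed(42)  # fix the graph-level random seed
+x_post = ed.copy(x, {z: qz, beta: qbeta})  # copy x, rebinding priors to posteriors
+```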
""" from __future__ import absolute_import from __future__ import division From 656e75d85d9bd7dc593383acb1aff9ed60dd29bf Mon Sep 17 00:00:00 2001 From: Dustin Tran Date: Sun, 1 Oct 2017 21:49:03 -0700 Subject: [PATCH 09/10] add overview to TOC --- docs/parser/generate_lib.py | 8 ++++---- docs/tex/api/index.tex | 2 +- docs/tex/template-api.pandoc | 1 + 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/parser/generate_lib.py b/docs/parser/generate_lib.py index be95ec7c3..8c3ca2158 100644 --- a/docs/parser/generate_lib.py +++ b/docs/parser/generate_lib.py @@ -165,10 +165,10 @@ def write_docs(output_dir, parser_config, yaml_toc): symbol_to_file[full_name] + '\n') # Write a global index containing all full names with links. - # with open(os.path.join(output_dir, 'index.md'), 'w') as f: - # f.write( - # parser.generate_global_index('Edward', parser_config.index, - # parser_config.reference_resolver)) + with open(os.path.join(output_dir, 'overview.md'), 'w') as f: + f.write( + parser.generate_global_index('Edward', parser_config.index, + parser_config.reference_resolver)) def add_dict_to_dict(add_from, add_to): diff --git a/docs/tex/api/index.tex b/docs/tex/api/index.tex index fe91289f4..ced76268a 100644 --- a/docs/tex/api/index.tex +++ b/docs/tex/api/index.tex @@ -36,6 +36,6 @@ \section{API and Documentation} Navigate modules enabling this analysis above. See the -\href{/api/ed}{reference page} for a list of the API. +\href{/api/overview}{overview page} for a list of the API. \subsubsection{References}\label{references} diff --git a/docs/tex/template-api.pandoc b/docs/tex/template-api.pandoc index e8a4cc3e2..d3b25be5c 100644 --- a/docs/tex/template-api.pandoc +++ b/docs/tex/template-api.pandoc @@ -114,6 +114,7 @@ $endfor$ API
+        <a class="button" href="/api/overview">Overview</a>
         {{toc}}
Date: Sun, 1 Oct 2017 22:06:06 -0700
Subject: [PATCH 10/10] fix links

---
 docs/tex/iclr2017.tex                          | 6 +++---
 docs/tex/tutorials/batch-training.tex          | 2 +-
 docs/tex/tutorials/criticism.tex               | 4 ++--
 docs/tex/tutorials/inference.tex               | 2 +-
 docs/tex/tutorials/mixture-density-network.tex | 4 +++-
 docs/tex/tutorials/model.tex                   | 2 +-
 docs/tex/tutorials/variational-inference.tex   | 2 +-
 notebooks/batch_training.ipynb                 | 2 +-
 notebooks/iclr2017.ipynb                       | 6 +++---
 9 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/docs/tex/iclr2017.tex b/docs/tex/iclr2017.tex
index 9483bb356..d04e6a68e 100644
--- a/docs/tex/iclr2017.tex
+++ b/docs/tex/iclr2017.tex
@@ -198,7 +198,7 @@ \subsubsection{Section 4. Compositional Representations for Inference}
 inference_m.update()
 \end{lstlisting}
 For more details, see the
-\href{/api/inference-compositionality}{inference compositionality} webpage.
+\href{/tutorials/inference-compositionality}{inference compositionality} webpage.
 See
 \href{https://github.com/blei-lab/edward/blob/master/examples/factor_analysis.py}{\texttt{examples/factor_analysis.py}}
 for a version performing Monte Carlo EM for logistic factor analysis
@@ -234,7 +234,7 @@ \subsubsection{Section 4. Compositional Representations for Inference}
 inference.initialize(scale={x: float(N) / M, z: float(N) / M})
 \end{lstlisting}
 For more details, see the
-\href{/api/inference-data-subsampling}{data subsampling} webpage.
+\href{/tutorials/data-subsampling}{data subsampling} webpage.
 
 \subsubsection{Section 5. Experiments}
 
@@ -346,7 +346,7 @@ \subsubsection{Appendix B. Inference Examples}
 \textbf{Figure *}. Stochastic variational inference
 \citep{hoffman2013stochastic}.
 For more details, see the
-\href{/api/inference-data-subsampling}{data subsampling} webpage.
+\href{/tutorials/data-subsampling}{data subsampling} webpage.
 
 \subsubsection{Appendix C. Complete Examples}
 
diff --git a/docs/tex/tutorials/batch-training.tex b/docs/tex/tutorials/batch-training.tex
index 65dc888a9..bccefb44c 100644
--- a/docs/tex/tutorials/batch-training.tex
+++ b/docs/tex/tutorials/batch-training.tex
@@ -233,7 +233,7 @@ \subsubsection{Footnotes}
 illustrated batch training for models with only global latent variables,
 which are variables are shared across all data points.
 For more complex strategies, see the
-\href{http://edwardlib.org/api/inference-data-subsampling} {inference
+\href{http://edwardlib.org/tutorials/data-subsampling} {inference
 data subsampling API}.
 
diff --git a/docs/tex/tutorials/criticism.tex b/docs/tex/tutorials/criticism.tex
index 46e7da043..83d997602 100644
--- a/docs/tex/tutorials/criticism.tex
+++ b/docs/tex/tutorials/criticism.tex
@@ -87,7 +87,7 @@ \subsubsection{Point Evaluation}
 in decision theory.
 Scoring rules are useful for model comparison, model selection, and
 model averaging.
-See the \href{/api/criticism}{criticism API} for further details.
+See the \href{/api/ed/criticisms}{criticism API} for further details.
 An example of point evaluation is in the
 \href{/tutorials/supervised-regression}{supervised learning
 (regression)} tutorial.
@@ -141,7 +141,7 @@ \subsubsection{Posterior predictive checks}
                latent_vars={z: qz, beta: qbeta})
 \end{lstlisting}
 
-See the \href{/api/criticism}{criticism API} for further details.
+See the \href{/api/ed/criticisms}{criticism API} for further details.
 
 PPCs are an excellent tool for revising models---simplifying or
 expanding the current model as one examines its fit to data.
diff --git a/docs/tex/tutorials/inference.tex b/docs/tex/tutorials/inference.tex index 07e7c015c..8e319edcf 100644 --- a/docs/tex/tutorials/inference.tex +++ b/docs/tex/tutorials/inference.tex @@ -45,7 +45,7 @@ \subsubsection{Inferring the posterior} posterior. For details on how to specify inference in Edward, see the -\href{/api/inference}{inference API}. We describe several examples in +\href{/api/ed/inferences}{inference API}. We describe several examples in detail in the \href{/tutorials/}{tutorials}. diff --git a/docs/tex/tutorials/mixture-density-network.tex b/docs/tex/tutorials/mixture-density-network.tex index 24284adeb..7e674693c 100644 --- a/docs/tex/tutorials/mixture-density-network.tex +++ b/docs/tex/tutorials/mixture-density-network.tex @@ -48,7 +48,9 @@ \subsubsection{Data} \includegraphics[width=650px]{/images/mixture-density-network-fig0.png} -We define TensorFlow placeholders, which will be used to manually feed batches of data during inference. This is \href{http://edwardlib.org/api/data}{one of many ways} to train models with data in Edward. +We define TensorFlow placeholders, which will be used to manually feed +batches of data during inference. This is +\href{http://edwardlib.org/tutorials/data}{one of many ways} to train models with data in Edward. \begin{lstlisting}[language=Python] X_ph = tf.placeholder(tf.float32, [None, D]) diff --git a/docs/tex/tutorials/model.tex b/docs/tex/tutorials/model.tex index 1d4a9ca68..b395ebf03 100644 --- a/docs/tex/tutorials/model.tex +++ b/docs/tex/tutorials/model.tex @@ -30,5 +30,5 @@ \subsection{Probabilistic Models} present in the data. It posits a generating process of the hidden structure. For details on how to specify a model in Edward, see the -\href{/api/model}{model API}. We describe several examples in detail +\href{/api/ed/models}{model API}. We describe several examples in detail in the \href{/tutorials/}{tutorials}. diff --git a/docs/tex/tutorials/variational-inference.tex b/docs/tex/tutorials/variational-inference.tex index 508abeb6c..db0da3f72 100644 --- a/docs/tex/tutorials/variational-inference.tex +++ b/docs/tex/tutorials/variational-inference.tex @@ -36,7 +36,7 @@ \subsection{Variational Inference} models of data to best approximate the true process. For details on variational inference classes defined in Edward, -see the \href{/api/inference}{inference API}. +see the \href{/api/ed/inferences}{inference API}. For background on specific variational inference algorithms in Edward, see the other inference \href{/tutorials/}{tutorials}. diff --git a/notebooks/batch_training.ipynb b/notebooks/batch_training.ipynb index 8de81c27c..6915dfa96 100644 --- a/notebooks/batch_training.ipynb +++ b/notebooks/batch_training.ipynb @@ -380,7 +380,7 @@ "illustrated batch training for models with only global latent variables,\n", "which are variables are shared across all data points.\n", "For more complex strategies, see the\n", - "[inference data subsampling API](http://edwardlib.org/api/inference-data-subsampling)." + "[inference data subsampling API](http://edwardlib.org/tutorials/data-subsampling)." 
] } ], diff --git a/notebooks/iclr2017.ipynb b/notebooks/iclr2017.ipynb index a93cb8320..0f3958f09 100644 --- a/notebooks/iclr2017.ipynb +++ b/notebooks/iclr2017.ipynb @@ -298,7 +298,7 @@ "metadata": {}, "source": [ "For more details, see the\n", - "[inference compositionality](http://edwardlib.org/api/inference-compositionality) webpage.\n", + "[inference compositionality](http://edwardlib.org/tutorials/inference-compositionality) webpage.\n", "See\n", "[`examples/factor_analysis.py`](https://github.com/blei-lab/edward/blob/master/examples/factor_analysis.py) for\n", "a version performing Monte Carlo EM for logistic factor analysis\n", @@ -346,7 +346,7 @@ "metadata": {}, "source": [ "For more details, see the\n", - "[data subsampling](http://edwardlib.org/api/inference-data-subsampling) webpage.\n", + "[data subsampling](http://edwardlib.org/tutorials/data-subsampling) webpage.\n", "\n", "## Section 5. Experiments\n", "\n", @@ -520,7 +520,7 @@ "\n", "__Figure *__. Stochastic variational inference (M. D. Hoffman, Blei, Wang, & Paisley, 2013). \n", "For more details, see the\n", - "[data subsampling](http://edwardlib.org/api/inference-data-subsampling) webpage.\n", + "[data subsampling](http://edwardlib.org/tutorials/data-subsampling) webpage.\n", "\n", "## Appendix C. Complete Examples\n", "\n",