text/gluing.tex
changeset 76 16d7f0938baa
parent 72 ed2594ff5870
child 79 8ef65f3bea2b
--- a/text/gluing.tex	Fri Jun 05 23:02:55 2009 +0000
+++ b/text/gluing.tex	Sun Jun 07 00:51:00 2009 +0000
@@ -244,157 +244,6 @@
 $$N \mapsto \bc_*(X \cup_{Y\du -Y} (N\times Y))$$ clearly gives an object in $\cG{M}$.
 Showing that it is an initial object is the content of the gluing theorem proved below.
 
-The definitions for a topological $A_\infty$-$n$-category are very similar to the above
-$n=1$ case.
-One replaces intervals with manifolds diffeomorphic to the ball $B^n$.
-Marked points are replaced by copies of $B^{n-1}$ in $\bdy B^n$.
-
-\nn{give examples: $A(J^n) = \bc_*(Z\times J)$ and $A(J^n) = C_*(\Maps(J \to M))$.}
-
-\todo{the motivating example $C_*(\Maps(X, M))$}
-
-
-
-\newcommand{\skel}[1]{\operatorname{skeleton}(#1)}
-
-Given a topological $A_\infty$-category $\cC$, we can construct an `algebraic' $A_\infty$ category $\skel{\cC}$. First, pick your
-favorite diffeomorphism $\phi: I \cup I \to I$.
-\begin{defn}
-We'll write $\skel{\cC} = (A, m_k)$. Define $A = \cC(I)$, and $m_2 : A \tensor A \to A$ by
-\begin{equation*}
-m_2 : \cC(I) \tensor \cC(I) \xrightarrow{\gl_{I,I}} \cC(I \cup I) \xrightarrow{\cC(\phi)} \cC(I).
-\end{equation*}
-Next, we define all the `higher associators' $m_k$ by
-\todo{}
-\end{defn}
-
-Given an `algebraic' $A_\infty$ category $(A, m_k)$, we can construct a topological $A_\infty$-category, which we call $\bc_*^A$. You should
-think of this as a generalisation of the blob complex, although the construction we give will \emph{not} specialise to exactly the usual definition
-in the case that $A$ is actually an associative category.
-
-We'll first define $\cT_{k,n}$ to be the set of planar forests consisting of $n-k$ trees, with a total of $n$ leaves. Thus
-\todo{$\cT_{0,n}$ has 1 element, with $n$ vertical lines, $\cT_{1,n}$ has $n-1$ elements, each with a single trivalent vertex, $\cT_{2,n}$ etc...}
-\begin{align*}
-\end{align*}
-
-\begin{defn}
-The topological $A_\infty$ category $\bc_*^A$ is doubly graded, by `blob degree' and `internal degree'. We'll write $\bc_k^A$ for the blob degree $k$ piece.
-The homological degree of an element $a \in \bc_*^A(J)$
-is the sum of the blob degree and the internal degree.
-
-We first define $\bc_0^A(J)$ as a vector space by
-\begin{equation*}
-\bc_0^A(J) = \DirectSum_{\substack{\{J_i\}_{i=1}^n \\ \mathclap{\bigcup_i J_i = J}}} \Tensor_{i=1}^n (\CD{J_i \to I} \tensor A).
-\end{equation*}
-(That is, for each division of $J$ into finitely many subintervals,
-we have the tensor product of chains of diffeomorphisms from each subinterval to the standard interval,
-and a copy of $A$ for each subinterval.)
-The internal degree of an element $(f_1 \tensor a_1, \ldots, f_n \tensor a_n)$ is the sum of the dimensions of the singular chains
-plus the sum of the homological degrees of the elements of $A$.
-The differential is defined just by the graded Leibniz rule and the differentials on $\CD{J_i \to I}$ and on $A$.
-
-Next,
-\begin{equation*}
-\bc_1^A(J) = \DirectSum_{\substack{\{J_i\}_{i=1}^n \\ \mathclap{\bigcup_i J_i = J}}} \DirectSum_{T \in \cT_{1,n}} \Tensor_{i=1}^n (\CD{J_i \to I} \tensor A).
-\end{equation*}
-\end{defn}
-
-\begin{figure}[!ht]
-\begin{equation*}
-\mathfig{0.7}{associahedron/A4-vertices}
-\end{equation*}
-\caption{The vertices of the $k$-dimensional associahedron are indexed by binary trees on $k+2$ leaves.}
-\label{fig:A4-vertices}
-\end{figure}
-
-\begin{figure}[!ht]
-\begin{equation*}
-\mathfig{0.7}{associahedron/A4-faces}
-\end{equation*}
-\caption{The faces of the $k$-dimensional associahedron are indexed by trees with $2$ vertices on $k+2$ leaves.}
-\label{fig:A4-faces}
-\end{figure}
-
-\newcommand{\tm}{\widetilde{m}}
-
-Let $\tm_1(a) = a$.
-
-We now define $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$, first giving an opaque formula, then explaining the combinatorics behind it.
-\begin{align}
-\notag \bdy(\tm_k(a_1 & \tensor \cdots \tensor a_k)) = \\
-\label{eq:bdy-tm-k-1}   & \phantom{+} \sum_{\ell'=0}^{k-1} (-1)^{\abs{\tm_k}+\sum_{j=1}^{\ell'} \abs{a_j}} \tm_k(a_1 \tensor \cdots \tensor \bdy a_{\ell'+1} \tensor \cdots \tensor a_k) + \\
-\label{eq:bdy-tm-k-2}   &          +  \sum_{\ell=1}^{k-1} \tm_{\ell}(a_1 \tensor \cdots \tensor a_{\ell}) \tensor \tm_{k-\ell}(a_{\ell+1} \tensor \cdots \tensor a_k) + \\
-\label{eq:bdy-tm-k-3}   &          +  \sum_{\ell=1}^{k-1} \sum_{\ell'=0}^{\ell-1} (-1)^{\abs{\tm_k}+\sum_{j=1}^{\ell'} \abs{a_j}} \tm_{\ell}(a_1 \tensor \cdots \tensor m_{k-\ell + 1}(a_{\ell' + 1} \tensor \cdots \tensor a_{\ell' + k - \ell + 1}) \tensor \cdots \tensor a_k)
-\end{align}
-The first set of terms in $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$ just have $\bdy$ acting on each argument $a_i$.
-The terms appearing in \eqref{eq:bdy-tm-k-2} and \eqref{eq:bdy-tm-k-3} are indexed by trees with $2$ vertices on $k+1$ leaves.
-Note here that we have one more leaf than there are arguments of $\tm_k$.
-(See Figure \ref{fig:A4-vertices}, in which the rightmost branches are helpfully drawn in red.)
-We will treat the vertices which involve a rightmost (red) branch differently from the vertices which only involve the first $k$ leaves.
-The terms in \eqref{eq:bdy-tm-k-2} arise in the cases in which both
-vertices are rightmost, and the corresponding term in $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$ is a tensor product of the form
-$$\tm_{\ell}(a_1 \tensor \cdots \tensor a_{\ell}) \tensor \tm_{k-\ell}(a_{\ell+1} \tensor \cdots \tensor a_k)$$
-where $\ell + 1$ and $k - \ell + 1$ are the number of branches entering the vertices.
-If only one vertex is rightmost, we get the term $$\tm_{\ell}(a_1 \tensor \cdots \tensor m_{k-\ell+1}(a_{\ell' + 1} \tensor \cdots \tensor a_{\ell' + k - \ell + 1}) \tensor \cdots \tensor a_k)$$
-in \eqref{eq:bdy-tm-k-3},
-where again $\ell + 1$ is the number of branches entering the rightmost vertex, $k-\ell+1$ is the number of branches entering the other vertex, and $\ell'$ is the number of edges meeting the rightmost vertex which start to the left of the other vertex.
-For example, we have
-\begin{align*}
-\bdy(\tm_2(a \tensor b)) & = \left(\tm_2(\bdy a \tensor b) + (-1)^{\abs{a}} \tm_2(a \tensor \bdy b)\right) + \\
-                         & \qquad - a \tensor b + m_2(a \tensor b) \\
-\bdy(\tm_3(a \tensor b \tensor c)) & = \left(- \tm_3(\bdy a \tensor b \tensor c) + (-1)^{\abs{a} + 1} \tm_3(a \tensor \bdy b \tensor c) + (-1)^{\abs{a} + \abs{b} + 1} \tm_3(a \tensor b \tensor \bdy c)\right) + \\
-                                   & \qquad + \left(- \tm_2(a \tensor b) \tensor c + a \tensor \tm_2(b \tensor c)\right) + \\
-                                   & \qquad + \left(- \tm_2(m_2(a \tensor b) \tensor c) + \tm_2(a \tensor m_2(b \tensor c)) + m_3(a \tensor b \tensor c)\right)
-\end{align*}
-\begin{align*}
-\bdy(& \tm_4(a \tensor b \tensor c \tensor d)) = \left(\tm_4(\bdy a \tensor b \tensor c \tensor d) + \cdots + \tm_4(a \tensor b \tensor c \tensor \bdy d)\right) + \\
-                                             & + \left(\tm_3(a \tensor b \tensor c) \tensor d + \tm_2(a \tensor b) \tensor \tm_2(c \tensor d) + a \tensor \tm_3(b \tensor c \tensor d)\right) + \\
-                                             & + \left(\tm_3(m_2(a \tensor b) \tensor c \tensor d) + \tm_3(a \tensor m_2(b \tensor c) \tensor d) + \tm_3(a \tensor b \tensor m_2(c \tensor d))\right. + \\
-                                             & + \left.\tm_2(m_3(a \tensor b \tensor c) \tensor d) + \tm_2(a \tensor m_3(b \tensor c \tensor d)) + m_4(a \tensor b \tensor c \tensor d)\right) \\
-\end{align*}
-See Figure \ref{fig:A4-terms}, comparing it against Figure \ref{fig:A4-faces}, to see this illustrated in the case $k=4$. There the $3$ faces closest
-to the top of the diagram have two rightmost vertices, while the other $6$ faces have only one.
-
-\begin{figure}[!ht]
-\begin{equation*}
-\mathfig{1.0}{associahedron/A4-terms}
-\end{equation*}
-\caption{The terms of $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$ correspond to the faces of the $(k-1)$-dimensional associahedron.}
-\label{fig:A4-terms}
-\end{figure}
-
-\begin{lem}
-This definition actually results in a chain complex, that is $\bdy^2 = 0$.
-\end{lem}
-\begin{proof}
-\newcommand{\T}{\text{---}}
-\newcommand{\ssum}[1]{{\sum}^{(#1)}}
-For the duration of this proof, inside a summation over variables $l_1, \ldots, l_m$, an expression with $m$ dashes will be interpreted
-by replacing each dash with contiguous factors from $a_1 \tensor \cdots \tensor a_k$, so the first dash takes the first $l_1$ factors, the second
-takes the next $l_2$ factors, and so on. Further, we'll write $\ssum{m}$ for $\sum_{\sum_{i=1}^m l_i = k}$.
-In this notation, the formula for the differential becomes
-\begin{align}
-\notag
-\bdy \tm(\T) & = \ssum{2} \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1,l_2} + \ssum{3} \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3} \\
-\intertext{and we calculate}
-\notag
-\bdy^2 \tm(\T) & = \ssum{2} \bdy \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1,l_2} \\
-\notag         & \qquad + \ssum{2} \tm(\T) \tensor \bdy \tm(\T) \times \sigma_{0;l_1,l_2} \\
-\notag         & \qquad + \ssum{3} \bdy \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3} \\
-\label{eq:d21} & = \ssum{3} \tm(\T) \tensor \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1+l_2,l_3} \sigma_{0;l_1,l_2} \\
-\label{eq:d22} & \qquad + \ssum{4} \tm(\T \tensor m(\T) \tensor \T) \tensor \tm(\T) \times \sigma_{0;l_1+l_2+l_3,l_4} \tau_{0;l_1,l_2,l_3} \\
-\label{eq:d23} & \qquad + \ssum{3} \tm(\T) \tensor \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1,l_2+l_3} \sigma_{l_1;l_2,l_3} \\
-\label{eq:d24} & \qquad + \ssum{4} \tm(\T) \tensor \tm(\T \tensor m(\T) \tensor \T) \times \sigma_{0;l_1,l_2+l_3+l_4} \tau_{l_1;l_2,l_3,l_4} \\
-\label{eq:d25} & \qquad + \ssum{4} \tm(\T \tensor m(\T) \tensor \T) \tensor \tm(\T) \times \tau_{0;l_1,l_2,l_3+l_4} ??? \\
-\label{eq:d26} & \qquad + \ssum{4} \tm(\T) \tensor \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1+l_2,l_3,l_4} \sigma_{0;l_1,l_2} \\
-\label{eq:d27} & \qquad + \ssum{5} \tm(\T \tensor m(\T) \tensor \T \tensor m(\T) \tensor \T) \times \tau_{0;l_1+l_2+l_3,l_4,l_5} \tau_{0;l_1,l_2,l_3}  \\
-\label{eq:d28} & \qquad + \ssum{5} \tm(\T \tensor m(\T \tensor m(\T) \tensor \T) \tensor \T) \times \tau_{0;l_1,l_2+l_3+l_4,l_5} ??? \\
-\label{eq:d29} & \qquad + \ssum{5} \tm(\T \tensor m(\T) \tensor \T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3+l_4+l_5} ???
-\end{align}
-Now, we see that the expressions on the right hand side of line \eqref{eq:d21} and those on \eqref{eq:d23} cancel. Similarly, line \eqref{eq:d22} cancels
-with \eqref{eq:d25}, \eqref{eq:d24} with \eqref{eq:d26}, and \eqref{eq:d27} with \eqref{eq:d29}. Finally, we need to see that \eqref{eq:d28} gives $0$,
-by the usual relations between the $m_k$ in an $A_\infty$ algebra.
-\end{proof}
 
 \nn{Need to let the input $n$-category $C$ be a graded thing (e.g. DG
 $n$-category or $A_\infty$ $n$-category). DG $n$-category case is pretty