Let $\tm_1(a) = a$.

We now define $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$, first giving an opaque formula, then explaining the combinatorics behind it.
\begin{align}
\notag \bdy(\tm_k(a_1 & \tensor \cdots \tensor a_k)) = \\
\label{eq:bdy-tm-k-1} & \phantom{+} \sum_{i=1}^k (-1)^{\sum_{j=1}^{i-1} \deg(a_j)} \tm_k(a_1 \tensor \cdots \tensor \bdy a_i \tensor \cdots \tensor a_k) + \\
\label{eq:bdy-tm-k-2} & + \sum_{\ell=1}^{k-1} \tm_{\ell}(a_1 \tensor \cdots \tensor a_{\ell}) \tensor \tm_{k-\ell}(a_{\ell+1} \tensor \cdots \tensor a_k) + \\
\label{eq:bdy-tm-k-3} & + \sum_{\ell=1}^{k-1} \sum_{\ell'=0}^{\ell-1} \tm_{\ell}(a_1 \tensor \cdots \tensor m_{k-\ell+1}(a_{\ell'+1} \tensor \cdots \tensor a_{\ell'+k-\ell+1}) \tensor \cdots \tensor a_k)
\end{align}
The first set of terms in $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$ just have $\bdy$ acting on each argument $a_i$.
The terms appearing in \eqref{eq:bdy-tm-k-2} and \eqref{eq:bdy-tm-k-3} are indexed by trees with $2$ vertices on $k+1$ leaves.
\end{equation*}
\caption{The terms of $\bdy(\tm_k(a_1 \tensor \cdots \tensor a_k))$ correspond to the faces of the $(k-1)$-dimensional associahedron.}
\label{fig:A4-terms}
\end{figure}
1163 |
1163 |
|
\begin{lem}
This definition actually results in a chain complex, that is $\bdy^2 = 0$.
\end{lem}
\begin{proof}
\newcommand{\T}{\text{---}}
\newcommand{\ssum}[1]{{\sum}^{(#1)}}
|
For the duration of this proof, inside a summation over variables $l_1, \ldots, l_m$, an expression with $m$ dashes will be interpreted
by replacing each dash with contiguous factors from $a_1 \tensor \cdots \tensor a_k$, so the first dash takes the first $l_1$ factors, the second
takes the next $l_2$ factors, and so on. Further, we'll write $\ssum{m}$ for $\sum_{\sum_{i=1}^m l_i = k}$.
In this notation, the formula for the differential becomes
|
\begin{align}
\notag
\bdy \tm(\T) & = \ssum{2} \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1,l_2} + \ssum{3} \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3} \\
\intertext{and we calculate}
\notag
\bdy^2 \tm(\T) & = \ssum{2} (\bdy \tm(\T)) \tensor \tm(\T) \times \sigma_{0;l_1,l_2} \\
\notag & \qquad + \ssum{2} \tm(\T) \tensor (\bdy \tm(\T)) \times \sigma_{0;l_1,l_2} \\
\notag & \qquad + \ssum{3} \bdy \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3} \\
\label{eq:d21} & = \ssum{3} \tm(\T) \tensor \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1+l_2,l_3} \sigma_{0;l_1,l_2} \\
\label{eq:d22} & \qquad + \ssum{4} \tm(\T \tensor m(\T) \tensor \T) \tensor \tm(\T) \times \sigma_{0;l_1+l_2+l_3,l_4} \tau_{0;l_1,l_2,l_3} \\
\label{eq:d23} & \qquad + \ssum{3} \tm(\T) \tensor \tm(\T) \tensor \tm(\T) \times \sigma_{0;l_1,l_2+l_3} \sigma_{l_1;l_2,l_3} \\
\label{eq:d24} & \qquad + \ssum{4} \tm(\T) \tensor \tm(\T \tensor m(\T) \tensor \T) \times \sigma_{0;l_1,l_2+l_3+l_4} \tau_{l_1;l_2,l_3,l_4} \\
\label{eq:d25} & \qquad + \ssum{4} \tm(\T \tensor m(\T) \tensor \T) \tensor \tm(\T) \times \tau_{0;l_1,l_2,l_3+l_4} ??? \\
\label{eq:d26} & \qquad + \ssum{4} \tm(\T) \tensor \tm(\T \tensor m(\T) \tensor \T) \times \tau_{0;l_1+l_2,l_3,l_4} \sigma_{0;l_1,l_2} \\
\label{eq:d27} & \qquad + \ssum{5} \tm(\T \tensor m(\T) \tensor \T \tensor m(\T) \tensor \T) \times \tau_{0;l_1+l_2+l_3,l_4,l_5} \tau_{0;l_1,l_2,l_3} \\
\label{eq:d28} & \qquad + \ssum{5} \tm(\T \tensor m(\T \tensor m(\T) \tensor \T) \tensor \T) \times \tau_{0;l_1,l_2+l_3+l_4,l_5} ??? \\
\label{eq:d29} & \qquad + \ssum{5} \tm(\T \tensor m(\T) \tensor \T \tensor m(\T) \tensor \T) \times \tau_{0;l_1,l_2,l_3+l_4+l_5} ???
\end{align}
|
1192 Now, we see the the expressions on the right hand side of line \eqref{eq:d21} and those on \eqref{eq:d23} cancel. Similarly, line \eqref{eq:d22} cancels |
|
1193 with \eqref{eq:d25}, \eqref{eq:d24} with \eqref{eq:d26}, and \eqref{eq:d27} with \eqref{eq:d29}. Finally, we need to see that \eqref{eq:d28} gives $0$, |
|
1194 by the usual relations between the $m_k$ in an $A_\infty$ algebra. |
|
1195 \end{proof} |
|
1196 |
\nn{Need to let the input $n$-category $C$ be a graded thing (e.g. DG
$n$-category or $A_\infty$ $n$-category). DG $n$-category case is pretty
easy, I think, so maybe it should be done earlier??}

\bigskip