@@ -98,7 +98,14 @@ function walkFetchEmbed({ embedTokens, compile, fetch }, cb) {
         }
       }
 
-      cb({ token: currentToken, embedToken });
+      cb({
+        token: currentToken,
+        embedToken,
+        rowIndex: currentToken.rowIndex,
+        cellIndex: currentToken.cellIndex,
+        tokenRef: currentToken.tokenRef,
+      });
+
       if (++count >= embedTokens.length) {
         cb({});
       }
@@ -126,51 +133,82 @@ export function prerenderEmbed({ compiler, raw = '', fetch }, done) {
   const linkRE = compile.Lexer.rules.inline.normal.link;
   const links = tokens.links;
 
+  const linkMatcher = new RegExp(linkRE.source, 'g');
+
   tokens.forEach((token, index) => {
     if (token.type === 'paragraph') {
       token.text = token.text.replace(
-        new RegExp(linkRE.source, 'g'),
+        linkMatcher,
         (src, filename, href, title) => {
           const embed = compiler.compileEmbed(href, title);
-
           if (embed) {
             embedTokens.push({
               index,
+              tokenRef: token,
               embed,
             });
           }
-
           return src;
         },
       );
+    } else if (token.type === 'table') {
+      token.rows.forEach((row, rowIndex) => {
+        row.forEach((cell, cellIndex) => {
+          cell.text = cell.text.replace(
+            linkMatcher,
+            (src, filename, href, title) => {
+              const embed = compiler.compileEmbed(href, title);
+              if (embed) {
+                embedTokens.push({
+                  index,
+                  tokenRef: token,
+                  rowIndex,
+                  cellIndex,
+                  embed,
+                });
+              }
+              return src;
+            },
+          );
+        });
+      });
     }
   });
 
   // keep track of which tokens have been embedded so far
   // so that we know where to insert the embedded tokens as they
   // are returned
   const moves = [];
-  walkFetchEmbed({ compile, embedTokens, fetch }, ({ embedToken, token }) => {
-    if (token) {
-      // iterate through the array of previously inserted tokens
-      // to determine where the current embedded tokens should be inserted
-      let index = token.index;
-      moves.forEach(pos => {
-        if (index > pos.start) {
-          index += pos.length;
-        }
-      });
+  walkFetchEmbed(
+    { compile, embedTokens, fetch },
+    ({ embedToken, token, rowIndex, cellIndex, tokenRef }) => {
+      if (token) {
+        if (typeof rowIndex === 'number' && typeof cellIndex === 'number') {
+          const cell = tokenRef.rows[rowIndex][cellIndex];
+
+          cell.embedTokens = embedToken;
+        } else {
+          // iterate through the array of previously inserted tokens
+          // to determine where the current embedded tokens should be inserted
+          let index = token.index;
+          moves.forEach(pos => {
+            if (index > pos.start) {
+              index += pos.length;
+            }
+          });
 
-      Object.assign(links, embedToken.links);
+          Object.assign(links, embedToken.links);
 
-      tokens = tokens
-        .slice(0, index)
-        .concat(embedToken, tokens.slice(index + 1));
-      moves.push({ start: index, length: embedToken.length - 1 });
-    } else {
-      cached[raw] = tokens.concat();
-      tokens.links = cached[raw].links = links;
-      done(tokens);
-    }
-  });
+          tokens = tokens
+            .slice(0, index)
+            .concat(embedToken, tokens.slice(index + 1));
+          moves.push({ start: index, length: embedToken.length - 1 });
+        }
+      } else {
+        cached[raw] = tokens.concat();
+        tokens.links = cached[raw].links = links;
+        done(tokens);
+      }
+    },
+  );
 }
0 commit comments