@@ -122,6 +122,7 @@ export async function handleStreamingResponse(
   conversation: ConversationClass | Conversation,
   onChunk: (content: string) => void,
   abortController: AbortController,
+  retryCount = 0,
 ): Promise<void> {
   const data = conversation instanceof ConversationClass ? conversation.data : conversation;
   const model = conversation.model;
@@ -149,28 +150,37 @@ export async function handleStreamingResponse(
     enabledMCPs: getEnabledMCPs(),
   };
 
-  const reader = await StreamReader.fromFetch("/api/generate", {
-    method: "POST",
-    headers: {
-      "Content-Type": "application/json",
-    },
-    body: JSON.stringify(requestBody),
-    signal: abortController.signal,
-  });
+  try {
+    const reader = await StreamReader.fromFetch("/api/generate", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify(requestBody),
+      signal: abortController.signal,
+    });
 
-  let out = "";
-  for await (const chunk of reader.read()) {
-    if (chunk.type === "chunk" && chunk.content) {
-      out += chunk.content;
-      onChunk(out);
-    } else if (chunk.type === "error") {
-      throw new Error(chunk.error || "Stream error");
+    let out = "";
+    for await (const chunk of reader.read()) {
+      if (chunk.type === "chunk" && chunk.content) {
+        out += chunk.content;
+        onChunk(out);
+      } else if (chunk.type === "error") {
+        throw new Error(chunk.error || "Stream error");
+      }
+    }
+  } catch (error) {
+    if (error instanceof Error && error.message.includes("401") && retryCount === 0) {
+      await token.requestTokenFromParent();
+      return handleStreamingResponse(conversation, onChunk, abortController, retryCount + 1);
     }
+    throw error;
   }
 }
 
 export async function handleNonStreamingResponse(
   conversation: ConversationClass | Conversation,
+  retryCount = 0,
 ): Promise<{ message: ChatCompletionOutputMessage; completion_tokens: number }> {
   const data = conversation instanceof ConversationClass ? conversation.data : conversation;
   const model = conversation.model;
@@ -207,6 +217,10 @@ export async function handleNonStreamingResponse(
   });
 
   if (!response.ok) {
+    if (response.status === 401 && retryCount === 0) {
+      await token.requestTokenFromParent();
+      return handleNonStreamingResponse(conversation, retryCount + 1);
+    }
     const error = await response.json();
     throw new Error(error.error || "Failed to generate response");
   }