@@ -60,7 +60,7 @@ import { MESSAGES } from './util/messages'
// Namespaced debug logger for this module's API request tracing.
const debug = Debug('api')
// Maximum retry attempts for failed requests; presumably assigned during
// module initialization elsewhere in this file — TODO confirm.
let MAX_RETRY_LIMIT
let RETRY_DELAY_BASE = 200 // Default base delay in milliseconds
let TIMEOUT = 60000 // Increased from 30000 to 60000 (60 seconds) for large stack syncs
// Contentstack client/config handle (provides apis.sync paths); initialized
// elsewhere in this module — NOTE(review): verify where it is assigned.
let Contentstack
6666/**
@@ -186,21 +186,26 @@ export const get = (req, RETRY = 1) => {
186186 }
187187
188188 // Clear the invalid token parameters and reinitialize
189- if ( req . qs . sync_token ) {
190- delete req . qs . sync_token
191- }
192- if ( req . qs . pagination_token ) {
193- delete req . qs . pagination_token
194- }
189+ delete req . qs . sync_token
190+ delete req . qs . pagination_token
195191 req . qs . init = true
196-
192+ // Reset req.path so it gets rebuilt from Contentstack.apis.sync
193+ // (req.path has the old query string baked in from line 109)
194+ delete req . path
195+
197196 // Mark this as a recovery attempt to prevent infinite loops
198197 if ( ! req . _error141Recovery ) {
199198 req . _error141Recovery = true
200199 debug ( 'Retrying with init=true after Error 141' )
201- return get ( req , 1 ) // Reset retry counter for fresh start
202- . then ( resolve )
203- . catch ( reject )
200+ // Use delayed retry
201+ timeDelay = Math . pow ( Math . SQRT2 , RETRY ) * RETRY_DELAY_BASE
202+ debug ( `Error 141 recovery: waiting ${ timeDelay } ms before retry` )
203+
204+ return setTimeout ( ( ) => {
205+ return get ( req , RETRY )
206+ . then ( resolve )
207+ . catch ( reject )
208+ } , timeDelay )
204209 } else {
205210 debug ( 'Error 141 recovery already attempted, failing to prevent infinite loop' )
206211 }
@@ -223,14 +228,30 @@ export const get = (req, RETRY = 1) => {
223228 reject ( new Error ( 'Request timeout' ) )
224229 } )
225230
226- // Enhanced error handling for socket hang ups and connection resets
231+ // Enhanced error handling for network and connection errors
227232 httpRequest . on ( 'error' , ( error : any ) => {
228233 debug ( MESSAGES . API . REQUEST_ERROR ( options . path , error ?. message , error ?. code ) )
229234
230- // Handle socket hang up and connection reset errors with retry
231- if ( ( error ?. code === 'ECONNRESET' || error ?. message ?. includes ( 'socket hang up' ) ) && RETRY <= MAX_RETRY_LIMIT ) {
235+ // List of retryable network error codes
236+ const retryableErrors = [
237+ 'ECONNRESET' , // Connection reset by peer
238+ 'ETIMEDOUT' , // Connection timeout
239+ 'ECONNREFUSED' , // Connection refused
240+ 'ENOTFOUND' , // DNS lookup failed
241+ 'ENETUNREACH' , // Network unreachable
242+ 'EAI_AGAIN' , // DNS lookup timeout
243+ 'EPIPE' , // Broken pipe
244+ 'EHOSTUNREACH' , // Host unreachable
245+ ]
246+
247+ // Check if error is retryable
248+ const isRetryable = retryableErrors . includes ( error ?. code ) ||
249+ error ?. message ?. includes ( 'socket hang up' ) ||
250+ error ?. message ?. includes ( 'ETIMEDOUT' )
251+
252+ if ( isRetryable && RETRY <= MAX_RETRY_LIMIT ) {
232253 timeDelay = Math . pow ( Math . SQRT2 , RETRY ) * RETRY_DELAY_BASE
233- debug ( MESSAGES . API . SOCKET_HANGUP_RETRY ( options . path , timeDelay , RETRY , MAX_RETRY_LIMIT ) )
254+ debug ( `Network error ${ error ?. code || error ?. message } : waiting ${ timeDelay } ms before retry ${ RETRY } / ${ MAX_RETRY_LIMIT } ` )
234255 RETRY ++
235256
236257 return setTimeout ( ( ) => {
0 commit comments