@@ -827,14 +827,16 @@ async def fail_crawl(
         crawl: CrawlSpec,
         status: CrawlStatus,
         pods: dict,
-        stats: Optional[CrawlStats] = None,
-        fail_reason: Optional[str] = "",
+        stats: CrawlStats,
+        redis: Redis,
     ) -> bool:
         """Mark crawl as failed, log crawl state and print crawl logs, if possible"""
         prev_state = status.state

         failed_state: Literal["failed", "failed_not_logged_in"] = "failed"

+        fail_reason = await redis.get(f"{crawl.id}:failReason")
+
         if fail_reason == "not_logged_in":
             failed_state = "failed_not_logged_in"
@@ -1588,7 +1590,7 @@ async def update_crawl_state(
             # check if one-page crawls actually succeeded
             # if only one page found, and no files, assume failed
             if status.pagesFound == 1 and not status.filesAdded:
-                await self.fail_crawl(crawl, status, pods, stats)
+                await self.fail_crawl(crawl, status, pods, stats, redis)
                 return status

             state: TYPE_NON_RUNNING_STATES
@@ -1611,9 +1613,7 @@ async def update_crawl_state(
             if status.stopping and not status.pagesDone:
                 await self.mark_finished(crawl, status, "canceled", stats)
             else:
-                fail_reason = await redis.get(f"{crawl.id}:failReason")
-
-                await self.fail_crawl(crawl, status, pods, stats, fail_reason)
+                await self.fail_crawl(crawl, status, pods, stats, redis)

         # check for other statuses, default to "running"
         else:
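
Taken together, the patch inverts the data flow: rather than each call site fetching the failure reason from Redis and passing it into fail_crawl, the operator now hands fail_crawl the Redis client and lets it read the `{crawl.id}:failReason` key itself. Below is a minimal sketch of that lookup, assuming redis.asyncio from redis-py 5.x with decode_responses=True; resolve_failed_state and the demo names are hypothetical stand-ins for illustration, not the real operator API:

from typing import Literal, Optional

from redis.asyncio import Redis


async def resolve_failed_state(
    crawl_id: str, redis: Redis
) -> Literal["failed", "failed_not_logged_in"]:
    # The crawler records why it gave up under "<crawl id>:failReason";
    # reading it here means callers only need to pass the client.
    # With decode_responses=True, get() returns str, or None if unset.
    fail_reason: Optional[str] = await redis.get(f"{crawl_id}:failReason")
    if fail_reason == "not_logged_in":
        return "failed_not_logged_in"
    return "failed"


async def demo() -> None:
    # Crawler-side write, then the operator-side read sketched above.
    redis = Redis(host="localhost", port=6379, decode_responses=True)
    await redis.set("crawl-123:failReason", "not_logged_in")
    assert await resolve_failed_state("crawl-123", redis) == "failed_not_logged_in"
    await redis.aclose()  # aclose() is the redis-py 5.x close method

One upshot is visible in the second hunk: the one-page-crawl failure path previously called fail_crawl with fail_reason defaulting to an empty string, so a not_logged_in failure reaching that path would have been labeled plain "failed"; routing the lookup through fail_crawl itself closes that gap.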