bitbake: fetch/wget: Increase timeout to 100s from 30s

Testing shows the worst-case CDN response time can be up to 100s. The wget fetcher
is used for accessing sstate from the CDN, so increase our timeouts there to match
our worst-case response times.

(Bitbake rev: c7f282cd27edfd78830b61db586ed669808893a5)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Signed-off-by: Steve Sakoman <steve@sakoman.com>
commit dbb243bf0b
parent dd77302b2fe
Author:    Richard Purdie
Committer: Steve Sakoman
Date:      2024-11-20 21:26:03 +00:00

@@ -87,7 +87,7 @@ class Wget(FetchMethod):
         if not ud.localfile:
             ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"
 
         if ud.type == 'ftp' or ud.type == 'ftps':
             self.basecmd += " --passive-ftp"
@@ -371,7 +371,7 @@ class Wget(FetchMethod):
                 except (FileNotFoundError, netrc.NetrcParseError):
                     pass
 
-                with opener.open(r, timeout=30) as response:
+                with opener.open(r, timeout=100) as response:
                     pass
             except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
                 if try_again:
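
For context, the second hunk is the fetcher's status probe: it opens the URL with a HEAD-style
request and only cares whether a response arrives before the timeout. Below is a minimal
standalone sketch of that pattern, assuming plain urllib; the helper name probe_url and the
example URL are illustrative only and not part of bitbake.

    import urllib.request
    import urllib.error

    # Minimal sketch of a checkstatus-style probe, assuming plain urllib.
    # The 100s timeout mirrors the measured worst-case CDN response time;
    # the URL passed in below is a placeholder, not the real sstate mirror.
    def probe_url(url, timeout=100):
        req = urllib.request.Request(url, method="HEAD")
        try:
            with urllib.request.urlopen(req, timeout=timeout) as response:
                # Any response that arrives within the timeout and is not an
                # error status counts as "reachable".
                return response.status < 400
        except (urllib.error.URLError, ConnectionResetError, TimeoutError):
            return False

    if __name__ == "__main__":
        print(probe_url("https://example.com/sstate/"))

Note that the same 100s figure appears in both hunks: -T 100 for the wget command line and
timeout=100 for the Python-side status check, so both code paths tolerate the measured
worst-case CDN latency.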