diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 64e915d042d4a0..6a15a3bc6ebddf 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -1386,7 +1386,7 @@ containing parameters::
    >>> import urllib.request
    >>> import urllib.parse
    >>> params = urllib.parse.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})
-   >>> url = "http://www.musi-cal.com/cgi-bin/query?%s" % params
+   >>> url = "https://www.python.org/search?%s" % params
    >>> with urllib.request.urlopen(url) as f:
    ...     print(f.read().decode('utf-8'))
    ...
@@ -1398,7 +1398,7 @@ from urlencode is encoded to bytes before it is sent to urlopen as data::

    >>> import urllib.parse
    >>> data = urllib.parse.urlencode({'spam': 1, 'eggs': 2, 'bacon': 0})
    >>> data = data.encode('ascii')
-   >>> with urllib.request.urlopen("http://requestb.in/xrbl82xr", data) as f:
+   >>> with urllib.request.urlopen("https://httpbin.org/post", data) as f:
    ...     print(f.read().decode('utf-8'))
    ...
diff --git a/Doc/library/urllib.robotparser.rst b/Doc/library/urllib.robotparser.rst
index 492c65ae209d92..1fa7fc13baa539 100644
--- a/Doc/library/urllib.robotparser.rst
+++ b/Doc/library/urllib.robotparser.rst
@@ -18,7 +18,7 @@

 This module provides a single class, :class:`RobotFileParser`, which answers
 questions about whether or not a particular user agent can fetch a URL on the
 website that published the :file:`robots.txt` file.  For more details on the
-structure of :file:`robots.txt` files, see http://www.robotstxt.org/orig.html.
+structure of :file:`robots.txt` files, see :rfc:`9309`.

 .. class:: RobotFileParser(url='')