--- Lib/robotparser.py.orig	2012-09-02 11:14:27.000000000 -0700
+++ Lib/robotparser.py	2012-09-02 11:08:50.000000000 -0700
@@ -28,6 +28,7 @@
         self.allow_all = False
         self.set_url(url)
         self.last_checked = 0
+        self.opener = URLopener()
 
     def mtime(self):
         """Returns the time the robots.txt file was last fetched.
@@ -53,11 +54,10 @@
 
     def read(self):
         """Reads the robots.txt URL and feeds it to the parser."""
-        opener = URLopener()
-        f = opener.open(self.url)
+        f = self.opener.open(self.url)
         lines = [line.strip() for line in f]
         f.close()
-        self.errcode = opener.errcode
+        self.errcode = self.opener.errcode
         if self.errcode in (401, 403):
             self.disallow_all = True
         elif self.errcode >= 400:
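
With this change the opener becomes an instance attribute, so callers can substitute their own before calling read(), for example to fetch robots.txt with a custom User-Agent. A minimal sketch against the patched module (the PoliteOpener name and the example URLs are illustrative, not part of the patch):

    import robotparser

    class PoliteOpener(robotparser.URLopener):
        # urllib uses the 'version' class attribute as the User-Agent
        # header; subclassing robotparser.URLopener keeps the errcode
        # tracking that read() depends on.
        version = "MyCrawler/1.0 (+http://crawler.example.com/)"

    rp = robotparser.RobotFileParser("http://www.example.com/robots.txt")
    rp.opener = PoliteOpener()    # swap in the custom opener before fetching
    rp.read()
    print rp.can_fetch("MyCrawler", "http://www.example.com/private/")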