diff -r 22b6b59c70e6 Lib/test/test_robotparser.py
--- a/Lib/test/test_robotparser.py	Fri Mar 01 14:53:45 2013 +0200
+++ b/Lib/test/test_robotparser.py	Mon Mar 04 09:40:18 2013 -0600
@@ -5,18 +5,7 @@
 from urllib.request import urlopen
 from test import support
 
-class RobotTestCase(unittest.TestCase):
-    def __init__(self, index, parser, url, good, agent):
-        unittest.TestCase.__init__(self)
-        if good:
-            self.str = "RobotTest(%d, good, %s)" % (index, url)
-        else:
-            self.str = "RobotTest(%d, bad, %s)" % (index, url)
-        self.parser = parser
-        self.url = url
-        self.good = good
-        self.agent = agent
-
+class _RobotTestCase:
     def runTest(self):
         if isinstance(self.url, tuple):
             agent, url = self.url
@@ -31,6 +20,22 @@
     def __str__(self):
         return self.str
 
+def make_RobotTestCase(index, parser, url, good, agent):
+    bases = (_RobotTestCase, unittest.TestCase)
+
+    case = type("RobotTestCase", bases, {})()
+
+    case.parser = parser
+    case.url = url
+    case.good = good
+    case.agent = agent
+    if good:
+        case.str = "RobotTest(%d, good, %s)" % (index, url)
+    else:
+        case.str = "RobotTest(%d, bad, %s)" % (index, url)
+
+    return case
+
 tests = unittest.TestSuite()
 
 def RobotTest(index, robots_txt, good_urls, bad_urls,
@@ -40,9 +45,10 @@
     parser = urllib.robotparser.RobotFileParser()
     parser.parse(lines)
     for url in good_urls:
-        tests.addTest(RobotTestCase(index, parser, url, 1, agent))
+        tests.addTest(make_RobotTestCase(index, parser, url, 1, agent))
     for url in bad_urls:
-        tests.addTest(RobotTestCase(index, parser, url, 0, agent))
+        tests.addTest(make_RobotTestCase(index, parser, url, 0, agent))
+
 
 # Examples from http://www.robotstxt.org/wc/norobots.html (fetched 2002)
 
@@ -269,10 +275,10 @@
         self.assertTrue(
             parser.can_fetch("*", "http://www.python.org/robots.txt"))
 
-def test_main():
-    support.run_unittest(NetworkTestCase)
-    support.run_unittest(tests)
+def load_tests(loader, suite, pattern):
+    suite.addTest(tests)
+    return suite
 
 if __name__=='__main__':
-    support.verbose = 1
-    test_main()
+    support.use_resources = ['network']
+    unittest.main()
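
For reference, the pattern the patch relies on can be exercised in a standalone module: a plain mixin carries runTest(), type() combines it with unittest.TestCase to build addressable test instances, and a module-level suite is handed to the runner through the load_tests protocol. The sketch below is illustrative only; the class and function names, the robots.txt rules, and the example.com URLs are made up for the example and are not part of the patch.

import unittest
import urllib.robotparser

# Mixin holding the check; it becomes a real test only once it is mixed
# with unittest.TestCase via type() below.
class _CanFetchCheck:
    def runTest(self):
        self.assertEqual(self.parser.can_fetch(self.agent, self.url),
                         self.good)

def make_case(parser, url, good, agent="sketch-agent"):
    # Build the concrete TestCase subclass at runtime and configure an
    # instance, mirroring make_RobotTestCase() in the patch.
    case = type("CanFetchCase", (_CanFetchCheck, unittest.TestCase), {})()
    case.parser = parser
    case.url = url
    case.good = good
    case.agent = agent
    return case

suite = unittest.TestSuite()
_parser = urllib.robotparser.RobotFileParser()
_parser.parse(["User-agent: *", "Disallow: /private/"])
suite.addTest(make_case(_parser, "http://example.com/index.html", True))
suite.addTest(make_case(_parser, "http://example.com/private/x.html", False))

def load_tests(loader, standard_tests, pattern):
    # unittest.main() and "python -m unittest" call this hook and run the
    # suite it returns instead of only scanning for TestCase classes.
    standard_tests.addTest(suite)
    return standard_tests

if __name__ == '__main__':
    unittest.main()

Because load_tests returns the prebuilt suite, discovery tools pick up the dynamically created cases even though no TestCase subclass is defined at module level, which is the same property the patch needs for test_robotparser.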