import io
import os
import unittest
import urllib.robotparser
from collections import namedtuple
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
try:
    import threading
except ImportError:
    threading = None

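# For orientation, a minimal sketch (not part of the tests) of the
# urllib.robotparser calls exercised below; "mybot" is an illustrative
# placeholder agent:
#
#     rp = urllib.robotparser.RobotFileParser()
#     rp.parse(io.StringIO("User-agent: *\nDisallow: /tmp/\n").readlines())
#     rp.can_fetch("mybot", "/tmp/x")    # -> False
#     rp.can_fetch("mybot", "/index")    # -> True
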
class BaseRobotTest:
    robots_txt = ''
    agent = 'test_robotparser'
    good = []
    bad = []

    def setUp(self):
        lines = io.StringIO(self.robots_txt).readlines()
        self.parser = urllib.robotparser.RobotFileParser()
        self.parser.parse(lines)

    def get_agent_and_url(self, url):
        if isinstance(url, tuple):
            agent, url = url
            return agent, url
        return self.agent, url

    def test_good_urls(self):
        for url in self.good:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertTrue(self.parser.can_fetch(agent, url))

    def test_bad_urls(self):
        for url in self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                self.assertFalse(self.parser.can_fetch(agent, url))

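# Each concrete test case below supplies a robots_txt payload plus 'good'
# and 'bad' URL lists; a list entry may also be an (agent, url) tuple to
# override the default agent for that URL, as get_agent_and_url() shows.
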
class UserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /cyberworld/map/ # This is an infinite virtual URL space
Disallow: /tmp/ # these will soon disappear
Disallow: /foo.html
    """
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html', '/tmp/xxx', '/foo.html']

class CrawlDelayAndCustomAgentTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# robots.txt for http://www.example.com/

User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/ # This is an infinite virtual URL space

# Cybermapper knows where to go.
User-agent: cybermapper
Disallow:
    """
    good = ['/', '/test.html', ('cybermapper', '/cyberworld/map/index.html')]
    bad = ['/cyberworld/map/index.html']

class RejectAllRobotsTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
# go away
User-agent: *
Disallow: /
    """
    good = []
    bad = ['/cyberworld/map/index.html', '/', '/tmp/']

class BaseRequestRateTest(BaseRobotTest):
    # Subclasses set crawl_delay and request_rate; a false value (such as
    # None) makes the corresponding check below a no-op.

    def test_request_rate(self):
        for url in self.good + self.bad:
            agent, url = self.get_agent_and_url(url)
            with self.subTest(url=url, agent=agent):
                if self.crawl_delay:
                    self.assertEqual(
                        self.parser.crawl_delay(agent), self.crawl_delay
                    )
                if self.request_rate:
                    self.assertEqual(
                        self.parser.request_rate(agent).requests,
                        self.request_rate.requests
                    )
                    self.assertEqual(
                        self.parser.request_rate(agent).seconds,
                        self.request_rate.seconds
                    )

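# For example, given the "figtree" group below with "Crawl-delay: 3" and
# "Request-rate: 9/30", the parser is expected to report:
#
#     parser.crawl_delay('figtree')    # -> 3
#     parser.request_rate('figtree')   # -> a (requests=9, seconds=30) pair
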
class CrawlDelayAndRequestRateTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: figtree
Crawl-delay: 3
Request-rate: 9/30
Disallow: /tmp
Disallow: /a%3cd.html
Disallow: /a%2fb.html
Disallow: /%7ejoe/index.html
    """
    agent = 'figtree'
    request_rate = namedtuple('req_rate', 'requests seconds')(9, 30)
    crawl_delay = 3
    good = [('figtree', '/foo.html')]
    bad = ['/tmp', '/tmp.html', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html',
           '/a%2fb.html', '/~joe/index.html']

class DifferentAgentTest(CrawlDelayAndRequestRateTest):
    agent = 'FigTree Robot libwww-perl/5.04'
    # crawl_delay and request_rate are not asserted for this agent; setting
    # them to None makes the inherited test_request_rate skip those checks
    # while the same robots.txt is still parsed with the new agent string.
    request_rate = None
    crawl_delay = None

class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Disallow: /tmp/
Disallow: /a%3Cd.html
Disallow: /a/b.html
Disallow: /%7ejoe/index.html
Crawl-delay: 3
Request-rate: 9/banana
    """
    good = ['/tmp']
    bad = ['/tmp/', '/tmp/a.html', '/a%3cd.html', '/a%3Cd.html', '/a/b.html',
           '/%7Ejoe/index.html']
    crawl_delay = 3

class InvalidCrawlDelayTest(BaseRobotTest, unittest.TestCase):
    # From bug report #523041
    robots_txt = """\
User-Agent: *
Disallow: /.
Crawl-delay: pears
    """
    good = ['/foo.html']
    # The bug report says "/" should be denied, but that is not in the RFC.
    bad = []

class AnotherInvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
    # Also test that Allow and Disallow work well with each other.
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
Request-rate: whale/banana
    """
    agent = 'Googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']

class UserAgentOrderingTest(BaseRobotTest, unittest.TestCase):
    # The order of User-agent entries matters.  Note that this file is
    # technically incorrect because "Googlebot" is a substring of
    # "Googlebot-Mobile".
    robots_txt = """\
User-agent: Googlebot
Disallow: /

User-agent: Googlebot-Mobile
Allow: /
    """
    agent = 'Googlebot'
    bad = ['/something.jpg']

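# UserAgentGoogleMobileTest below exercises the substring problem: entries
# are matched in file order, so 'Googlebot-Mobile' hits the first
# (Googlebot) entry and is disallowed despite its own Allow rule:
#
#     parser.can_fetch('Googlebot-Mobile', '/something.jpg')  # -> False
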
class UserAgentGoogleMobileTest(UserAgentOrderingTest):
    agent = 'Googlebot-Mobile'

class GoogleURLOrderingTest(BaseRobotTest, unittest.TestCase):
    # Google also got the order wrong: URLs must be listed from the more
    # specific to the more general.
    robots_txt = """\
User-agent: Googlebot
Allow: /folder1/myfile.html
Disallow: /folder1/
    """
    agent = 'googlebot'
    good = ['/folder1/myfile.html']
    bad = ['/folder1/anotherfile.html']

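# Python's parser applies rule lines in file order and the first match
# wins, which is why the specific Allow line must precede the broader
# Disallow line above:
#
#     parser.can_fetch('googlebot', '/folder1/myfile.html')       # -> True
#     parser.can_fetch('googlebot', '/folder1/anotherfile.html')  # -> False
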
class DisallowQueryStringTest(BaseRobotTest, unittest.TestCase):
    # see issue #6325 for details
    robots_txt = """\
User-agent: *
Disallow: /some/path?name=value
    """
    good = ['/some/path']
    bad = ['/some/path?name=value']

class UseFirstUserAgentWildcardTest(BaseRobotTest, unittest.TestCase):
    # obey the first * entry (#4108)
    robots_txt = """\
User-agent: *
Disallow: /some/path

User-agent: *
Disallow: /another/path
    """
    good = ['/another/path']
    bad = ['/some/path']

class EmptyQueryStringTest(BaseRobotTest, unittest.TestCase):
    # normalize the URL first (#17403)
    robots_txt = """\
User-agent: *
Allow: /some/path?
Disallow: /another/path?
    """
    good = ['/some/path?']
    bad = ['/another/path?']

class DefaultEntryTest(BaseRequestRateTest, unittest.TestCase):
    robots_txt = """\
User-agent: *
Crawl-delay: 1
Request-rate: 3/15
Disallow: /cyberworld/map/
    """
    request_rate = namedtuple('req_rate', 'requests seconds')(3, 15)
    crawl_delay = 1
    good = ['/', '/test.html']
    bad = ['/cyberworld/map/index.html']

class RobotHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_error(403, "Forbidden access")

    def log_message(self, format, *args):
        pass  # silence the default request logging to stderr

@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):

    def setUp(self):
        self.server = HTTPServer((support.HOST, 0), RobotHandler)

        self.t = threading.Thread(
            name='HTTPServer serving',
            target=self.server.serve_forever,
            # Short poll interval to make the test finish quickly.
            # Time between requests is short enough that we won't wake
            # up spuriously too many times.
            kwargs={'poll_interval': 0.01})
        self.t.daemon = True  # In case this function raises.
        self.t.start()

    def tearDown(self):
        self.server.shutdown()
        self.t.join()
        self.server.server_close()

    @support.reap_threads
    def testPasswordProtectedSite(self):
        addr = self.server.server_address
        url = 'http://' + support.HOST + ':' + str(addr[1])
        robots_url = url + "/robots.txt"
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        parser.read()
        self.assertFalse(parser.can_fetch("*", robots_url))

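# The assertion above relies on RobotFileParser.read() treating a 401/403
# response for robots.txt as "disallow all", so can_fetch() returns False
# for every URL on such a site.
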
class NetworkTestCase(unittest.TestCase):

    base_url = 'http://www.pythontest.net/'
    robots_txt = '{}elsewhere/robots.txt'.format(base_url)

    @classmethod
    def setUpClass(cls):
        support.requires('network')
        with support.transient_internet(cls.base_url):
            cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
            cls.parser.read()

    def url(self, path):
        return '{}{}{}'.format(
            self.base_url, path, '/' if not os.path.splitext(path)[1] else ''
        )

    def test_basic(self):
        self.assertFalse(self.parser.disallow_all)
        self.assertFalse(self.parser.allow_all)
        self.assertGreater(self.parser.mtime(), 0)
        self.assertFalse(self.parser.crawl_delay('*'))
        self.assertFalse(self.parser.request_rate('*'))

    def test_can_fetch(self):
        self.assertTrue(self.parser.can_fetch('*', self.url('elsewhere')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.base_url))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('brian')))
        self.assertFalse(self.parser.can_fetch('Nutch', self.url('webstats')))
        self.assertFalse(self.parser.can_fetch('*', self.url('webstats')))
        self.assertTrue(self.parser.can_fetch('*', self.base_url))

    def test_read_404(self):
        parser = urllib.robotparser.RobotFileParser(self.url('i-robot.txt'))
        parser.read()
        self.assertTrue(parser.allow_all)
        self.assertFalse(parser.disallow_all)
        self.assertEqual(parser.mtime(), 0)
        self.assertIsNone(parser.crawl_delay('*'))
        self.assertIsNone(parser.request_rate('*'))


if __name__ == '__main__':
    unittest.main()
|---|