# -*- coding: utf-8 -*-
"""Tests for packaging.pypi.simple (crawling a PyPI simple index)."""
import os
import unittest

from packaging.tests.support import fake_dec
from packaging.pypi.simple import Crawler

try:
    import _thread
    from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
                                             PYPI_DEFAULT_STATIC_PATH)
except ImportError:
    _thread = None
    use_pypi_server = fake_dec
    from packaging import tests as packaging_tests
    PYPI_DEFAULT_STATIC_PATH = os.path.join(
        os.path.dirname(os.path.abspath(packaging_tests.__file__)),
        'pypiserver')

STD_THREAD_MSG = "Test PyPI requires thread support."


class TestName(unittest.TestCase):

    def _get_simple_crawler(self, server, base_url="/simple/", hosts=None,
                            *args, **kwargs):
        """Build and return a Crawler pointed at the test server's URLs."""
        if hosts is None:
            hosts = (server.full_address.replace("http://", ""),)
        kwargs['hosts'] = hosts
        return Crawler(server.full_address + base_url, *args, **kwargs)

    @unittest.skipIf(_thread is None, STD_THREAD_MSG)
    @use_pypi_server('foo_bar_baz')
    def test_name(self, server):
        # Create a crawler pointed at the test server.
        crawler = self._get_simple_crawler(server)

        # An empty search is the easiest way to list every project.
        projects = crawler.search_projects('')

        project_names = sorted(['foo', 'bar', 'baz'])
        names = sorted(p.name for p in projects)
        self.assertListEqual(names, project_names)
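

# A minimal sketch of a module-level runner, following the test_suite()
# convention used by other packaging.tests modules. The use of
# unittest.defaultTestLoader and the "test_suite" entry point here are
# assumptions, not part of the original file.
def test_suite():
    suite = unittest.TestSuite()
    suite.addTest(
        unittest.defaultTestLoader.loadTestsFromTestCase(TestName))
    return suite


if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")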