mirror of
https://github.com/python/cpython.git
synced 2025-11-01 06:01:29 +00:00
initial import of the packaging package in the standard library
This commit is contained in:
parent
566f8a646e
commit
1231a4e097
193 changed files with 30376 additions and 149 deletions
17
Lib/packaging/__init__.py
Normal file
17
Lib/packaging/__init__.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
"""Support for packaging, distribution and installation of Python projects.
|
||||
|
||||
Third-party tools can use parts of packaging as building blocks
|
||||
without causing the other modules to be imported:
|
||||
|
||||
import packaging.version
|
||||
import packaging.metadata
|
||||
import packaging.pypi.simple
|
||||
import packaging.tests.pypi_server
|
||||
"""
|
||||
|
||||
from logging import getLogger
|
||||
|
||||
__all__ = ['__version__', 'logger']
|
||||
|
||||
__version__ = "1.0a3"
|
||||
logger = getLogger('packaging')
|
||||
552
Lib/packaging/_trove.py
Normal file
552
Lib/packaging/_trove.py
Normal file
|
|
@ -0,0 +1,552 @@
|
|||
"""Temporary helper for create."""
|
||||
|
||||
# XXX get the list from PyPI and cache it instead of hardcoding
|
||||
|
||||
# XXX see if it would be more useful to store it as another structure
|
||||
# than a list of strings
|
||||
|
||||
all_classifiers = [
|
||||
'Development Status :: 1 - Planning',
|
||||
'Development Status :: 2 - Pre-Alpha',
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Development Status :: 4 - Beta',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Development Status :: 6 - Mature',
|
||||
'Development Status :: 7 - Inactive',
|
||||
'Environment :: Console',
|
||||
'Environment :: Console :: Curses',
|
||||
'Environment :: Console :: Framebuffer',
|
||||
'Environment :: Console :: Newt',
|
||||
'Environment :: Console :: svgalib',
|
||||
"Environment :: Handhelds/PDA's",
|
||||
'Environment :: MacOS X',
|
||||
'Environment :: MacOS X :: Aqua',
|
||||
'Environment :: MacOS X :: Carbon',
|
||||
'Environment :: MacOS X :: Cocoa',
|
||||
'Environment :: No Input/Output (Daemon)',
|
||||
'Environment :: Other Environment',
|
||||
'Environment :: Plugins',
|
||||
'Environment :: Web Environment',
|
||||
'Environment :: Web Environment :: Buffet',
|
||||
'Environment :: Web Environment :: Mozilla',
|
||||
'Environment :: Web Environment :: ToscaWidgets',
|
||||
'Environment :: Win32 (MS Windows)',
|
||||
'Environment :: X11 Applications',
|
||||
'Environment :: X11 Applications :: Gnome',
|
||||
'Environment :: X11 Applications :: GTK',
|
||||
'Environment :: X11 Applications :: KDE',
|
||||
'Environment :: X11 Applications :: Qt',
|
||||
'Framework :: BFG',
|
||||
'Framework :: Buildout',
|
||||
'Framework :: Chandler',
|
||||
'Framework :: CubicWeb',
|
||||
'Framework :: Django',
|
||||
'Framework :: IDLE',
|
||||
'Framework :: Paste',
|
||||
'Framework :: Plone',
|
||||
'Framework :: Pylons',
|
||||
'Framework :: Setuptools Plugin',
|
||||
'Framework :: Trac',
|
||||
'Framework :: TurboGears',
|
||||
'Framework :: TurboGears :: Applications',
|
||||
'Framework :: TurboGears :: Widgets',
|
||||
'Framework :: Twisted',
|
||||
'Framework :: ZODB',
|
||||
'Framework :: Zope2',
|
||||
'Framework :: Zope3',
|
||||
'Intended Audience :: Customer Service',
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: Education',
|
||||
'Intended Audience :: End Users/Desktop',
|
||||
'Intended Audience :: Financial and Insurance Industry',
|
||||
'Intended Audience :: Healthcare Industry',
|
||||
'Intended Audience :: Information Technology',
|
||||
'Intended Audience :: Legal Industry',
|
||||
'Intended Audience :: Manufacturing',
|
||||
'Intended Audience :: Other Audience',
|
||||
'Intended Audience :: Religion',
|
||||
'Intended Audience :: Science/Research',
|
||||
'Intended Audience :: System Administrators',
|
||||
'Intended Audience :: Telecommunications Industry',
|
||||
'License :: Aladdin Free Public License (AFPL)',
|
||||
'License :: DFSG approved',
|
||||
'License :: Eiffel Forum License (EFL)',
|
||||
'License :: Free For Educational Use',
|
||||
'License :: Free For Home Use',
|
||||
'License :: Free for non-commercial use',
|
||||
'License :: Freely Distributable',
|
||||
'License :: Free To Use But Restricted',
|
||||
'License :: Freeware',
|
||||
'License :: Netscape Public License (NPL)',
|
||||
'License :: Nokia Open Source License (NOKOS)',
|
||||
'License :: OSI Approved',
|
||||
'License :: OSI Approved :: Academic Free License (AFL)',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'License :: OSI Approved :: Apple Public Source License',
|
||||
'License :: OSI Approved :: Artistic License',
|
||||
'License :: OSI Approved :: Attribution Assurance License',
|
||||
'License :: OSI Approved :: BSD License',
|
||||
'License :: OSI Approved :: Common Public License',
|
||||
'License :: OSI Approved :: Eiffel Forum License',
|
||||
'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)',
|
||||
'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)',
|
||||
'License :: OSI Approved :: GNU Affero General Public License v3',
|
||||
'License :: OSI Approved :: GNU Free Documentation License (FDL)',
|
||||
'License :: OSI Approved :: GNU General Public License (GPL)',
|
||||
'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
|
||||
'License :: OSI Approved :: IBM Public License',
|
||||
'License :: OSI Approved :: Intel Open Source License',
|
||||
'License :: OSI Approved :: ISC License (ISCL)',
|
||||
'License :: OSI Approved :: Jabber Open Source License',
|
||||
'License :: OSI Approved :: MIT License',
|
||||
'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)',
|
||||
'License :: OSI Approved :: Motosoto License',
|
||||
'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)',
|
||||
'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
|
||||
'License :: OSI Approved :: Nethack General Public License',
|
||||
'License :: OSI Approved :: Nokia Open Source License',
|
||||
'License :: OSI Approved :: Open Group Test Suite License',
|
||||
'License :: OSI Approved :: Python License (CNRI Python License)',
|
||||
'License :: OSI Approved :: Python Software Foundation License',
|
||||
'License :: OSI Approved :: Qt Public License (QPL)',
|
||||
'License :: OSI Approved :: Ricoh Source Code Public License',
|
||||
'License :: OSI Approved :: Sleepycat License',
|
||||
'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)',
|
||||
'License :: OSI Approved :: Sun Public License',
|
||||
'License :: OSI Approved :: University of Illinois/NCSA Open Source License',
|
||||
'License :: OSI Approved :: Vovida Software License 1.0',
|
||||
'License :: OSI Approved :: W3C License',
|
||||
'License :: OSI Approved :: X.Net License',
|
||||
'License :: OSI Approved :: zlib/libpng License',
|
||||
'License :: OSI Approved :: Zope Public License',
|
||||
'License :: Other/Proprietary License',
|
||||
'License :: Public Domain',
|
||||
'License :: Repoze Public License',
|
||||
'Natural Language :: Afrikaans',
|
||||
'Natural Language :: Arabic',
|
||||
'Natural Language :: Bengali',
|
||||
'Natural Language :: Bosnian',
|
||||
'Natural Language :: Bulgarian',
|
||||
'Natural Language :: Catalan',
|
||||
'Natural Language :: Chinese (Simplified)',
|
||||
'Natural Language :: Chinese (Traditional)',
|
||||
'Natural Language :: Croatian',
|
||||
'Natural Language :: Czech',
|
||||
'Natural Language :: Danish',
|
||||
'Natural Language :: Dutch',
|
||||
'Natural Language :: English',
|
||||
'Natural Language :: Esperanto',
|
||||
'Natural Language :: Finnish',
|
||||
'Natural Language :: French',
|
||||
'Natural Language :: German',
|
||||
'Natural Language :: Greek',
|
||||
'Natural Language :: Hebrew',
|
||||
'Natural Language :: Hindi',
|
||||
'Natural Language :: Hungarian',
|
||||
'Natural Language :: Icelandic',
|
||||
'Natural Language :: Indonesian',
|
||||
'Natural Language :: Italian',
|
||||
'Natural Language :: Japanese',
|
||||
'Natural Language :: Javanese',
|
||||
'Natural Language :: Korean',
|
||||
'Natural Language :: Latin',
|
||||
'Natural Language :: Latvian',
|
||||
'Natural Language :: Macedonian',
|
||||
'Natural Language :: Malay',
|
||||
'Natural Language :: Marathi',
|
||||
'Natural Language :: Norwegian',
|
||||
'Natural Language :: Panjabi',
|
||||
'Natural Language :: Persian',
|
||||
'Natural Language :: Polish',
|
||||
'Natural Language :: Portuguese',
|
||||
'Natural Language :: Portuguese (Brazilian)',
|
||||
'Natural Language :: Romanian',
|
||||
'Natural Language :: Russian',
|
||||
'Natural Language :: Serbian',
|
||||
'Natural Language :: Slovak',
|
||||
'Natural Language :: Slovenian',
|
||||
'Natural Language :: Spanish',
|
||||
'Natural Language :: Swedish',
|
||||
'Natural Language :: Tamil',
|
||||
'Natural Language :: Telugu',
|
||||
'Natural Language :: Thai',
|
||||
'Natural Language :: Turkish',
|
||||
'Natural Language :: Ukranian',
|
||||
'Natural Language :: Urdu',
|
||||
'Natural Language :: Vietnamese',
|
||||
'Operating System :: BeOS',
|
||||
'Operating System :: MacOS',
|
||||
'Operating System :: MacOS :: MacOS 9',
|
||||
'Operating System :: MacOS :: MacOS X',
|
||||
'Operating System :: Microsoft',
|
||||
'Operating System :: Microsoft :: MS-DOS',
|
||||
'Operating System :: Microsoft :: Windows',
|
||||
'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier',
|
||||
'Operating System :: Microsoft :: Windows :: Windows 95/98/2000',
|
||||
'Operating System :: Microsoft :: Windows :: Windows CE',
|
||||
'Operating System :: Microsoft :: Windows :: Windows NT/2000',
|
||||
'Operating System :: OS/2',
|
||||
'Operating System :: OS Independent',
|
||||
'Operating System :: Other OS',
|
||||
'Operating System :: PalmOS',
|
||||
'Operating System :: PDA Systems',
|
||||
'Operating System :: POSIX',
|
||||
'Operating System :: POSIX :: AIX',
|
||||
'Operating System :: POSIX :: BSD',
|
||||
'Operating System :: POSIX :: BSD :: BSD/OS',
|
||||
'Operating System :: POSIX :: BSD :: FreeBSD',
|
||||
'Operating System :: POSIX :: BSD :: NetBSD',
|
||||
'Operating System :: POSIX :: BSD :: OpenBSD',
|
||||
'Operating System :: POSIX :: GNU Hurd',
|
||||
'Operating System :: POSIX :: HP-UX',
|
||||
'Operating System :: POSIX :: IRIX',
|
||||
'Operating System :: POSIX :: Linux',
|
||||
'Operating System :: POSIX :: Other',
|
||||
'Operating System :: POSIX :: SCO',
|
||||
'Operating System :: POSIX :: SunOS/Solaris',
|
||||
'Operating System :: Unix',
|
||||
'Programming Language :: Ada',
|
||||
'Programming Language :: APL',
|
||||
'Programming Language :: ASP',
|
||||
'Programming Language :: Assembly',
|
||||
'Programming Language :: Awk',
|
||||
'Programming Language :: Basic',
|
||||
'Programming Language :: C',
|
||||
'Programming Language :: C#',
|
||||
'Programming Language :: C++',
|
||||
'Programming Language :: Cold Fusion',
|
||||
'Programming Language :: Cython',
|
||||
'Programming Language :: Delphi/Kylix',
|
||||
'Programming Language :: Dylan',
|
||||
'Programming Language :: Eiffel',
|
||||
'Programming Language :: Emacs-Lisp',
|
||||
'Programming Language :: Erlang',
|
||||
'Programming Language :: Euler',
|
||||
'Programming Language :: Euphoria',
|
||||
'Programming Language :: Forth',
|
||||
'Programming Language :: Fortran',
|
||||
'Programming Language :: Haskell',
|
||||
'Programming Language :: Java',
|
||||
'Programming Language :: JavaScript',
|
||||
'Programming Language :: Lisp',
|
||||
'Programming Language :: Logo',
|
||||
'Programming Language :: ML',
|
||||
'Programming Language :: Modula',
|
||||
'Programming Language :: Objective C',
|
||||
'Programming Language :: Object Pascal',
|
||||
'Programming Language :: OCaml',
|
||||
'Programming Language :: Other',
|
||||
'Programming Language :: Other Scripting Engines',
|
||||
'Programming Language :: Pascal',
|
||||
'Programming Language :: Perl',
|
||||
'Programming Language :: PHP',
|
||||
'Programming Language :: Pike',
|
||||
'Programming Language :: Pliant',
|
||||
'Programming Language :: PL/SQL',
|
||||
'Programming Language :: PROGRESS',
|
||||
'Programming Language :: Prolog',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.3',
|
||||
'Programming Language :: Python :: 2.4',
|
||||
'Programming Language :: Python :: 2.5',
|
||||
'Programming Language :: Python :: 2.6',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.0',
|
||||
'Programming Language :: Python :: 3.1',
|
||||
'Programming Language :: Python :: 3.2',
|
||||
'Programming Language :: REBOL',
|
||||
'Programming Language :: Rexx',
|
||||
'Programming Language :: Ruby',
|
||||
'Programming Language :: Scheme',
|
||||
'Programming Language :: Simula',
|
||||
'Programming Language :: Smalltalk',
|
||||
'Programming Language :: SQL',
|
||||
'Programming Language :: Tcl',
|
||||
'Programming Language :: Unix Shell',
|
||||
'Programming Language :: Visual Basic',
|
||||
'Programming Language :: XBasic',
|
||||
'Programming Language :: YACC',
|
||||
'Programming Language :: Zope',
|
||||
'Topic :: Adaptive Technologies',
|
||||
'Topic :: Artistic Software',
|
||||
'Topic :: Communications',
|
||||
'Topic :: Communications :: BBS',
|
||||
'Topic :: Communications :: Chat',
|
||||
'Topic :: Communications :: Chat :: AOL Instant Messenger',
|
||||
'Topic :: Communications :: Chat :: ICQ',
|
||||
'Topic :: Communications :: Chat :: Internet Relay Chat',
|
||||
'Topic :: Communications :: Chat :: Unix Talk',
|
||||
'Topic :: Communications :: Conferencing',
|
||||
'Topic :: Communications :: Email',
|
||||
'Topic :: Communications :: Email :: Address Book',
|
||||
'Topic :: Communications :: Email :: Email Clients (MUA)',
|
||||
'Topic :: Communications :: Email :: Filters',
|
||||
'Topic :: Communications :: Email :: Mailing List Servers',
|
||||
'Topic :: Communications :: Email :: Mail Transport Agents',
|
||||
'Topic :: Communications :: Email :: Post-Office',
|
||||
'Topic :: Communications :: Email :: Post-Office :: IMAP',
|
||||
'Topic :: Communications :: Email :: Post-Office :: POP3',
|
||||
'Topic :: Communications :: Fax',
|
||||
'Topic :: Communications :: FIDO',
|
||||
'Topic :: Communications :: File Sharing',
|
||||
'Topic :: Communications :: File Sharing :: Gnutella',
|
||||
'Topic :: Communications :: File Sharing :: Napster',
|
||||
'Topic :: Communications :: Ham Radio',
|
||||
'Topic :: Communications :: Internet Phone',
|
||||
'Topic :: Communications :: Telephony',
|
||||
'Topic :: Communications :: Usenet News',
|
||||
'Topic :: Database',
|
||||
'Topic :: Database :: Database Engines/Servers',
|
||||
'Topic :: Database :: Front-Ends',
|
||||
'Topic :: Desktop Environment',
|
||||
'Topic :: Desktop Environment :: File Managers',
|
||||
'Topic :: Desktop Environment :: Gnome',
|
||||
'Topic :: Desktop Environment :: GNUstep',
|
||||
'Topic :: Desktop Environment :: K Desktop Environment (KDE)',
|
||||
'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes',
|
||||
'Topic :: Desktop Environment :: PicoGUI',
|
||||
'Topic :: Desktop Environment :: PicoGUI :: Applications',
|
||||
'Topic :: Desktop Environment :: PicoGUI :: Themes',
|
||||
'Topic :: Desktop Environment :: Screen Savers',
|
||||
'Topic :: Desktop Environment :: Window Managers',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Afterstep',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Applets',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Blackbox',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Blackbox :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: CTWM',
|
||||
'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Enlightenment',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Fluxbox',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: FVWM',
|
||||
'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: IceWM',
|
||||
'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: MetaCity',
|
||||
'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Oroborus',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Sawfish',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Waimea',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Window Maker',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets',
|
||||
'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes',
|
||||
'Topic :: Desktop Environment :: Window Managers :: XFCE',
|
||||
'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes',
|
||||
'Topic :: Documentation',
|
||||
'Topic :: Education',
|
||||
'Topic :: Education :: Computer Aided Instruction (CAI)',
|
||||
'Topic :: Education :: Testing',
|
||||
'Topic :: Games/Entertainment',
|
||||
'Topic :: Games/Entertainment :: Arcade',
|
||||
'Topic :: Games/Entertainment :: Board Games',
|
||||
'Topic :: Games/Entertainment :: First Person Shooters',
|
||||
'Topic :: Games/Entertainment :: Fortune Cookies',
|
||||
'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)',
|
||||
'Topic :: Games/Entertainment :: Puzzle Games',
|
||||
'Topic :: Games/Entertainment :: Real Time Strategy',
|
||||
'Topic :: Games/Entertainment :: Role-Playing',
|
||||
'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games',
|
||||
'Topic :: Games/Entertainment :: Simulation',
|
||||
'Topic :: Games/Entertainment :: Turn Based Strategy',
|
||||
'Topic :: Home Automation',
|
||||
'Topic :: Internet',
|
||||
'Topic :: Internet :: File Transfer Protocol (FTP)',
|
||||
'Topic :: Internet :: Finger',
|
||||
'Topic :: Internet :: Log Analysis',
|
||||
'Topic :: Internet :: Name Service (DNS)',
|
||||
'Topic :: Internet :: Proxy Servers',
|
||||
'Topic :: Internet :: WAP',
|
||||
'Topic :: Internet :: WWW/HTTP',
|
||||
'Topic :: Internet :: WWW/HTTP :: Browsers',
|
||||
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
|
||||
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
|
||||
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
|
||||
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary',
|
||||
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters',
|
||||
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
|
||||
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
|
||||
'Topic :: Internet :: WWW/HTTP :: Site Management',
|
||||
'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
|
||||
'Topic :: Internet :: WWW/HTTP :: WSGI',
|
||||
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
|
||||
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
|
||||
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
|
||||
'Topic :: Internet :: Z39.50',
|
||||
'Topic :: Multimedia',
|
||||
'Topic :: Multimedia :: Graphics',
|
||||
'Topic :: Multimedia :: Graphics :: 3D Modeling',
|
||||
'Topic :: Multimedia :: Graphics :: 3D Rendering',
|
||||
'Topic :: Multimedia :: Graphics :: Capture',
|
||||
'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera',
|
||||
'Topic :: Multimedia :: Graphics :: Capture :: Scanners',
|
||||
'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture',
|
||||
'Topic :: Multimedia :: Graphics :: Editors',
|
||||
'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based',
|
||||
'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based',
|
||||
'Topic :: Multimedia :: Graphics :: Graphics Conversion',
|
||||
'Topic :: Multimedia :: Graphics :: Presentation',
|
||||
'Topic :: Multimedia :: Graphics :: Viewers',
|
||||
'Topic :: Multimedia :: Sound/Audio',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Analysis',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Capture/Recording',
|
||||
'Topic :: Multimedia :: Sound/Audio :: CD Audio',
|
||||
'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing',
|
||||
'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping',
|
||||
'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Conversion',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Editors',
|
||||
'Topic :: Multimedia :: Sound/Audio :: MIDI',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Mixers',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Players',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis',
|
||||
'Topic :: Multimedia :: Sound/Audio :: Speech',
|
||||
'Topic :: Multimedia :: Video',
|
||||
'Topic :: Multimedia :: Video :: Capture',
|
||||
'Topic :: Multimedia :: Video :: Conversion',
|
||||
'Topic :: Multimedia :: Video :: Display',
|
||||
'Topic :: Multimedia :: Video :: Non-Linear Editor',
|
||||
'Topic :: Office/Business',
|
||||
'Topic :: Office/Business :: Financial',
|
||||
'Topic :: Office/Business :: Financial :: Accounting',
|
||||
'Topic :: Office/Business :: Financial :: Investment',
|
||||
'Topic :: Office/Business :: Financial :: Point-Of-Sale',
|
||||
'Topic :: Office/Business :: Financial :: Spreadsheet',
|
||||
'Topic :: Office/Business :: Groupware',
|
||||
'Topic :: Office/Business :: News/Diary',
|
||||
'Topic :: Office/Business :: Office Suites',
|
||||
'Topic :: Office/Business :: Scheduling',
|
||||
'Topic :: Other/Nonlisted Topic',
|
||||
'Topic :: Printing',
|
||||
'Topic :: Religion',
|
||||
'Topic :: Scientific/Engineering',
|
||||
'Topic :: Scientific/Engineering :: Artificial Intelligence',
|
||||
'Topic :: Scientific/Engineering :: Astronomy',
|
||||
'Topic :: Scientific/Engineering :: Atmospheric Science',
|
||||
'Topic :: Scientific/Engineering :: Bio-Informatics',
|
||||
'Topic :: Scientific/Engineering :: Chemistry',
|
||||
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
|
||||
'Topic :: Scientific/Engineering :: GIS',
|
||||
'Topic :: Scientific/Engineering :: Human Machine Interfaces',
|
||||
'Topic :: Scientific/Engineering :: Image Recognition',
|
||||
'Topic :: Scientific/Engineering :: Information Analysis',
|
||||
'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator',
|
||||
'Topic :: Scientific/Engineering :: Mathematics',
|
||||
'Topic :: Scientific/Engineering :: Medical Science Apps.',
|
||||
'Topic :: Scientific/Engineering :: Physics',
|
||||
'Topic :: Scientific/Engineering :: Visualization',
|
||||
'Topic :: Security',
|
||||
'Topic :: Security :: Cryptography',
|
||||
'Topic :: Sociology',
|
||||
'Topic :: Sociology :: Genealogy',
|
||||
'Topic :: Sociology :: History',
|
||||
'Topic :: Software Development',
|
||||
'Topic :: Software Development :: Assemblers',
|
||||
'Topic :: Software Development :: Bug Tracking',
|
||||
'Topic :: Software Development :: Build Tools',
|
||||
'Topic :: Software Development :: Code Generators',
|
||||
'Topic :: Software Development :: Compilers',
|
||||
'Topic :: Software Development :: Debuggers',
|
||||
'Topic :: Software Development :: Disassemblers',
|
||||
'Topic :: Software Development :: Documentation',
|
||||
'Topic :: Software Development :: Embedded Systems',
|
||||
'Topic :: Software Development :: Internationalization',
|
||||
'Topic :: Software Development :: Interpreters',
|
||||
'Topic :: Software Development :: Libraries',
|
||||
'Topic :: Software Development :: Libraries :: Application Frameworks',
|
||||
'Topic :: Software Development :: Libraries :: Java Libraries',
|
||||
'Topic :: Software Development :: Libraries :: Perl Modules',
|
||||
'Topic :: Software Development :: Libraries :: PHP Classes',
|
||||
'Topic :: Software Development :: Libraries :: Pike Modules',
|
||||
'Topic :: Software Development :: Libraries :: pygame',
|
||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
||||
'Topic :: Software Development :: Libraries :: Ruby Modules',
|
||||
'Topic :: Software Development :: Libraries :: Tcl Extensions',
|
||||
'Topic :: Software Development :: Localization',
|
||||
'Topic :: Software Development :: Object Brokering',
|
||||
'Topic :: Software Development :: Object Brokering :: CORBA',
|
||||
'Topic :: Software Development :: Pre-processors',
|
||||
'Topic :: Software Development :: Quality Assurance',
|
||||
'Topic :: Software Development :: Testing',
|
||||
'Topic :: Software Development :: Testing :: Traffic Generation',
|
||||
'Topic :: Software Development :: User Interfaces',
|
||||
'Topic :: Software Development :: Version Control',
|
||||
'Topic :: Software Development :: Version Control :: CVS',
|
||||
'Topic :: Software Development :: Version Control :: RCS',
|
||||
'Topic :: Software Development :: Version Control :: SCCS',
|
||||
'Topic :: Software Development :: Widget Sets',
|
||||
'Topic :: System',
|
||||
'Topic :: System :: Archiving',
|
||||
'Topic :: System :: Archiving :: Backup',
|
||||
'Topic :: System :: Archiving :: Compression',
|
||||
'Topic :: System :: Archiving :: Mirroring',
|
||||
'Topic :: System :: Archiving :: Packaging',
|
||||
'Topic :: System :: Benchmark',
|
||||
'Topic :: System :: Boot',
|
||||
'Topic :: System :: Boot :: Init',
|
||||
'Topic :: System :: Clustering',
|
||||
'Topic :: System :: Console Fonts',
|
||||
'Topic :: System :: Distributed Computing',
|
||||
'Topic :: System :: Emulators',
|
||||
'Topic :: System :: Filesystems',
|
||||
'Topic :: System :: Hardware',
|
||||
'Topic :: System :: Hardware :: Hardware Drivers',
|
||||
'Topic :: System :: Hardware :: Mainframes',
|
||||
'Topic :: System :: Hardware :: Symmetric Multi-processing',
|
||||
'Topic :: System :: Installation/Setup',
|
||||
'Topic :: System :: Logging',
|
||||
'Topic :: System :: Monitoring',
|
||||
'Topic :: System :: Networking',
|
||||
'Topic :: System :: Networking :: Firewalls',
|
||||
'Topic :: System :: Networking :: Monitoring',
|
||||
'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog',
|
||||
'Topic :: System :: Networking :: Time Synchronization',
|
||||
'Topic :: System :: Operating System',
|
||||
'Topic :: System :: Operating System Kernels',
|
||||
'Topic :: System :: Operating System Kernels :: BSD',
|
||||
'Topic :: System :: Operating System Kernels :: GNU Hurd',
|
||||
'Topic :: System :: Operating System Kernels :: Linux',
|
||||
'Topic :: System :: Power (UPS)',
|
||||
'Topic :: System :: Recovery Tools',
|
||||
'Topic :: System :: Shells',
|
||||
'Topic :: System :: Software Distribution',
|
||||
'Topic :: System :: Systems Administration',
|
||||
'Topic :: System :: Systems Administration :: Authentication/Directory',
|
||||
'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP',
|
||||
'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS',
|
||||
'Topic :: System :: System Shells',
|
||||
'Topic :: Terminals',
|
||||
'Topic :: Terminals :: Serial',
|
||||
'Topic :: Terminals :: Telnet',
|
||||
'Topic :: Terminals :: Terminal Emulators/X Terminals',
|
||||
'Topic :: Text Editors',
|
||||
'Topic :: Text Editors :: Documentation',
|
||||
'Topic :: Text Editors :: Emacs',
|
||||
'Topic :: Text Editors :: Integrated Development Environments (IDE)',
|
||||
'Topic :: Text Editors :: Text Processing',
|
||||
'Topic :: Text Editors :: Word Processors',
|
||||
'Topic :: Text Processing',
|
||||
'Topic :: Text Processing :: Filters',
|
||||
'Topic :: Text Processing :: Fonts',
|
||||
'Topic :: Text Processing :: General',
|
||||
'Topic :: Text Processing :: Indexing',
|
||||
'Topic :: Text Processing :: Linguistic',
|
||||
'Topic :: Text Processing :: Markup',
|
||||
'Topic :: Text Processing :: Markup :: HTML',
|
||||
'Topic :: Text Processing :: Markup :: LaTeX',
|
||||
'Topic :: Text Processing :: Markup :: SGML',
|
||||
'Topic :: Text Processing :: Markup :: VRML',
|
||||
'Topic :: Text Processing :: Markup :: XML',
|
||||
'Topic :: Utilities',
|
||||
]
|
||||
56
Lib/packaging/command/__init__.py
Normal file
56
Lib/packaging/command/__init__.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
"""Subpackage containing all standard commands."""
|
||||
|
||||
from packaging.errors import PackagingModuleError
|
||||
from packaging.util import resolve_name
|
||||
|
||||
__all__ = ['get_command_names', 'set_command', 'get_command_class',
|
||||
'STANDARD_COMMANDS']
|
||||
|
||||
_COMMANDS = {
|
||||
'check': 'packaging.command.check.check',
|
||||
'test': 'packaging.command.test.test',
|
||||
'build': 'packaging.command.build.build',
|
||||
'build_py': 'packaging.command.build_py.build_py',
|
||||
'build_ext': 'packaging.command.build_ext.build_ext',
|
||||
'build_clib': 'packaging.command.build_clib.build_clib',
|
||||
'build_scripts': 'packaging.command.build_scripts.build_scripts',
|
||||
'clean': 'packaging.command.clean.clean',
|
||||
'install_dist': 'packaging.command.install_dist.install_dist',
|
||||
'install_lib': 'packaging.command.install_lib.install_lib',
|
||||
'install_headers': 'packaging.command.install_headers.install_headers',
|
||||
'install_scripts': 'packaging.command.install_scripts.install_scripts',
|
||||
'install_data': 'packaging.command.install_data.install_data',
|
||||
'install_distinfo':
|
||||
'packaging.command.install_distinfo.install_distinfo',
|
||||
'sdist': 'packaging.command.sdist.sdist',
|
||||
'bdist': 'packaging.command.bdist.bdist',
|
||||
'bdist_dumb': 'packaging.command.bdist_dumb.bdist_dumb',
|
||||
'bdist_wininst': 'packaging.command.bdist_wininst.bdist_wininst',
|
||||
'register': 'packaging.command.register.register',
|
||||
'upload': 'packaging.command.upload.upload',
|
||||
'upload_docs': 'packaging.command.upload_docs.upload_docs'}
|
||||
|
||||
STANDARD_COMMANDS = set(_COMMANDS)
|
||||
|
||||
|
||||
def get_command_names():
|
||||
"""Return registered commands"""
|
||||
return sorted(_COMMANDS)
|
||||
|
||||
|
||||
def set_command(location):
|
||||
cls = resolve_name(location)
|
||||
# XXX we want to do the duck-type checking here
|
||||
_COMMANDS[cls.get_command_name()] = cls
|
||||
|
||||
|
||||
def get_command_class(name):
|
||||
"""Return the registered command"""
|
||||
try:
|
||||
cls = _COMMANDS[name]
|
||||
if isinstance(cls, str):
|
||||
cls = resolve_name(cls)
|
||||
_COMMANDS[name] = cls
|
||||
return cls
|
||||
except KeyError:
|
||||
raise PackagingModuleError("Invalid command %s" % name)
|
||||
141
Lib/packaging/command/bdist.py
Normal file
141
Lib/packaging/command/bdist.py
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
"""Create a built (binary) distribution.
|
||||
|
||||
If a --formats option was given on the command line, this command will
|
||||
call the corresponding bdist_* commands; if the option was absent, a
|
||||
bdist_* command depending on the current platform will be called.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from packaging import util
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingPlatformError, PackagingOptionError
|
||||
|
||||
|
||||
def show_formats():
    """Print list of available formats (arguments to "--format" option).
    """
    from packaging.fancy_getopt import FancyGetopt
    entries = [("formats=" + name, None, bdist.format_command[name][1])
               for name in bdist.format_commands]
    pretty_printer = FancyGetopt(entries)
    pretty_printer.print_help("List of available distribution formats:")
class bdist(Command):
    """Dispatch to one bdist_* command per requested format.

    If --formats is not given, a single format appropriate for the
    current platform (per ``default_format``) is built.
    """

    description = "create a built (binary) distribution"

    user_options = [('bdist-base=', 'b',
                     "temporary directory for creating built distributions"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % util.get_platform()),
                    ('formats=', None,
                     "formats for distribution (comma-separated list)"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in "
                     "[default: dist]"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                    ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None,
         "lists available distribution formats", show_formats),
        ]

    # This is of course very simplistic.  The various UNIX family operating
    # systems have their specific formats, but they are out of scope for us;
    # bdist_dumb is, well, dumb; it's more a building block for other
    # packaging tools than a real end-user binary format.
    default_format = {'posix': 'gztar',
                      'nt': 'zip',
                      'os2': 'zip'}

    # Establish the preferred order (for the --help-formats option).
    format_commands = ['gztar', 'bztar', 'ztar', 'tar',
                       'wininst', 'zip', 'msi']

    # And the real information: format -> (implementing command, help text).
    format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
                      'bztar': ('bdist_dumb', "bzip2'ed tar file"),
                      'ztar': ('bdist_dumb', "compressed tar file"),
                      'tar': ('bdist_dumb', "tar file"),
                      'wininst': ('bdist_wininst',
                                  "Windows executable installer"),
                      'zip': ('bdist_dumb', "ZIP file"),
                      'msi': ('bdist_msi', "Microsoft Installer")
                      }

    def initialize_options(self):
        """Set every option to its pre-parse default."""
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = False
        self.group = None
        self.owner = None

    def finalize_options(self):
        """Compute defaults for any option not given on the command line."""
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = util.get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base,
                                           'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise PackagingPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name)

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        """Run the bdist_* sub-command implementing each requested format."""
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_command[format][0])
            except KeyError:
                raise PackagingOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i, cmd_name in enumerate(commands):
            sub_cmd = self.get_reinitialized_command(cmd_name)

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1:]:
                sub_cmd.keep_temp = True
            self.run_command(cmd_name)
137
Lib/packaging/command/bdist_dumb.py
Normal file
137
Lib/packaging/command/bdist_dumb.py
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
"""Create a "dumb" built distribution.
|
||||
|
||||
A dumb distribution is just an archive meant to be unpacked under
|
||||
sys.prefix or sys.exec_prefix.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from shutil import rmtree
|
||||
from sysconfig import get_python_version
|
||||
from packaging.util import get_platform
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingPlatformError
|
||||
from packaging import logger
|
||||
|
||||
class bdist_dumb(Command):
    """Create a "dumb" distribution: an archive of the installed tree."""

    description = 'create a "dumb" built distribution'

    # NOTE(review): 'bdist-dir' and 'dist-dir' both declare the short
    # option 'd' -- a likely conflict; confirm against fancy_getopt and
    # consider dropping one of the short forms.
    user_options = [('bdist-dir=', 'd',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('format=', 'f',
                     "archive format to create (tar, ztar, gztar, zip)"),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    # NOTE(review): the implicit concatenation below yields
                    # "paths(default: false)" -- missing a space.
                    ('relative', None,
                     "build the archive using relative paths"
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    # Fallback archive format per os.name when --format is not given.
    default_format = { 'posix': 'gztar',
                       'nt': 'zip',
                       'os2': 'zip' }

    def initialize_options(self):
        # Pre-parse defaults for every option.
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = False
        self.dist_dir = None
        self.skip_build = False
        self.relative = False
        self.owner = None
        self.group = None

    def finalize_options(self):
        # Default staging directory is <bdist_base>/dumb.
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise PackagingPlatformError(("don't know how to create dumb built distributions " +
                                              "on platform %s") % os.name)

        # Inherit dist_dir and plat_name from the parent 'bdist' command.
        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')

    def run(self):
        # Build first unless explicitly skipped, then pseudo-install into
        # bdist_dir and archive the result.
        if not self.skip_build:
            self.run_command('build')

        install = self.get_reinitialized_command('install_dist',
                                                 reinit_subcommands=True)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = False

        logger.info("installing to %s", self.bdist_dir)
        self.run_command('install_dist')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
                                      self.plat_name)

        # OS/2 objects to any ":" characters in a filename (such as when
        # a timestamp is used in a version) so change them to hyphens.
        if os.name == "os2":
            archive_basename = archive_basename.replace(":", "-")

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            # A relative archive only makes sense when base and platbase
            # coincide for distributions with extension modules.
            if (self.distribution.has_ext_modules() and
                (install.install_base != install.install_platbase)):
                raise PackagingPlatformError(
                    "can't make a dumb built distribution where base and "
                    "platbase are different (%r, %r)" %
                    (install.install_base, install.install_platbase))
            else:
                archive_root = os.path.join(
                    self.bdist_dir,
                    self._ensure_relative(install.install_base))

        # Make the archive
        filename = self.make_archive(pseudoinstall_root,
                                     self.format, root_dir=archive_root,
                                     owner=self.owner, group=self.group)
        # Tag the produced file with the Python version it targets
        # ('any' for pure-Python projects).
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion,
                                             filename))

        if not self.keep_temp:
            if self.dry_run:
                logger.info('removing %s', self.bdist_dir)
            else:
                rmtree(self.bdist_dir)

    def _ensure_relative(self, path):
        # copied from dir_util, deleted
        # Strip the leading separator (keeping any drive letter) so that
        # os.path.join treats the result as a relative path.
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path
740
Lib/packaging/command/bdist_msi.py
Normal file
740
Lib/packaging/command/bdist_msi.py
Normal file
|
|
@ -0,0 +1,740 @@
|
|||
"""Create a Microsoft Installer (.msi) binary distribution."""
|
||||
|
||||
# Copyright (C) 2005, 2006 Martin von Löwis
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import msilib
|
||||
|
||||
|
||||
from sysconfig import get_python_version
|
||||
from shutil import rmtree
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.version import NormalizedVersion
|
||||
from packaging.errors import PackagingOptionError
|
||||
from packaging import logger as log
|
||||
from packaging.util import get_platform
|
||||
from msilib import schema, sequence, text
|
||||
from msilib import Directory, Feature, Dialog, add_data
|
||||
|
||||
class MSIVersion(NormalizedVersion):
    """Version whose string form is acceptable as an MSI ProductVersion.

    MSI ProductVersion must be strictly numeric, so prerelease and
    postrelease versions are rejected at construction time.
    """
    def __init__(self, *args, **kwargs):
        # Zero-argument super() -- this is Python 3 code.
        super().__init__(*args, **kwargs)
        if not self.is_final:
            raise ValueError("ProductVersion must be strictly numeric")
class PyDialog(Dialog):
    """Dialog class with a fixed layout: controls at the top, then a ruler,
    then a list of buttons: back, next, cancel.  Optionally a bitmap at the
    left."""

    def __init__(self, *args, **kw):
        """Dialog(database, name, x, y, w, h, attributes, title, first,
        default, cancel, bitmap=true)"""
        super().__init__(*args)
        ruler = self.h - 36
        #if kw.get("bitmap", True):
        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
        self.line("BottomLine", 0, ruler, self.w, 0)

    def title(self, title):
        "Set the title text of the dialog at the top."
        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
        # text, in VerdanaBold10
        self.text("Title", 15, 10, 320, 60, 0x30003,
                  r"{\VerdanaBold10}%s" % title)

    def back(self, title, next, name="Back", active=1):
        """Add a back button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        # 3 = Visible|Enabled; 1 = Visible only.
        flags = 3 if active else 1
        return self.pushbutton(name, 180, self.h - 27, 56, 17, flags,
                               title, next)

    def cancel(self, title, next, name="Cancel", active=1):
        """Add a cancel button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        flags = 3 if active else 1
        return self.pushbutton(name, 304, self.h - 27, 56, 17, flags,
                               title, next)

    def next(self, title, next, name="Next", active=1):
        """Add a Next button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated"""
        flags = 3 if active else 1
        return self.pushbutton(name, 236, self.h - 27, 56, 17, flags,
                               title, next)

    def xbutton(self, name, title, next, xpos):
        """Add a button with a given title, the tab-next button,
        its name in the Control table, giving its x position; the
        y-position is aligned with the other buttons.

        Return the button, so that events can be associated"""
        return self.pushbutton(name, int(self.w * xpos - 28), self.h - 27,
                               56, 17, 3, title, next)
class bdist_msi(Command):
|
||||
|
||||
description = "create a Microsoft Installer (.msi) binary distribution"
|
||||
|
||||
user_options = [('bdist-dir=', None,
|
||||
"temporary directory for creating the distribution"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to embed in generated filenames "
|
||||
"(default: %s)" % get_platform()),
|
||||
('keep-temp', 'k',
|
||||
"keep the pseudo-installation tree around after " +
|
||||
"creating the distribution archive"),
|
||||
('target-version=', None,
|
||||
"require a specific python version" +
|
||||
" on the target system"),
|
||||
('no-target-compile', 'c',
|
||||
"do not compile .py to .pyc on the target system"),
|
||||
('no-target-optimize', 'o',
|
||||
"do not compile .py to .pyo (optimized)"
|
||||
"on the target system"),
|
||||
('dist-dir=', 'd',
|
||||
"directory to put final built distributions in"),
|
||||
('skip-build', None,
|
||||
"skip rebuilding everything (for testing/debugging)"),
|
||||
('install-script=', None,
|
||||
"basename of installation script to be run after"
|
||||
"installation or before deinstallation"),
|
||||
('pre-install-script=', None,
|
||||
"Fully qualified filename of a script to be run before "
|
||||
"any files are installed. This script need not be in the "
|
||||
"distribution"),
|
||||
]
|
||||
|
||||
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
||||
'skip-build']
|
||||
|
||||
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
|
||||
'2.5', '2.6', '2.7', '2.8', '2.9',
|
||||
'3.0', '3.1', '3.2', '3.3', '3.4',
|
||||
'3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
other_version = 'X'
|
||||
|
||||
    def initialize_options(self):
        """Set every option to its pre-parse default."""
        self.bdist_dir = None
        self.plat_name = None
        self.keep_temp = False
        self.no_target_compile = False
        self.no_target_optimize = False
        self.target_version = None
        self.dist_dir = None
        self.skip_build = False
        self.install_script = None
        self.pre_install_script = None
        # Filled in finalize_options: target Python versions to offer.
        self.versions = None
    def finalize_options(self):
        """Compute option defaults and validate the combination given."""
        # Default staging directory is <bdist_base>/msi.
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'msi')
        short_version = get_python_version()
        # Extension modules tie the installer to the running Python version.
        if (not self.target_version) and self.distribution.has_ext_modules():
            self.target_version = short_version
        if self.target_version:
            self.versions = [self.target_version]
            # Cross-version builds of extension modules require --skip-build
            # (the extensions can't be compiled by this interpreter).
            if not self.skip_build and self.distribution.has_ext_modules()\
               and self.target_version != short_version:
                raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
                    " option must be specified" % (short_version,))
        else:
            # Pure-Python project: offer every supported Python version.
            self.versions = list(self.all_versions)

        self.set_undefined_options('bdist', 'dist_dir', 'plat_name')

        if self.pre_install_script:
            raise PackagingOptionError("the pre-install-script feature is not yet implemented")

        if self.install_script:
            # The install script must be one of the distribution's scripts.
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise PackagingOptionError("install_script '%s' not found in scripts" % \
                    self.install_script)
        # Set later by add_files once the script's File-table key is known.
        self.install_script_key = None
    def run(self):
        """Build, pseudo-install into bdist_dir, and emit the .msi file."""
        if not self.skip_build:
            self.run_command('build')

        install = self.get_reinitialized_command('install_dist',
                                                 reinit_subcommands=True)
        install.prefix = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = False

        install_lib = self.get_reinitialized_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = False
        install_lib.optimize = 0

        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = sys.version[0:3]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)

        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()

        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

        install.run()

        del sys.path[0]

        self.mkpath(self.dist_dir)
        fullname = self.distribution.get_fullname()
        installer_name = self.get_installer_filename(fullname)
        installer_name = os.path.abspath(installer_name)
        # Remove any stale installer from a previous run.
        if os.path.exists(installer_name): os.unlink(installer_name)

        metadata = self.distribution.metadata
        # Author falls back to maintainer, then to a placeholder.
        author = metadata.author
        if not author:
            author = metadata.maintainer
        if not author:
            author = "UNKNOWN"
        # MSIVersion raises ValueError for non-final versions, which MSI
        # cannot represent.
        version = MSIVersion(metadata.get_version())
        # Prefix ProductName with Python x.y, so that
        # it sorts together with the other Python packages
        # in Add-Remove-Programs (APR)
        fullname = self.distribution.get_fullname()
        if self.target_version:
            product_name = "Python %s %s" % (self.target_version, fullname)
        else:
            product_name = "Python %s" % (fullname)
        self.db = msilib.init_database(installer_name, schema,
                                       product_name, msilib.gen_uuid(),
                                       str(version), author)
        msilib.add_tables(self.db, sequence)
        props = [('DistVersion', version)]
        email = metadata.author_email or metadata.maintainer_email
        if email:
            props.append(("ARPCONTACT", email))
        if metadata.url:
            props.append(("ARPURLINFOABOUT", metadata.url))
        if props:
            add_data(self.db, 'Property', props)

        # Populate the database: Python-location search, payload files,
        # install-script actions, and the UI, then flush to disk.
        self.add_find_python()
        self.add_files()
        self.add_scripts()
        self.add_ui()
        self.db.Commit()

        if hasattr(self.distribution, 'dist_files'):
            tup = 'bdist_msi', self.target_version or 'any', fullname
            self.distribution.dist_files.append(tup)

        if not self.keep_temp:
            log.info("removing temporary build directory %s", self.bdist_dir)
            if not self.dry_run:
                rmtree(self.bdist_dir)
    def add_files(self):
        """Populate the MSI Directory/File tables from bdist_dir.

        One Feature is created per target Python version, plus a
        catch-all "Python from another location" feature; the same file
        tree is recorded once per feature (via DuplicateFile rows after
        the first).
        """
        db = self.db
        cab = msilib.CAB("distfiles")
        rootdir = os.path.abspath(self.bdist_dir)

        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
        f = Feature(db, "Python", "Python", "Everything",
                    0, 1, directory="TARGETDIR")

        items = [(f, root, '')]
        for version in self.versions + [self.other_version]:
            target = "TARGETDIR" + version
            name = default = "Python" + version
            desc = "Everything"
            if version is self.other_version:
                title = "Python from another location"
                # level 2: not selected by default.
                level = 2
            else:
                title = "Python %s from registry" % version
                level = 1
            f = Feature(db, name, title, desc, 1, level, directory=target)
            dir = Directory(db, cab, root, rootdir, target, default)
            items.append((f, dir, version))
        db.Commit()

        # Walk the installed tree once per feature; 'seen' maps absolute
        # paths to their File-table keys so later features reuse them.
        seen = {}
        for feature, dir, version in items:
            todo = [dir]
            while todo:
                dir = todo.pop()
                for file in os.listdir(dir.absolute):
                    afile = os.path.join(dir.absolute, file)
                    if os.path.isdir(afile):
                        short = "%s|%s" % (dir.make_short(file), file)
                        default = file + version
                        newdir = Directory(db, cab, dir, file, default, short)
                        todo.append(newdir)
                    else:
                        if not dir.component:
                            dir.start_component(dir.logical, feature, 0)
                        if afile not in seen:
                            key = seen[afile] = dir.add_file(file)
                            if file==self.install_script:
                                if self.install_script_key:
                                    raise PackagingOptionError(
                                        "Multiple files with name %s" % file)
                                self.install_script_key = '[#%s]' % key
                        else:
                            # Already packed for an earlier feature: record
                            # a DuplicateFile row instead of packing again.
                            key = seen[afile]
                            add_data(self.db, "DuplicateFile",
                                     [(key + version, dir.component, key, None, dir.logical)])
        db.Commit()
        cab.commit(db)
    def add_find_python(self):
        """Adds code to the installer to compute the location of Python.

        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
        registry for each version of Python.

        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
        else from PYTHON.MACHINE.X.Y.

        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""

        # Each version consumes 4 consecutive sequence numbers from here.
        start = 402
        for ver in self.versions:
            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
            machine_reg = "python.machine." + ver
            user_reg = "python.user." + ver
            machine_prop = "PYTHON.MACHINE." + ver
            user_prop = "PYTHON.USER." + ver
            machine_action = "PythonFromMachine" + ver
            user_action = "PythonFromUser" + ver
            exe_action = "PythonExe" + ver
            target_dir_prop = "TARGETDIR" + ver
            exe_prop = "PYTHON" + ver
            if msilib.Win64:
                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
                Type = 2+16
            else:
                Type = 2
            add_data(self.db, "RegLocator",
                     [(machine_reg, 2, install_path, None, Type),
                      (user_reg, 1, install_path, None, Type)])
            add_data(self.db, "AppSearch",
                     [(machine_prop, machine_reg),
                      (user_prop, user_reg)])
            # 51+256: set-property custom actions — presumably
            # msidbCustomActionTypeProperty plus a modifier bit; confirm
            # against the MSI CustomAction type reference.
            add_data(self.db, "CustomAction",
                     [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
                      (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
                      (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
                      ])
            # The machine-wide action runs first; the per-user one then
            # overwrites TARGETDIRX.Y when the user registry key exists.
            add_data(self.db, "InstallExecuteSequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                      ])
            add_data(self.db, "InstallUISequence",
                     [(machine_action, machine_prop, start),
                      (user_action, user_prop, start + 1),
                      (exe_action, None, start + 2),
                      ])
            add_data(self.db, "Condition",
                     [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
            start += 4
        # Guard the hand-assigned sequence range (402..499).
        assert start < 500
    def add_scripts(self):
        """Register custom actions running the install script per version."""
        if self.install_script:
            # One custom action per target version; install_script_key was
            # computed by add_files from the packed script's File key.
            start = 6800
            for ver in self.versions + [self.other_version]:
                install_action = "install_script." + ver
                exe_prop = "PYTHON" + ver
                add_data(self.db, "CustomAction",
                         [(install_action, 50, exe_prop, self.install_script_key)])
                add_data(self.db, "InstallExecuteSequence",
                         [(install_action, "&Python%s=3" % ver, start)])
                start += 1
        # XXX pre-install scripts are currently refused in finalize_options()
        # but if this feature is completed, it will also need to add
        # entries for each version as the above code does
        if self.pre_install_script:
            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
            with open(scriptfn, "w") as f:
                # The batch file will be executed with [PYTHON], so that %1
                # is the path to the Python interpreter; %0 will be the path
                # of the batch file.
                # rem ="""
                # %1 %0
                # exit
                # """
                # <actual script>
                f.write('rem ="""\n%1 %0\nexit\n"""\n')
                with open(self.pre_install_script) as fp:
                    f.write(fp.read())
            add_data(self.db, "Binary",
                     [("PreInstall", msilib.Binary(scriptfn)),
                      ])
            add_data(self.db, "CustomAction",
                     [("PreInstall", 2, "PreInstall", None),
                      ])
            add_data(self.db, "InstallExecuteSequence",
                     [("PreInstall", "NOT Installed", 450),
                      ])
def add_ui(self):
|
||||
db = self.db
|
||||
x = y = 50
|
||||
w = 370
|
||||
h = 300
|
||||
title = "[ProductName] Setup"
|
||||
|
||||
# see "Dialog Style Bits"
|
||||
modal = 3 # visible | modal
|
||||
modeless = 1 # visible
|
||||
|
||||
# UI customization properties
|
||||
add_data(db, "Property",
|
||||
# See "DefaultUIFont Property"
|
||||
[("DefaultUIFont", "DlgFont8"),
|
||||
# See "ErrorDialog Style Bit"
|
||||
("ErrorDialog", "ErrorDlg"),
|
||||
("Progress1", "Install"), # modified in maintenance type dlg
|
||||
("Progress2", "installs"),
|
||||
("MaintenanceForm_Action", "Repair"),
|
||||
# possible values: ALL, JUSTME
|
||||
("WhichUsers", "ALL")
|
||||
])
|
||||
|
||||
# Fonts, see "TextStyle Table"
|
||||
add_data(db, "TextStyle",
|
||||
[("DlgFont8", "Tahoma", 9, None, 0),
|
||||
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
|
||||
("VerdanaBold10", "Verdana", 10, None, 1),
|
||||
("VerdanaRed9", "Verdana", 9, 255, 0),
|
||||
])
|
||||
|
||||
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
|
||||
# Numbers indicate sequence; see sequence.py for how these action integrate
|
||||
add_data(db, "InstallUISequence",
|
||||
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
|
||||
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
|
||||
# In the user interface, assume all-users installation if privileged.
|
||||
("SelectFeaturesDlg", "Not Installed", 1230),
|
||||
# XXX no support for resume installations yet
|
||||
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
|
||||
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
|
||||
("ProgressDlg", None, 1280)])
|
||||
|
||||
add_data(db, 'ActionText', text.ActionText)
|
||||
add_data(db, 'UIText', text.UIText)
|
||||
#####################################################################
|
||||
# Standard dialogs: FatalError, UserExit, ExitDialog
|
||||
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
|
||||
"Finish", "Finish", "Finish")
|
||||
fatal.title("[ProductName] Installer ended prematurely")
|
||||
fatal.back("< Back", "Finish", active = 0)
|
||||
fatal.cancel("Cancel", "Back", active = 0)
|
||||
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
|
||||
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
|
||||
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
|
||||
"Click the Finish button to exit the Installer.")
|
||||
c=fatal.next("Finish", "Cancel", name="Finish")
|
||||
c.event("EndDialog", "Exit")
|
||||
|
||||
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
|
||||
"Finish", "Finish", "Finish")
|
||||
user_exit.title("[ProductName] Installer was interrupted")
|
||||
user_exit.back("< Back", "Finish", active = 0)
|
||||
user_exit.cancel("Cancel", "Back", active = 0)
|
||||
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
|
||||
"[ProductName] setup was interrupted. Your system has not been modified. "
|
||||
"To install this program at a later time, please run the installation again.")
|
||||
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
|
||||
"Click the Finish button to exit the Installer.")
|
||||
c = user_exit.next("Finish", "Cancel", name="Finish")
|
||||
c.event("EndDialog", "Exit")
|
||||
|
||||
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
|
||||
"Finish", "Finish", "Finish")
|
||||
exit_dialog.title("Completing the [ProductName] Installer")
|
||||
exit_dialog.back("< Back", "Finish", active = 0)
|
||||
exit_dialog.cancel("Cancel", "Back", active = 0)
|
||||
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
|
||||
"Click the Finish button to exit the Installer.")
|
||||
c = exit_dialog.next("Finish", "Cancel", name="Finish")
|
||||
c.event("EndDialog", "Return")
|
||||
|
||||
#####################################################################
|
||||
# Required dialog: FilesInUse, ErrorDlg
|
||||
inuse = PyDialog(db, "FilesInUse",
|
||||
x, y, w, h,
|
||||
19, # KeepModeless|Modal|Visible
|
||||
title,
|
||||
"Retry", "Retry", "Retry", bitmap=False)
|
||||
inuse.text("Title", 15, 6, 200, 15, 0x30003,
|
||||
r"{\DlgFontBold8}Files in Use")
|
||||
inuse.text("Description", 20, 23, 280, 20, 0x30003,
|
||||
"Some files that need to be updated are currently in use.")
|
||||
inuse.text("Text", 20, 55, 330, 50, 3,
|
||||
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
|
||||
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
|
||||
None, None, None)
|
||||
c=inuse.back("Exit", "Ignore", name="Exit")
|
||||
c.event("EndDialog", "Exit")
|
||||
c=inuse.next("Ignore", "Retry", name="Ignore")
|
||||
c.event("EndDialog", "Ignore")
|
||||
c=inuse.cancel("Retry", "Exit", name="Retry")
|
||||
c.event("EndDialog","Retry")
|
||||
|
||||
# See "Error Dialog". See "ICE20" for the required names of the controls.
|
||||
error = Dialog(db, "ErrorDlg",
|
||||
50, 10, 330, 101,
|
||||
65543, # Error|Minimize|Modal|Visible
|
||||
title,
|
||||
"ErrorText", None, None)
|
||||
error.text("ErrorText", 50,9,280,48,3, "")
|
||||
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
|
||||
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
|
||||
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
|
||||
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
|
||||
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
|
||||
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
|
||||
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
|
||||
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
|
||||
|
||||
#####################################################################
|
||||
# Global "Query Cancel" dialog
|
||||
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
|
||||
"No", "No", "No")
|
||||
cancel.text("Text", 48, 15, 194, 30, 3,
|
||||
"Are you sure you want to cancel [ProductName] installation?")
|
||||
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
|
||||
# "py.ico", None, None)
|
||||
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
|
||||
c.event("EndDialog", "Exit")
|
||||
|
||||
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
|
||||
c.event("EndDialog", "Return")
|
||||
|
||||
#####################################################################
|
||||
# Global "Wait for costing" dialog
|
||||
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
|
||||
"Return", "Return", "Return")
|
||||
costing.text("Text", 48, 15, 194, 30, 3,
|
||||
"Please wait while the installer finishes determining your disk space requirements.")
|
||||
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
|
||||
c.event("EndDialog", "Exit")
|
||||
|
||||
#####################################################################
|
||||
# Preparation dialog: no user input except cancellation
|
||||
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
|
||||
"Cancel", "Cancel", "Cancel")
|
||||
prep.text("Description", 15, 70, 320, 40, 0x30003,
|
||||
"Please wait while the Installer prepares to guide you through the installation.")
|
||||
prep.title("Welcome to the [ProductName] Installer")
|
||||
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
|
||||
c.mapping("ActionText", "Text")
|
||||
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
|
||||
c.mapping("ActionData", "Text")
|
||||
prep.back("Back", None, active=0)
|
||||
prep.next("Next", None, active=0)
|
||||
c=prep.cancel("Cancel", None)
|
||||
c.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
#####################################################################
|
||||
# Feature (Python directory) selection
|
||||
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
|
||||
"Next", "Next", "Cancel")
|
||||
seldlg.title("Select Python Installations")
|
||||
|
||||
seldlg.text("Hint", 15, 30, 300, 20, 3,
|
||||
"Select the Python locations where %s should be installed."
|
||||
% self.distribution.get_fullname())
|
||||
|
||||
seldlg.back("< Back", None, active=0)
|
||||
c = seldlg.next("Next >", "Cancel")
|
||||
order = 1
|
||||
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
|
||||
for version in self.versions + [self.other_version]:
|
||||
order += 1
|
||||
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
|
||||
"FEATURE_SELECTED AND &Python%s=3" % version,
|
||||
ordering=order)
|
||||
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
|
||||
c.event("EndDialog", "Return", ordering=order + 2)
|
||||
c = seldlg.cancel("Cancel", "Features")
|
||||
c.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
|
||||
"FEATURE", None, "PathEdit", None)
|
||||
c.event("[FEATURE_SELECTED]", "1")
|
||||
ver = self.other_version
|
||||
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
|
||||
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
|
||||
|
||||
c = seldlg.text("Other", 15, 200, 300, 15, 3,
|
||||
"Provide an alternate Python location")
|
||||
c.condition("Enable", install_other_cond)
|
||||
c.condition("Show", install_other_cond)
|
||||
c.condition("Disable", dont_install_other_cond)
|
||||
c.condition("Hide", dont_install_other_cond)
|
||||
|
||||
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
|
||||
"TARGETDIR" + ver, None, "Next", None)
|
||||
c.condition("Enable", install_other_cond)
|
||||
c.condition("Show", install_other_cond)
|
||||
c.condition("Disable", dont_install_other_cond)
|
||||
c.condition("Hide", dont_install_other_cond)
|
||||
|
||||
#####################################################################
|
||||
# Disk cost
|
||||
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
|
||||
"OK", "OK", "OK", bitmap=False)
|
||||
cost.text("Title", 15, 6, 200, 15, 0x30003,
|
||||
"{\DlgFontBold8}Disk Space Requirements")
|
||||
cost.text("Description", 20, 20, 280, 20, 0x30003,
|
||||
"The disk space required for the installation of the selected features.")
|
||||
cost.text("Text", 20, 53, 330, 60, 3,
|
||||
"The highlighted volumes (if any) do not have enough disk space "
|
||||
"available for the currently selected features. You can either "
|
||||
"remove some files from the highlighted volumes, or choose to "
|
||||
"install less features onto local drive(s), or select different "
|
||||
"destination drive(s).")
|
||||
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
|
||||
None, "{120}{70}{70}{70}{70}", None, None)
|
||||
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
|
||||
|
||||
#####################################################################
|
||||
# WhichUsers Dialog. Only available on NT, and for privileged users.
|
||||
# This must be run before FindRelatedProducts, because that will
|
||||
# take into account whether the previous installation was per-user
|
||||
# or per-machine. We currently don't support going back to this
|
||||
# dialog after "Next" was selected; to support this, we would need to
|
||||
# find how to reset the ALLUSERS property, and how to re-run
|
||||
# FindRelatedProducts.
|
||||
# On Windows9x, the ALLUSERS property is ignored on the command line
|
||||
# and in the Property table, but installer fails according to the documentation
|
||||
# if a dialog attempts to set ALLUSERS.
|
||||
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
|
||||
"AdminInstall", "Next", "Cancel")
|
||||
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
|
||||
# A radio group with two options: allusers, justme
|
||||
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
|
||||
"WhichUsers", "", "Next")
|
||||
g.add("ALL", 0, 5, 150, 20, "Install for all users")
|
||||
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
|
||||
|
||||
whichusers.back("Back", None, active=0)
|
||||
|
||||
c = whichusers.next("Next >", "Cancel")
|
||||
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
|
||||
c.event("EndDialog", "Return", ordering = 2)
|
||||
|
||||
c = whichusers.cancel("Cancel", "AdminInstall")
|
||||
c.event("SpawnDialog", "CancelDlg")
|
||||
|
||||
#####################################################################
|
||||
# Installation Progress dialog (modeless)
|
||||
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
|
||||
"Cancel", "Cancel", "Cancel", bitmap=False)
|
||||
progress.text("Title", 20, 15, 200, 15, 0x30003,
|
||||
"{\DlgFontBold8}[Progress1] [ProductName]")
|
||||
progress.text("Text", 35, 65, 300, 30, 3,
|
||||
"Please wait while the Installer [Progress2] [ProductName]. "
|
||||
"This may take several minutes.")
|
||||
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
|
||||
|
||||
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
|
||||
c.mapping("ActionText", "Text")
|
||||
|
||||
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
|
||||
#c.mapping("ActionData", "Text")
|
||||
|
||||
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
|
||||
None, "Progress done", None, None)
|
||||
c.mapping("SetProgress", "Progress")
|
||||
|
||||
progress.back("< Back", "Next", active=False)
|
||||
progress.next("Next >", "Cancel", active=False)
|
||||
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
|
||||
|
||||
###################################################################
|
||||
# Maintenance type: repair/uninstall
|
||||
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
|
||||
"Next", "Next", "Cancel")
|
||||
maint.title("Welcome to the [ProductName] Setup Wizard")
|
||||
maint.text("BodyText", 15, 63, 330, 42, 3,
|
||||
"Select whether you want to repair or remove [ProductName].")
|
||||
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
|
||||
"MaintenanceForm_Action", "", "Next")
|
||||
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
|
||||
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
|
||||
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
|
||||
|
||||
maint.back("< Back", None, active=False)
|
||||
c=maint.next("Finish", "Cancel")
|
||||
# Change installation: Change progress dialog to "Change", then ask
|
||||
# for feature selection
|
||||
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
|
||||
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
|
||||
|
||||
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
|
||||
# Also set list of reinstalled features to "ALL"
|
||||
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
|
||||
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
|
||||
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
|
||||
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
|
||||
|
||||
# Uninstall: Change progress to "Remove", then invoke uninstall
|
||||
# Also set list of removed features to "ALL"
|
||||
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
|
||||
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
|
||||
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
|
||||
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
|
||||
|
||||
# Close dialog when maintenance action scheduled
|
||||
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
|
||||
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
|
||||
|
||||
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
|
||||
|
||||
def get_installer_filename(self, fullname):
    """Return the full path of the .msi file to create under dist_dir.

    Factored out to allow overriding in subclasses.  When a specific
    target Python version is set, it is embedded in the file name.
    """
    if self.target_version:
        base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
                                        self.target_version)
    else:
        base_name = "%s.%s.msi" % (fullname, self.plat_name)
    return os.path.join(self.dist_dir, base_name)
|
||||
342
Lib/packaging/command/bdist_wininst.py
Normal file
342
Lib/packaging/command/bdist_wininst.py
Normal file
|
|
@ -0,0 +1,342 @@
|
|||
"""Create an executable installer for Windows."""
|
||||
|
||||
# FIXME synchronize bytes/str use with same file in distutils
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
from shutil import rmtree
|
||||
from sysconfig import get_python_version
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingOptionError, PackagingPlatformError
|
||||
from packaging import logger
|
||||
from packaging.util import get_platform
|
||||
|
||||
|
||||
class bdist_wininst(Command):
|
||||
|
||||
description = "create an executable installer for Windows"
|
||||
|
||||
user_options = [('bdist-dir=', None,
|
||||
"temporary directory for creating the distribution"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to embed in generated filenames "
|
||||
"(default: %s)" % get_platform()),
|
||||
('keep-temp', 'k',
|
||||
"keep the pseudo-installation tree around after " +
|
||||
"creating the distribution archive"),
|
||||
('target-version=', None,
|
||||
"require a specific python version" +
|
||||
" on the target system"),
|
||||
('no-target-compile', 'c',
|
||||
"do not compile .py to .pyc on the target system"),
|
||||
('no-target-optimize', 'o',
|
||||
"do not compile .py to .pyo (optimized)"
|
||||
"on the target system"),
|
||||
('dist-dir=', 'd',
|
||||
"directory to put final built distributions in"),
|
||||
('bitmap=', 'b',
|
||||
"bitmap to use for the installer instead of python-powered logo"),
|
||||
('title=', 't',
|
||||
"title to display on the installer background instead of default"),
|
||||
('skip-build', None,
|
||||
"skip rebuilding everything (for testing/debugging)"),
|
||||
('install-script=', None,
|
||||
"basename of installation script to be run after"
|
||||
"installation or before deinstallation"),
|
||||
('pre-install-script=', None,
|
||||
"Fully qualified filename of a script to be run before "
|
||||
"any files are installed. This script need not be in the "
|
||||
"distribution"),
|
||||
('user-access-control=', None,
|
||||
"specify Vista's UAC handling - 'none'/default=no "
|
||||
"handling, 'auto'=use UAC if target Python installed for "
|
||||
"all users, 'force'=always use UAC"),
|
||||
]
|
||||
|
||||
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
|
||||
'skip-build']
|
||||
|
||||
def initialize_options(self):
    """Set every user option to its pre-parsing default."""
    # Directories; resolved later in finalize_options.
    self.bdist_dir = None
    self.dist_dir = None
    # Boolean flags all default to off.
    self.keep_temp = False
    self.no_target_compile = False
    self.no_target_optimize = False
    self.skip_build = False
    # Optional target / cosmetic settings.
    self.plat_name = None
    self.target_version = None
    self.bitmap = None
    self.title = None
    # Optional installation scripts.
    self.install_script = None
    self.pre_install_script = None
    self.user_access_control = None
|
||||
|
||||
|
||||
def finalize_options(self):
    """Compute defaults and validate option combinations.

    Raises PackagingOptionError when --target-version conflicts with
    the running interpreter (without --skip-build), or when
    --install-script does not name one of the distribution's scripts.
    """
    if self.bdist_dir is None:
        if self.skip_build and self.plat_name:
            # With --skip-build and an overridden plat_name, 'bdist'
            # would not see the correct name -- set it up manually so
            # the command is initialized using that name.
            bdist = self.distribution.get_command_obj('bdist')
            bdist.plat_name = self.plat_name
        bdist_base = self.get_finalized_command('bdist').bdist_base
        self.bdist_dir = os.path.join(bdist_base, 'wininst')

    if not self.target_version:
        self.target_version = ""

    if not self.skip_build and self.distribution.has_ext_modules():
        # Extensions force the target version to be the running one.
        short_version = get_python_version()
        if self.target_version and self.target_version != short_version:
            raise PackagingOptionError(
                "target version can only be %s, or the '--skip-build'"
                " option must be specified" % (short_version,))
        self.target_version = short_version

    self.set_undefined_options('bdist', 'dist_dir', 'plat_name')

    if self.install_script:
        known = [os.path.basename(script)
                 for script in self.distribution.scripts]
        if self.install_script not in known:
            raise PackagingOptionError(
                "install_script '%s' not found in scripts"
                % self.install_script)
|
||||
|
||||
def run(self):
    """Build the project, install it into a staging tree, and wrap the
    zipped tree in a self-extracting Windows installer executable."""
    # Extension modules can only be cross-built on Windows itself.
    if (sys.platform != "win32" and
            (self.distribution.has_ext_modules() or
             self.distribution.has_c_libraries())):
        raise PackagingPlatformError(
            "distribution contains extensions and/or C libraries; "
            "must be compiled on a Windows 32 platform")

    if not self.skip_build:
        self.run_command('build')

    install_cmd = self.get_reinitialized_command('install',
                                                 reinit_subcommands=True)
    install_cmd.root = self.bdist_dir
    install_cmd.skip_build = self.skip_build
    install_cmd.warn_dir = False
    install_cmd.plat_name = self.plat_name

    lib_cmd = self.get_reinitialized_command('install_lib')
    # pyc/pyo files must not be shipped in the installer
    lib_cmd.compile = False
    lib_cmd.optimize = 0

    if self.distribution.has_ext_modules():
        # When building an installer for another Python version, make
        # build_lib reflect that version rather than ours.  For
        # target_version != sys.version the build step was necessarily
        # skipped, so this cannot trigger a build of the wrong version.
        tgt_ver = self.target_version
        if not tgt_ver:
            assert self.skip_build, "Should have already checked this"
            tgt_ver = sys.version[0:3]
        plat_specifier = ".%s-%s" % (self.plat_name, tgt_ver)
        build_cmd = self.get_finalized_command('build')
        build_cmd.build_lib = os.path.join(build_cmd.build_base,
                                           'lib' + plat_specifier)

    # Use a custom scheme for the zip file: the real layout is only
    # decided at installation time on the target machine.
    for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
        value = key.upper()
        if key == 'headers':
            value = value + '/Include/$dist_name'
        setattr(install_cmd, 'install_' + key, value)

    logger.info("installing to %s", self.bdist_dir)
    install_cmd.ensure_finalized()

    # silence install_lib's warning about installing into a directory
    # that is not on sys.path
    sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
    install_cmd.run()
    del sys.path[0]

    # Zip the pseudo-installation tree and embed it in an exe.
    from tempfile import NamedTemporaryFile
    archive_basename = NamedTemporaryFile().name
    fullname = self.distribution.get_fullname()
    arcname = self.make_archive(archive_basename, "zip",
                                root_dir=self.bdist_dir)
    self.create_exe(arcname, fullname, self.bitmap)

    if self.distribution.has_ext_modules():
        pyversion = get_python_version()
    else:
        pyversion = 'any'
    self.distribution.dist_files.append(
        ('bdist_wininst', pyversion,
         self.get_installer_filename(fullname)))

    # the intermediate zip file is no longer needed
    logger.debug("removing temporary file '%s'", arcname)
    os.remove(arcname)

    if not self.keep_temp:
        if self.dry_run:
            logger.info('removing %s', self.bdist_dir)
        else:
            rmtree(self.bdist_dir)
|
||||
|
||||
def get_inidata(self):
    """Return the installer's configuration data as an INI-format string.

    Produces a [metadata] section (shown in the installer dialog) and a
    [Setup] section controlling the installer runtime.
    """
    metadata = self.distribution.metadata

    def escape(text):
        # newline characters would break the single-line INI values
        return text.replace("\n", "\\n")

    lines = ["[metadata]"]

    # 'info' is displayed in the installer's dialog box, describing the
    # items to be installed.
    info = (metadata.long_description or '') + '\n'

    for name in ["author", "author_email", "description", "maintainer",
                 "maintainer_email", "name", "url", "version"]:
        data = getattr(metadata, name, "")
        if data:
            info = info + ("\n %s: %s"
                           % (name.capitalize(), escape(data)))
            lines.append("%s=%s" % (name, escape(data)))

    # The [Setup] section contains entries controlling the installer
    # runtime behaviour.
    lines.append("\n[Setup]")
    if self.install_script:
        lines.append("install_script=%s" % self.install_script)
    lines.append("info=%s" % escape(info))
    lines.append("target_compile=%d" % (not self.no_target_compile))
    lines.append("target_optimize=%d" % (not self.no_target_optimize))
    if self.target_version:
        lines.append("target_version=%s" % self.target_version)
    if self.user_access_control:
        lines.append("user_access_control=%s" % self.user_access_control)

    title = self.title or self.distribution.get_fullname()
    lines.append("title=%s" % escape(title))

    import time
    import packaging
    build_info = "Built %s with packaging-%s" % (time.ctime(time.time()),
                                                 packaging.__version__)
    lines.append("build_info=%s" % build_info)
    return "\n".join(lines)
|
||||
|
||||
def create_exe(self, arcname, fullname, bitmap=None):
    """Write the final installer executable into dist_dir.

    The output file is: wininst stub + optional bitmap + mbcs-encoded
    config data + binary header + the zip archive *arcname*.

    :param arcname: path of the zip archive of the staged installation
    :param fullname: distribution full name, used for the output name
    :param bitmap: optional path of a bitmap shown by the installer
    """
    import struct

    self.mkpath(self.dist_dir)

    cfgdata = self.get_inidata()

    installer_name = self.get_installer_filename(fullname)
    logger.info("creating %s", installer_name)

    if bitmap:
        with open(bitmap, "rb") as fp:
            bitmapdata = fp.read()
        bitmaplen = len(bitmapdata)
    else:
        bitmaplen = 0

    with open(installer_name, "wb") as file:
        file.write(self.get_exe_bytes())
        if bitmap:
            file.write(bitmapdata)

        # Append the pre-install script while cfgdata is still text.
        # BUG FIX: the previous code encoded cfgdata to bytes first and
        # then concatenated str values ("\0" and the script text) to it,
        # which raises TypeError on Python 3.  Assemble the full text
        # first, then encode once.
        cfgdata = cfgdata + "\0"
        if self.pre_install_script:
            with open(self.pre_install_script) as fp:
                cfgdata = cfgdata + fp.read() + "\n\0"
        else:
            # empty pre-install script
            cfgdata = cfgdata + "\0"

        # Convert cfgdata from text to bytes; the installer stub expects
        # the mbcs (ANSI code page) encoding.
        cfgdata = cfgdata.encode("mbcs")
        file.write(cfgdata)

        # The 'magic number' 0x1234567B is used to make sure that the
        # binary layout of 'cfgdata' is what the wininst.exe binary
        # expects.  If the layout changes, increment that number, make
        # the corresponding changes to the wininst.exe sources, and
        # recompile them.
        header = struct.pack("<iii",
                             0x1234567B,  # tag
                             len(cfgdata),  # length
                             bitmaplen,  # number of bytes in bitmap
                             )
        file.write(header)
        with open(arcname, "rb") as fp:
            file.write(fp.read())
|
||||
|
||||
def get_installer_filename(self, fullname):
    """Return the path of the installer .exe under dist_dir.

    Factored out to allow overriding in subclasses.  If we create an
    installer for a specific python version, it's better to include
    this in the name.
    """
    if self.target_version:
        base = "%s.%s-py%s.exe" % (fullname, self.plat_name,
                                   self.target_version)
    else:
        base = "%s.%s.exe" % (fullname, self.plat_name)
    return os.path.join(self.dist_dir, base)
|
||||
|
||||
def get_exe_bytes(self):
    """Return the binary contents of the wininst stub executable.

    The stub must have been built with the same MSVC version used for
    the target Python.  For a target version other than the running
    one we cannot execute that interpreter to ask, so we hard-code
    knowledge of old versions' build compilers.
    NOTE: an alternative would be to let --target-version name a Python
    executable and query it for its real sys.version string.
    """
    from packaging.compiler.msvccompiler import get_build_version

    cur_version = get_python_version()
    if self.target_version and self.target_version != cur_version:
        if self.target_version > cur_version:
            # a *later* target version: assume it uses what we use
            # (string compares seem wrong, but are what sysconfig.py
            # itself uses)
            bv = get_build_version()
        elif self.target_version < "2.4":
            bv = 6.0
        else:
            bv = 7.1
    else:
        # for the current version, use the authoritative check
        bv = get_build_version()

    # wininst-x.y.exe is in the same directory as this file; we must use
    # one built with the same C compiler used for python.
    # XXX What about mingw, borland, and so on?
    directory = os.path.dirname(__file__)

    # plat_name starting with "win" but not equal to "win32" keeps its
    # tail as a suffix (e.g. "-amd64"); all other cases get no suffix.
    if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
        suffix = self.plat_name[3:]
    else:
        suffix = ''

    filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, suffix))
    with open(filename, "rb") as fp:
        return fp.read()
|
||||
151
Lib/packaging/command/build.py
Normal file
151
Lib/packaging/command/build.py
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
"""Main build command, which calls the other build_* commands."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
from packaging.util import get_platform
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingOptionError
|
||||
from packaging.compiler import show_compilers
|
||||
|
||||
|
||||
class build(Command):
    """Main build command; runs the relevant build_* sub-commands."""

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b',
         "base directory for build library"),
        ('build-purelib=', None,
         "build directory for platform-neutral distributions"),
        ('build-platlib=', None,
         "build directory for platform-specific distributions"),
        ('build-lib=', None,
         # fixed: help text was missing its closing parenthesis
         "build directory for all distribution (defaults to either "
         "build-purelib or build-platlib)"),
        ('build-scripts=', None,
         "build directory for scripts"),
        ('build-temp=', 't',
         "temporary build directory"),
        ('plat-name=', 'p',
         "platform name to build for, if supported "
         "(default: %s)" % get_platform()),
        ('compiler=', 'c',
         "specify the compiler type"),
        ('debug', 'g',
         "compile extensions and libraries with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e',
         "specify final destination interpreter path (build.py)"),
        ('use-2to3', None,
         "use 2to3 to make source python 3.x compatible"),
        ('convert-2to3-doctests', None,
         # fixed: "seperate" -> "separate"
         "use 2to3 to convert doctests in separate text files"),
        ('use-2to3-fixers', None,
         "list additional fixers opted for during 2to3 conversion"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        """Set all options to their pre-finalization defaults."""
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.plat_name = None
        self.debug = None
        self.force = False
        self.executable = None
        self.use_2to3 = False
        self.convert_2to3_doctests = None
        self.use_2to3_fixers = None

    def finalize_options(self):
        """Derive all build directories from build_base and plat_name.

        Raises PackagingOptionError if --plat-name is used on a
        non-Windows platform.
        """
        if self.plat_name is None:
            self.plat_name = get_platform()
        else:
            # plat-name only supported for windows (other platforms are
            # supported via ./configure flags, if at all).  Avoid misleading
            # other platforms.
            if os.name != 'nt':
                raise PackagingOptionError(
                    "--plat-name only supported on Windows (try "
                    "using './configure --help' on your platform)")

        plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])

        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
        # share the same build directories.  Doing so confuses the build
        # process for C modules.
        if hasattr(sys, 'gettotalrefcount'):
            plat_specifier += '-pydebug'

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory.  We only use one
        # of them for a given distribution, though.
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base,
                                              'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it,
        # pick one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.ext_modules:
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base,
                                           'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(self.build_base,
                                              'scripts-' + sys.version[0:3])

        if self.executable is None:
            self.executable = os.path.normpath(sys.executable)

    def run(self):
        """Run all relevant sub-commands.

        This will be some subset of:
         - build_py      - pure Python modules
         - build_clib    - standalone C libraries
         - build_ext     - Python extension modules
         - build_scripts - Python scripts
        """
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules(self):
        return self.distribution.has_pure_modules()

    def has_c_libraries(self):
        return self.distribution.has_c_libraries()

    def has_ext_modules(self):
        return self.distribution.has_ext_modules()

    def has_scripts(self):
        return self.distribution.has_scripts()

    sub_commands = [('build_py', has_pure_modules),
                    ('build_clib', has_c_libraries),
                    ('build_ext', has_ext_modules),
                    ('build_scripts', has_scripts),
                    ]
|
||||
198
Lib/packaging/command/build_clib.py
Normal file
198
Lib/packaging/command/build_clib.py
Normal file
|
|
@ -0,0 +1,198 @@
|
|||
"""Build C/C++ libraries.
|
||||
|
||||
This command is useful to build libraries that are included in the
|
||||
distribution and needed by extension modules.
|
||||
"""
|
||||
|
||||
# XXX this module has *lots* of code ripped-off quite transparently from
|
||||
# build_ext.py -- not surprisingly really, as the work required to build
|
||||
# a static library from a collection of C source files is not really all
|
||||
# that different from what's required to build a shared object file from
|
||||
# a collection of C source files. Nevertheless, I haven't done the
|
||||
# necessary refactoring to account for the overlap in code between the
|
||||
# two modules, mainly because a number of subtle details changed in the
|
||||
# cut 'n paste. Sigh.
|
||||
|
||||
import os
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingSetupError
|
||||
from packaging.compiler import customize_compiler
|
||||
from packaging import logger
|
||||
|
||||
|
||||
def show_compilers():
    """Print the list of available compilers (used by --help-compiler)."""
    # imported lazily to avoid a module-level dependency cycle
    from packaging.compiler import show_compilers as _show_compilers
    _show_compilers()
|
||||
|
||||
|
||||
class build_clib(Command):
|
||||
|
||||
description = "build C/C++ libraries used by extension modules"
|
||||
|
||||
user_options = [
|
||||
('build-clib=', 'b',
|
||||
"directory to build C/C++ libraries to"),
|
||||
('build-temp=', 't',
|
||||
"directory to put temporary build by-products"),
|
||||
('debug', 'g',
|
||||
"compile with debugging information"),
|
||||
('force', 'f',
|
||||
"forcibly build everything (ignore file timestamps)"),
|
||||
('compiler=', 'c',
|
||||
"specify the compiler type"),
|
||||
]
|
||||
|
||||
boolean_options = ['debug', 'force']
|
||||
|
||||
help_options = [
|
||||
('help-compiler', None,
|
||||
"list available compilers", show_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
    """Set every option to its pre-finalization default."""
    # Output and scratch directories; filled in by finalize_options.
    self.build_clib = None
    self.build_temp = None
    # List of libraries to build (taken from the distribution later).
    self.libraries = None
    # Compilation options shared by all libraries.
    self.include_dirs = None
    self.define = None
    self.undef = None
    self.debug = None
    self.force = False
    self.compiler = None
|
||||
|
||||
|
||||
def finalize_options(self):
|
||||
# This might be confusing: both build-clib and build-temp default
|
||||
# to build-temp as defined by the "build" command. This is because
|
||||
# I think that C libraries are really just temporary build
|
||||
# by-products, at least from the point of view of building Python
|
||||
# extensions -- but I want to keep my options open.
|
||||
self.set_undefined_options('build',
|
||||
('build_temp', 'build_clib'),
|
||||
('build_temp', 'build_temp'),
|
||||
'compiler', 'debug', 'force')
|
||||
|
||||
self.libraries = self.distribution.libraries
|
||||
if self.libraries:
|
||||
self.check_library_list(self.libraries)
|
||||
|
||||
if self.include_dirs is None:
|
||||
self.include_dirs = self.distribution.include_dirs or []
|
||||
if isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
|
||||
# XXX same as for build_ext -- what about 'self.define' and
|
||||
# 'self.undef' ?
|
||||
|
||||
def run(self):
|
||||
if not self.libraries:
|
||||
return
|
||||
|
||||
# Yech -- this is cut 'n pasted from build_ext.py!
|
||||
from packaging.compiler import new_compiler
|
||||
self.compiler = new_compiler(compiler=self.compiler,
|
||||
dry_run=self.dry_run,
|
||||
force=self.force)
|
||||
customize_compiler(self.compiler)
|
||||
|
||||
if self.include_dirs is not None:
|
||||
self.compiler.set_include_dirs(self.include_dirs)
|
||||
if self.define is not None:
|
||||
# 'define' option is a list of (name,value) tuples
|
||||
for name, value in self.define:
|
||||
self.compiler.define_macro(name, value)
|
||||
if self.undef is not None:
|
||||
for macro in self.undef:
|
||||
self.compiler.undefine_macro(macro)
|
||||
|
||||
self.build_libraries(self.libraries)
|
||||
|
||||
|
||||
def check_library_list(self, libraries):
|
||||
"""Ensure that the list of libraries is valid.
|
||||
|
||||
`library` is presumably provided as a command option 'libraries'.
|
||||
This method checks that it is a list of 2-tuples, where the tuples
|
||||
are (library_name, build_info_dict).
|
||||
|
||||
Raise PackagingSetupError if the structure is invalid anywhere;
|
||||
just returns otherwise.
|
||||
"""
|
||||
if not isinstance(libraries, list):
|
||||
raise PackagingSetupError("'libraries' option must be a list of tuples")
|
||||
|
||||
for lib in libraries:
|
||||
if not isinstance(lib, tuple) and len(lib) != 2:
|
||||
raise PackagingSetupError("each element of 'libraries' must a 2-tuple")
|
||||
|
||||
name, build_info = lib
|
||||
|
||||
if not isinstance(name, str):
|
||||
raise PackagingSetupError("first element of each tuple in 'libraries' " + \
|
||||
"must be a string (the library name)")
|
||||
if '/' in name or (os.sep != '/' and os.sep in name):
|
||||
raise PackagingSetupError(("bad library name '%s': " +
|
||||
"may not contain directory separators") % \
|
||||
lib[0])
|
||||
|
||||
if not isinstance(build_info, dict):
|
||||
raise PackagingSetupError("second element of each tuple in 'libraries' " + \
|
||||
"must be a dictionary (build info)")
|
||||
|
||||
def get_library_names(self):
|
||||
# Assume the library list is valid -- 'check_library_list()' is
|
||||
# called from 'finalize_options()', so it should be!
|
||||
if not self.libraries:
|
||||
return None
|
||||
|
||||
lib_names = []
|
||||
for lib_name, build_info in self.libraries:
|
||||
lib_names.append(lib_name)
|
||||
return lib_names
|
||||
|
||||
|
||||
def get_source_files(self):
|
||||
self.check_library_list(self.libraries)
|
||||
filenames = []
|
||||
for lib_name, build_info in self.libraries:
|
||||
sources = build_info.get('sources')
|
||||
if sources is None or not isinstance(sources, (list, tuple)):
|
||||
raise PackagingSetupError(("in 'libraries' option (library '%s'), "
|
||||
"'sources' must be present and must be "
|
||||
"a list of source filenames") % lib_name)
|
||||
|
||||
filenames.extend(sources)
|
||||
return filenames
|
||||
|
||||
def build_libraries(self, libraries):
|
||||
for lib_name, build_info in libraries:
|
||||
sources = build_info.get('sources')
|
||||
if sources is None or not isinstance(sources, (list, tuple)):
|
||||
raise PackagingSetupError(("in 'libraries' option (library '%s'), " +
|
||||
"'sources' must be present and must be " +
|
||||
"a list of source filenames") % lib_name)
|
||||
sources = list(sources)
|
||||
|
||||
logger.info("building '%s' library", lib_name)
|
||||
|
||||
# First, compile the source code to object files in the library
|
||||
# directory. (This should probably change to putting object
|
||||
# files in a temporary build directory.)
|
||||
macros = build_info.get('macros')
|
||||
include_dirs = build_info.get('include_dirs')
|
||||
objects = self.compiler.compile(sources,
|
||||
output_dir=self.build_temp,
|
||||
macros=macros,
|
||||
include_dirs=include_dirs,
|
||||
debug=self.debug)
|
||||
|
||||
# Now "link" the object files together into a static library.
|
||||
# (On Unix at least, this isn't really linking -- it just
|
||||
# builds an archive. Whatever.)
|
||||
self.compiler.create_static_lib(objects, lib_name,
|
||||
output_dir=self.build_clib,
|
||||
debug=self.debug)
|
||||
666
Lib/packaging/command/build_ext.py
Normal file
666
Lib/packaging/command/build_ext.py
Normal file
|
|
@ -0,0 +1,666 @@
|
|||
"""Build extension modules."""
|
||||
|
||||
# FIXME Is this module limited to C extensions or do C++ extensions work too?
|
||||
# The docstring of this module said that C++ was not supported, but other
|
||||
# comments contradict that.
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import logging
|
||||
import sysconfig
|
||||
|
||||
from packaging.util import get_platform
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import (CCompilerError, CompileError, PackagingError,
|
||||
PackagingPlatformError, PackagingSetupError)
|
||||
from packaging.compiler import customize_compiler, show_compilers
|
||||
from packaging.util import newer_group
|
||||
from packaging.compiler.extension import Extension
|
||||
from packaging import logger
|
||||
|
||||
import site
|
||||
HAS_USER_SITE = True
|
||||
|
||||
if os.name == 'nt':
|
||||
from packaging.compiler.msvccompiler import get_build_version
|
||||
MSVC_VERSION = int(get_build_version())
|
||||
|
||||
# An extension name is just a dot-separated list of Python NAMEs (ie.
|
||||
# the same as a fully-qualified module name).
|
||||
extension_name_re = re.compile \
|
||||
(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
|
||||
|
||||
|
||||
class build_ext(Command):
|
||||
|
||||
description = "build C/C++ extension modules (compile/link to build directory)"
|
||||
|
||||
# XXX thoughts on how to deal with complex command-line options like
|
||||
# these, i.e. how to make it so fancy_getopt can suck them off the
|
||||
# command line and make it look like setup.py defined the appropriate
|
||||
# lists of tuples of what-have-you.
|
||||
# - each command needs a callback to process its command-line options
|
||||
# - Command.__init__() needs access to its share of the whole
|
||||
# command line (must ultimately come from
|
||||
# Distribution.parse_command_line())
|
||||
# - it then calls the current command class' option-parsing
|
||||
# callback to deal with weird options like -D, which have to
|
||||
# parse the option text and churn out some custom data
|
||||
# structure
|
||||
# - that data structure (in this case, a list of 2-tuples)
|
||||
# will then be present in the command object by the time
|
||||
# we get to finalize_options() (i.e. the constructor
|
||||
# takes care of both command-line and client options
|
||||
# in between initialize_options() and finalize_options())
|
||||
|
||||
sep_by = " (separated by '%s')" % os.pathsep
|
||||
user_options = [
|
||||
('build-lib=', 'b',
|
||||
"directory for compiled extension modules"),
|
||||
('build-temp=', 't',
|
||||
"directory for temporary files (build by-products)"),
|
||||
('plat-name=', 'p',
|
||||
"platform name to cross-compile for, if supported "
|
||||
"(default: %s)" % get_platform()),
|
||||
('inplace', 'i',
|
||||
"ignore build-lib and put compiled extensions into the source " +
|
||||
"directory alongside your pure Python modules"),
|
||||
('include-dirs=', 'I',
|
||||
"list of directories to search for header files" + sep_by),
|
||||
('define=', 'D',
|
||||
"C preprocessor macros to define"),
|
||||
('undef=', 'U',
|
||||
"C preprocessor macros to undefine"),
|
||||
('libraries=', 'l',
|
||||
"external C libraries to link with"),
|
||||
('library-dirs=', 'L',
|
||||
"directories to search for external C libraries" + sep_by),
|
||||
('rpath=', 'R',
|
||||
"directories to search for shared C libraries at runtime"),
|
||||
('link-objects=', 'O',
|
||||
"extra explicit link objects to include in the link"),
|
||||
('debug', 'g',
|
||||
"compile/link with debugging information"),
|
||||
('force', 'f',
|
||||
"forcibly build everything (ignore file timestamps)"),
|
||||
('compiler=', 'c',
|
||||
"specify the compiler type"),
|
||||
('swig-opts=', None,
|
||||
"list of SWIG command-line options"),
|
||||
('swig=', None,
|
||||
"path to the SWIG executable"),
|
||||
]
|
||||
|
||||
boolean_options = ['inplace', 'debug', 'force']
|
||||
|
||||
if HAS_USER_SITE:
|
||||
user_options.append(('user', None,
|
||||
"add user include, library and rpath"))
|
||||
boolean_options.append('user')
|
||||
|
||||
help_options = [
|
||||
('help-compiler', None,
|
||||
"list available compilers", show_compilers),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.extensions = None
|
||||
self.build_lib = None
|
||||
self.plat_name = None
|
||||
self.build_temp = None
|
||||
self.inplace = False
|
||||
self.package = None
|
||||
|
||||
self.include_dirs = None
|
||||
self.define = None
|
||||
self.undef = None
|
||||
self.libraries = None
|
||||
self.library_dirs = None
|
||||
self.rpath = None
|
||||
self.link_objects = None
|
||||
self.debug = None
|
||||
self.force = None
|
||||
self.compiler = None
|
||||
self.swig = None
|
||||
self.swig_opts = None
|
||||
if HAS_USER_SITE:
|
||||
self.user = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build',
|
||||
'build_lib', 'build_temp', 'compiler',
|
||||
'debug', 'force', 'plat_name')
|
||||
|
||||
if self.package is None:
|
||||
self.package = self.distribution.ext_package
|
||||
|
||||
# Ensure that the list of extensions is valid, i.e. it is a list of
|
||||
# Extension objects.
|
||||
self.extensions = self.distribution.ext_modules
|
||||
if self.extensions:
|
||||
if not isinstance(self.extensions, (list, tuple)):
|
||||
type_name = (self.extensions is None and 'None'
|
||||
or type(self.extensions).__name__)
|
||||
raise PackagingSetupError(
|
||||
"'ext_modules' must be a sequence of Extension instances,"
|
||||
" not %s" % (type_name,))
|
||||
for i, ext in enumerate(self.extensions):
|
||||
if isinstance(ext, Extension):
|
||||
continue # OK! (assume type-checking done
|
||||
# by Extension constructor)
|
||||
type_name = (ext is None and 'None' or type(ext).__name__)
|
||||
raise PackagingSetupError(
|
||||
"'ext_modules' item %d must be an Extension instance,"
|
||||
" not %s" % (i, type_name))
|
||||
|
||||
# Make sure Python's include directories (for Python.h, pyconfig.h,
|
||||
# etc.) are in the include search path.
|
||||
py_include = sysconfig.get_path('include')
|
||||
plat_py_include = sysconfig.get_path('platinclude')
|
||||
if self.include_dirs is None:
|
||||
self.include_dirs = self.distribution.include_dirs or []
|
||||
if isinstance(self.include_dirs, str):
|
||||
self.include_dirs = self.include_dirs.split(os.pathsep)
|
||||
|
||||
# Put the Python "system" include dir at the end, so that
|
||||
# any local include dirs take precedence.
|
||||
self.include_dirs.append(py_include)
|
||||
if plat_py_include != py_include:
|
||||
self.include_dirs.append(plat_py_include)
|
||||
|
||||
if isinstance(self.libraries, str):
|
||||
self.libraries = [self.libraries]
|
||||
|
||||
# Life is easier if we're not forever checking for None, so
|
||||
# simplify these options to empty lists if unset
|
||||
if self.libraries is None:
|
||||
self.libraries = []
|
||||
if self.library_dirs is None:
|
||||
self.library_dirs = []
|
||||
elif isinstance(self.library_dirs, str):
|
||||
self.library_dirs = self.library_dirs.split(os.pathsep)
|
||||
|
||||
if self.rpath is None:
|
||||
self.rpath = []
|
||||
elif isinstance(self.rpath, str):
|
||||
self.rpath = self.rpath.split(os.pathsep)
|
||||
|
||||
# for extensions under windows use different directories
|
||||
# for Release and Debug builds.
|
||||
# also Python's library directory must be appended to library_dirs
|
||||
if os.name == 'nt':
|
||||
# the 'libs' directory is for binary installs - we assume that
|
||||
# must be the *native* platform. But we don't really support
|
||||
# cross-compiling via a binary install anyway, so we let it go.
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
|
||||
if self.debug:
|
||||
self.build_temp = os.path.join(self.build_temp, "Debug")
|
||||
else:
|
||||
self.build_temp = os.path.join(self.build_temp, "Release")
|
||||
|
||||
# Append the source distribution include and library directories,
|
||||
# this allows distutils on windows to work in the source tree
|
||||
self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
|
||||
if MSVC_VERSION == 9:
|
||||
# Use the .lib files for the correct architecture
|
||||
if self.plat_name == 'win32':
|
||||
suffix = ''
|
||||
else:
|
||||
# win-amd64 or win-ia64
|
||||
suffix = self.plat_name[4:]
|
||||
new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
|
||||
if suffix:
|
||||
new_lib = os.path.join(new_lib, suffix)
|
||||
self.library_dirs.append(new_lib)
|
||||
|
||||
elif MSVC_VERSION == 8:
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix,
|
||||
'PC', 'VS8.0'))
|
||||
elif MSVC_VERSION == 7:
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix,
|
||||
'PC', 'VS7.1'))
|
||||
else:
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix,
|
||||
'PC', 'VC6'))
|
||||
|
||||
# OS/2 (EMX) doesn't support Debug vs Release builds, but has the
|
||||
# import libraries in its "Config" subdirectory
|
||||
if os.name == 'os2':
|
||||
self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
|
||||
|
||||
# for extensions under Cygwin and AtheOS Python's library directory must be
|
||||
# appended to library_dirs
|
||||
if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
|
||||
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
|
||||
# building third party extensions
|
||||
self.library_dirs.append(os.path.join(sys.prefix, "lib",
|
||||
"python" + sysconfig.get_python_version(),
|
||||
"config"))
|
||||
else:
|
||||
# building python standard extensions
|
||||
self.library_dirs.append(os.curdir)
|
||||
|
||||
# for extensions under Linux or Solaris with a shared Python library,
|
||||
# Python's library directory must be appended to library_dirs
|
||||
sysconfig.get_config_var('Py_ENABLE_SHARED')
|
||||
if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
|
||||
or sys.platform.startswith('sunos'))
|
||||
and sysconfig.get_config_var('Py_ENABLE_SHARED')):
|
||||
if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
|
||||
# building third party extensions
|
||||
self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
|
||||
else:
|
||||
# building python standard extensions
|
||||
self.library_dirs.append(os.curdir)
|
||||
|
||||
# The argument parsing will result in self.define being a string, but
|
||||
# it has to be a list of 2-tuples. All the preprocessor symbols
|
||||
# specified by the 'define' option will be set to '1'. Multiple
|
||||
# symbols can be separated with commas.
|
||||
|
||||
if self.define:
|
||||
defines = self.define.split(',')
|
||||
self.define = [(symbol, '1') for symbol in defines]
|
||||
|
||||
# The option for macros to undefine is also a string from the
|
||||
# option parsing, but has to be a list. Multiple symbols can also
|
||||
# be separated with commas here.
|
||||
if self.undef:
|
||||
self.undef = self.undef.split(',')
|
||||
|
||||
if self.swig_opts is None:
|
||||
self.swig_opts = []
|
||||
else:
|
||||
self.swig_opts = self.swig_opts.split(' ')
|
||||
|
||||
# Finally add the user include and library directories if requested
|
||||
if HAS_USER_SITE and self.user:
|
||||
user_include = os.path.join(site.USER_BASE, "include")
|
||||
user_lib = os.path.join(site.USER_BASE, "lib")
|
||||
if os.path.isdir(user_include):
|
||||
self.include_dirs.append(user_include)
|
||||
if os.path.isdir(user_lib):
|
||||
self.library_dirs.append(user_lib)
|
||||
self.rpath.append(user_lib)
|
||||
|
||||
def run(self):
|
||||
from packaging.compiler import new_compiler
|
||||
|
||||
# 'self.extensions', as supplied by setup.py, is a list of
|
||||
# Extension instances. See the documentation for Extension (in
|
||||
# distutils.extension) for details.
|
||||
if not self.extensions:
|
||||
return
|
||||
|
||||
# If we were asked to build any C/C++ libraries, make sure that the
|
||||
# directory where we put them is in the library search path for
|
||||
# linking extensions.
|
||||
if self.distribution.has_c_libraries():
|
||||
build_clib = self.get_finalized_command('build_clib')
|
||||
self.libraries.extend(build_clib.get_library_names() or [])
|
||||
self.library_dirs.append(build_clib.build_clib)
|
||||
|
||||
# Temporary kludge until we remove the verbose arguments and use
|
||||
# logging everywhere
|
||||
verbose = logger.getEffectiveLevel() >= logging.DEBUG
|
||||
|
||||
# Setup the CCompiler object that we'll use to do all the
|
||||
# compiling and linking
|
||||
self.compiler_obj = new_compiler(compiler=self.compiler,
|
||||
verbose=verbose,
|
||||
dry_run=self.dry_run,
|
||||
force=self.force)
|
||||
|
||||
customize_compiler(self.compiler_obj)
|
||||
# If we are cross-compiling, init the compiler now (if we are not
|
||||
# cross-compiling, init would not hurt, but people may rely on
|
||||
# late initialization of compiler even if they shouldn't...)
|
||||
if os.name == 'nt' and self.plat_name != get_platform():
|
||||
self.compiler_obj.initialize(self.plat_name)
|
||||
|
||||
# And make sure that any compile/link-related options (which might
|
||||
# come from the command line or from the setup script) are set in
|
||||
# that CCompiler object -- that way, they automatically apply to
|
||||
# all compiling and linking done here.
|
||||
if self.include_dirs is not None:
|
||||
self.compiler_obj.set_include_dirs(self.include_dirs)
|
||||
if self.define is not None:
|
||||
# 'define' option is a list of (name,value) tuples
|
||||
for name, value in self.define:
|
||||
self.compiler_obj.define_macro(name, value)
|
||||
if self.undef is not None:
|
||||
for macro in self.undef:
|
||||
self.compiler_obj.undefine_macro(macro)
|
||||
if self.libraries is not None:
|
||||
self.compiler_obj.set_libraries(self.libraries)
|
||||
if self.library_dirs is not None:
|
||||
self.compiler_obj.set_library_dirs(self.library_dirs)
|
||||
if self.rpath is not None:
|
||||
self.compiler_obj.set_runtime_library_dirs(self.rpath)
|
||||
if self.link_objects is not None:
|
||||
self.compiler_obj.set_link_objects(self.link_objects)
|
||||
|
||||
# Now actually compile and link everything.
|
||||
self.build_extensions()
|
||||
|
||||
def get_source_files(self):
|
||||
filenames = []
|
||||
|
||||
# Wouldn't it be neat if we knew the names of header files too...
|
||||
for ext in self.extensions:
|
||||
filenames.extend(ext.sources)
|
||||
|
||||
return filenames
|
||||
|
||||
def get_outputs(self):
|
||||
# And build the list of output (built) filenames. Note that this
|
||||
# ignores the 'inplace' flag, and assumes everything goes in the
|
||||
# "build" tree.
|
||||
outputs = []
|
||||
for ext in self.extensions:
|
||||
outputs.append(self.get_ext_fullpath(ext.name))
|
||||
return outputs
|
||||
|
||||
def build_extensions(self):
|
||||
for ext in self.extensions:
|
||||
try:
|
||||
self.build_extension(ext)
|
||||
except (CCompilerError, PackagingError, CompileError) as e:
|
||||
if not ext.optional:
|
||||
raise
|
||||
logger.warning('%s: building extension %r failed: %s',
|
||||
self.get_command_name(), ext.name, e)
|
||||
|
||||
def build_extension(self, ext):
|
||||
sources = ext.sources
|
||||
if sources is None or not isinstance(sources, (list, tuple)):
|
||||
raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " +
|
||||
"'sources' must be present and must be " +
|
||||
"a list of source filenames") % ext.name)
|
||||
sources = list(sources)
|
||||
|
||||
ext_path = self.get_ext_fullpath(ext.name)
|
||||
depends = sources + ext.depends
|
||||
if not (self.force or newer_group(depends, ext_path, 'newer')):
|
||||
logger.debug("skipping '%s' extension (up-to-date)", ext.name)
|
||||
return
|
||||
else:
|
||||
logger.info("building '%s' extension", ext.name)
|
||||
|
||||
# First, scan the sources for SWIG definition files (.i), run
|
||||
# SWIG on 'em to create .c files, and modify the sources list
|
||||
# accordingly.
|
||||
sources = self.swig_sources(sources, ext)
|
||||
|
||||
# Next, compile the source code to object files.
|
||||
|
||||
# XXX not honouring 'define_macros' or 'undef_macros' -- the
|
||||
# CCompiler API needs to change to accommodate this, and I
|
||||
# want to do one thing at a time!
|
||||
|
||||
# Two possible sources for extra compiler arguments:
|
||||
# - 'extra_compile_args' in Extension object
|
||||
# - CFLAGS environment variable (not particularly
|
||||
# elegant, but people seem to expect it and I
|
||||
# guess it's useful)
|
||||
# The environment variable should take precedence, and
|
||||
# any sensible compiler will give precedence to later
|
||||
# command-line args. Hence we combine them in order:
|
||||
extra_args = ext.extra_compile_args or []
|
||||
|
||||
macros = ext.define_macros[:]
|
||||
for undef in ext.undef_macros:
|
||||
macros.append((undef,))
|
||||
|
||||
objects = self.compiler_obj.compile(sources,
|
||||
output_dir=self.build_temp,
|
||||
macros=macros,
|
||||
include_dirs=ext.include_dirs,
|
||||
debug=self.debug,
|
||||
extra_postargs=extra_args,
|
||||
depends=ext.depends)
|
||||
|
||||
# XXX -- this is a Vile HACK!
|
||||
#
|
||||
# The setup.py script for Python on Unix needs to be able to
|
||||
# get this list so it can perform all the clean up needed to
|
||||
# avoid keeping object files around when cleaning out a failed
|
||||
# build of an extension module. Since Packaging does not
|
||||
# track dependencies, we have to get rid of intermediates to
|
||||
# ensure all the intermediates will be properly re-built.
|
||||
#
|
||||
self._built_objects = objects[:]
|
||||
|
||||
# Now link the object files together into a "shared object" --
|
||||
# of course, first we have to figure out all the other things
|
||||
# that go into the mix.
|
||||
if ext.extra_objects:
|
||||
objects.extend(ext.extra_objects)
|
||||
extra_args = ext.extra_link_args or []
|
||||
|
||||
# Detect target language, if not provided
|
||||
language = ext.language or self.compiler_obj.detect_language(sources)
|
||||
|
||||
self.compiler_obj.link_shared_object(
|
||||
objects, ext_path,
|
||||
libraries=self.get_libraries(ext),
|
||||
library_dirs=ext.library_dirs,
|
||||
runtime_library_dirs=ext.runtime_library_dirs,
|
||||
extra_postargs=extra_args,
|
||||
export_symbols=self.get_export_symbols(ext),
|
||||
debug=self.debug,
|
||||
build_temp=self.build_temp,
|
||||
target_lang=language)
|
||||
|
||||
|
||||
def swig_sources(self, sources, extension):
|
||||
"""Walk the list of source files in 'sources', looking for SWIG
|
||||
interface (.i) files. Run SWIG on all that are found, and
|
||||
return a modified 'sources' list with SWIG source files replaced
|
||||
by the generated C (or C++) files.
|
||||
"""
|
||||
new_sources = []
|
||||
swig_sources = []
|
||||
swig_targets = {}
|
||||
|
||||
# XXX this drops generated C/C++ files into the source tree, which
|
||||
# is fine for developers who want to distribute the generated
|
||||
# source -- but there should be an option to put SWIG output in
|
||||
# the temp dir.
|
||||
|
||||
if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts):
|
||||
target_ext = '.cpp'
|
||||
else:
|
||||
target_ext = '.c'
|
||||
|
||||
for source in sources:
|
||||
base, ext = os.path.splitext(source)
|
||||
if ext == ".i": # SWIG interface file
|
||||
new_sources.append(base + '_wrap' + target_ext)
|
||||
swig_sources.append(source)
|
||||
swig_targets[source] = new_sources[-1]
|
||||
else:
|
||||
new_sources.append(source)
|
||||
|
||||
if not swig_sources:
|
||||
return new_sources
|
||||
|
||||
swig = self.swig or self.find_swig()
|
||||
swig_cmd = [swig, "-python"]
|
||||
swig_cmd.extend(self.swig_opts)
|
||||
|
||||
# Do not override commandline arguments
|
||||
if not self.swig_opts:
|
||||
for o in extension.swig_opts:
|
||||
swig_cmd.append(o)
|
||||
|
||||
for source in swig_sources:
|
||||
target = swig_targets[source]
|
||||
logger.info("swigging %s to %s", source, target)
|
||||
self.spawn(swig_cmd + ["-o", target, source])
|
||||
|
||||
return new_sources
|
||||
|
||||
def find_swig(self):
|
||||
"""Return the name of the SWIG executable. On Unix, this is
|
||||
just "swig" -- it should be in the PATH. Tries a bit harder on
|
||||
Windows.
|
||||
"""
|
||||
|
||||
if os.name == "posix":
|
||||
return "swig"
|
||||
elif os.name == "nt":
|
||||
|
||||
# Look for SWIG in its standard installation directory on
|
||||
# Windows (or so I presume!). If we find it there, great;
|
||||
# if not, act like Unix and assume it's in the PATH.
|
||||
for vers in ("1.3", "1.2", "1.1"):
|
||||
fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
|
||||
if os.path.isfile(fn):
|
||||
return fn
|
||||
else:
|
||||
return "swig.exe"
|
||||
|
||||
elif os.name == "os2":
|
||||
# assume swig available in the PATH.
|
||||
return "swig.exe"
|
||||
|
||||
else:
|
||||
raise PackagingPlatformError(("I don't know how to find (much less run) SWIG "
|
||||
"on platform '%s'") % os.name)
|
||||
|
||||
# -- Name generators -----------------------------------------------
|
||||
# (extension names, filenames, whatever)
|
||||
def get_ext_fullpath(self, ext_name):
|
||||
"""Returns the path of the filename for a given extension.
|
||||
|
||||
The file is located in `build_lib` or directly in the package
|
||||
(inplace option).
|
||||
"""
|
||||
fullname = self.get_ext_fullname(ext_name)
|
||||
modpath = fullname.split('.')
|
||||
filename = self.get_ext_filename(modpath[-1])
|
||||
|
||||
if not self.inplace:
|
||||
# no further work needed
|
||||
# returning :
|
||||
# build_dir/package/path/filename
|
||||
filename = os.path.join(*modpath[:-1]+[filename])
|
||||
return os.path.join(self.build_lib, filename)
|
||||
|
||||
# the inplace option requires to find the package directory
|
||||
# using the build_py command for that
|
||||
package = '.'.join(modpath[0:-1])
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
package_dir = os.path.abspath(build_py.get_package_dir(package))
|
||||
|
||||
# returning
|
||||
# package_dir/filename
|
||||
return os.path.join(package_dir, filename)
|
||||
|
||||
def get_ext_fullname(self, ext_name):
|
||||
"""Returns the fullname of a given extension name.
|
||||
|
||||
Adds the `package.` prefix"""
|
||||
if self.package is None:
|
||||
return ext_name
|
||||
else:
|
||||
return self.package + '.' + ext_name
|
||||
|
||||
def get_ext_filename(self, ext_name):
|
||||
r"""Convert the name of an extension (eg. "foo.bar") into the name
|
||||
of the file from which it will be loaded (eg. "foo/bar.so", or
|
||||
"foo\bar.pyd").
|
||||
"""
|
||||
ext_path = ext_name.split('.')
|
||||
# OS/2 has an 8 character module (extension) limit :-(
|
||||
if os.name == "os2":
|
||||
ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
|
||||
# extensions in debug_mode are named 'module_d.pyd' under windows
|
||||
so_ext = sysconfig.get_config_var('SO')
|
||||
if os.name == 'nt' and self.debug:
|
||||
return os.path.join(*ext_path) + '_d' + so_ext
|
||||
return os.path.join(*ext_path) + so_ext
|
||||
|
||||
def get_export_symbols(self, ext):
|
||||
"""Return the list of symbols that a shared extension has to
|
||||
export. This either uses 'ext.export_symbols' or, if it's not
|
||||
provided, "init" + module_name. Only relevant on Windows, where
|
||||
the .pyd file (DLL) must export the module "init" function.
|
||||
"""
|
||||
initfunc_name = "init" + ext.name.split('.')[-1]
|
||||
if initfunc_name not in ext.export_symbols:
|
||||
ext.export_symbols.append(initfunc_name)
|
||||
return ext.export_symbols
|
||||
|
||||
def get_libraries(self, ext):
|
||||
"""Return the list of libraries to link against when building a
|
||||
shared extension. On most platforms, this is just 'ext.libraries';
|
||||
on Windows and OS/2, we add the Python library (eg. python20.dll).
|
||||
"""
|
||||
# The python library is always needed on Windows. For MSVC, this
|
||||
# is redundant, since the library is mentioned in a pragma in
|
||||
# pyconfig.h that MSVC groks. The other Windows compilers all seem
|
||||
# to need it mentioned explicitly, though, so that's what we do.
|
||||
# Append '_d' to the python import library on debug builds.
|
||||
if sys.platform == "win32":
|
||||
from packaging.compiler.msvccompiler import MSVCCompiler
|
||||
if not isinstance(self.compiler_obj, MSVCCompiler):
|
||||
template = "python%d%d"
|
||||
if self.debug:
|
||||
template = template + '_d'
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
# don't extend ext.libraries, it may be shared with other
|
||||
# extensions, it is a reference to the original list
|
||||
return ext.libraries + [pythonlib]
|
||||
else:
|
||||
return ext.libraries
|
||||
elif sys.platform == "os2emx":
|
||||
# EMX/GCC requires the python library explicitly, and I
|
||||
# believe VACPP does as well (though not confirmed) - AIM Apr01
|
||||
template = "python%d%d"
|
||||
# debug versions of the main DLL aren't supported, at least
|
||||
# not at this time - AIM Apr01
|
||||
#if self.debug:
|
||||
# template = template + '_d'
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
# don't extend ext.libraries, it may be shared with other
|
||||
# extensions, it is a reference to the original list
|
||||
return ext.libraries + [pythonlib]
|
||||
elif sys.platform[:6] == "cygwin":
|
||||
template = "python%d.%d"
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
# don't extend ext.libraries, it may be shared with other
|
||||
# extensions, it is a reference to the original list
|
||||
return ext.libraries + [pythonlib]
|
||||
elif sys.platform[:6] == "atheos":
|
||||
template = "python%d.%d"
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
# Get SHLIBS from Makefile
|
||||
extra = []
|
||||
for lib in sysconfig.get_config_var('SHLIBS').split():
|
||||
if lib.startswith('-l'):
|
||||
extra.append(lib[2:])
|
||||
else:
|
||||
extra.append(lib)
|
||||
# don't extend ext.libraries, it may be shared with other
|
||||
# extensions, it is a reference to the original list
|
||||
return ext.libraries + [pythonlib, "m"] + extra
|
||||
|
||||
elif sys.platform == 'darwin':
|
||||
# Don't use the default code below
|
||||
return ext.libraries
|
||||
|
||||
else:
|
||||
if sysconfig.get_config_var('Py_ENABLE_SHARED'):
|
||||
template = "python%d.%d"
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
return ext.libraries + [pythonlib]
|
||||
else:
|
||||
return ext.libraries
|
||||
410
Lib/packaging/command/build_py.py
Normal file
410
Lib/packaging/command/build_py.py
Normal file
|
|
@ -0,0 +1,410 @@
|
|||
"""Build pure Python modules (just copy to build directory)."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from glob import glob
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingOptionError, PackagingFileError
|
||||
from packaging.util import convert_path
|
||||
from packaging.compat import Mixin2to3
|
||||
|
||||
# marking public APIs
|
||||
__all__ = ['build_py']
|
||||
|
||||
class build_py(Command, Mixin2to3):
    """Build pure Python modules (copy them to the build directory).

    Modules come from two distribution options: 'packages' (whole
    packages at a time) and 'py_modules' (individual modules).  Copied
    sources may optionally be run through 2to3 and byte-compiled.
    """

    description = "build pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to build (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('use-2to3', None,
         "use 2to3 to make source python 3.x compatible"),
        ('convert-2to3-doctests', None,
         # fixed typo: "seperate" -> "separate"
         "use 2to3 to convert doctests in separate text files"),
        ('use-2to3-fixers', None,
         "list additional fixers opted for during 2to3 conversion"),
    ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = False
        self.optimize = 0
        self.force = None
        self._updated_files = []
        self._doctests_2to3 = []
        self.use_2to3 = False
        self.convert_2to3_doctests = None
        self.use_2to3_fixers = None

    def finalize_options(self):
        self.set_undefined_options('build',
                                   'use_2to3', 'use_2to3_fixers',
                                   'convert_2to3_doctests', 'build_lib',
                                   'force')

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = None
        if self.distribution.package_dir is not None:
            self.package_dir = convert_path(self.distribution.package_dir)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise PackagingOptionError("optimize must be 0, 1, or 2")

    def run(self):
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        if self.use_2to3 and self._updated_files:
            self.run_2to3(self._updated_files, self._doctests_2to3,
                          self.use_2to3_fixers)

        self.byte_compile(self.get_outputs(include_bytecode=False))

    # -- Top-level worker functions ------------------------------------

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples.

        Helper function for `finalize_options()`.
        """
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir) + 1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
            ]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'.

        Helper function for `get_data_files()`.
        """
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
            # Files that match more than one pattern are only added once
            files.extend(fn for fn in filelist if fn not in files)
        return files

    def build_package_data(self):
        """Copy data files into build directory.

        Helper function for `run()`.
        """
        # FIXME add tests for this method
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                srcfile = os.path.join(src_dir, filename)
                self.mkpath(os.path.dirname(target))
                outf, copied = self.copy_file(srcfile,
                                              target, preserve_mode=False)
                # Fixed: the attribute is 'convert_2to3_doctests' (see
                # initialize_options/finalize_options above); the original
                # 'convert_2to3.doctests' would raise AttributeError.
                if copied and srcfile in self.distribution.convert_2to3_doctests:
                    self._doctests_2to3.append(outf)

    # XXX - this should be moved to the Distribution class as it is not
    # only needed for build_py. It also has no dependencies on this class.
    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""

        path = package.split('.')
        if self.package_dir is not None:
            path.insert(0, self.package_dir)

        if path:
            return os.path.join(*path)

        return ''

    def check_package(self, package, package_dir):
        """Helper function for `find_package_modules()` and `find_modules()'.
        """
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise PackagingFileError(
                    "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise PackagingFileError(
                    "supposed package directory '%s' exists, "
                    "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                logger.warning(("package init file '%s' not found " +
                                "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        # Return True if 'module_file' exists as a regular file; warn
        # (and return False) otherwise.
        if not os.path.isfile(module_file):
            logger.warning("file %s (for module %s) not found",
                           module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        # Return (package, module, filename) tuples for all *.py files in
        # 'package_dir', excluding the setup script itself.
        self.check_package(package, package_dir)
        module_files = glob(os.path.join(package_dir, "*.py"))
        modules = []
        if self.distribution.script_name is not None:
            setup_script = os.path.abspath(self.distribution.script_name)
        else:
            setup_script = None

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                logger.debug("excluding %s", setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                package_dir, checked = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = False

            if not checked:
                init_py = self.check_package(package, package_dir)
                # use a real bool for 'checked' (was the integer 1)
                packages[package] = (package_dir, True)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        # All module sources plus all package data files.
        sources = [module[-1] for module in self.find_all_modules()]
        sources += [
            os.path.join(src_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames]
        return sources

    def get_module_outfile(self, build_dir, package, module):
        # 'package' must already be a sequence of path components here.
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=True):
        """Return the list of files that would be produced in build_lib,
        optionally including .pyc/.pyo byte-compiled files."""
        modules = self.find_all_modules()
        outputs = []
        for package, module, module_file in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(filename + "c")
                if self.optimize > 0:
                    outputs.append(filename + "o")

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames]

        return outputs

    def build_module(self, module, module_file, package):
        # Accept 'package' as a dotted string or a pre-split sequence.
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=False)

    def build_modules(self):
        modules = self.find_modules()
        for package, module, module_file in modules:

            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        for package in self.packages:

            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for package_, module, module_file in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        # sys.dont_write_bytecode always exists on Python 3, so the
        # original hasattr() guard was redundant.
        if sys.dont_write_bytecode:
            logger.warning('%s: byte-compiling is disabled, skipping.',
                           self.get_command_name())
            return

        from packaging.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.

        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
|
||||
132
Lib/packaging/command/build_scripts.py
Normal file
132
Lib/packaging/command/build_scripts.py
Normal file
|
|
@ -0,0 +1,132 @@
|
|||
"""Build scripts (copy to build dir and fix up shebang line)."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sysconfig
|
||||
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.util import convert_path, newer
|
||||
from packaging import logger
|
||||
from packaging.compat import Mixin2to3
|
||||
|
||||
|
||||
# check if Python is called on the first line with this expression
|
||||
first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
|
||||
|
||||
class build_scripts(Command, Mixin2to3):
    """Copy scripts to the build directory, fixing up shebang lines."""

    description = "build scripts (copy and fix up shebang line)"

    user_options = [
        ('build-dir=', 'd', "directory to build (copy) to"),
        ('force', 'f', "forcibly build everything (ignore file timestamps"),
        ('executable=', 'e', "specify final destination interpreter path"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        self.build_dir = None
        self.scripts = None
        self.force = None
        self.executable = None
        self.outfiles = None
        self.use_2to3 = False
        self.convert_2to3_doctests = None
        self.use_2to3_fixers = None

    def finalize_options(self):
        self.set_undefined_options('build',
                                   ('build_scripts', 'build_dir'),
                                   'use_2to3', 'use_2to3_fixers',
                                   'convert_2to3_doctests', 'force',
                                   'executable')
        self.scripts = self.distribution.scripts

    def get_source_files(self):
        return self.scripts

    def run(self):
        if not self.scripts:
            return
        copied_files = self.copy_scripts()
        if self.use_2to3 and copied_files:
            # NOTE(review): build_py calls 'run_2to3' while this calls
            # '_run_2to3' -- confirm which name Mixin2to3 actually defines.
            self._run_2to3(copied_files, fixers=self.use_2to3_fixers)

    def copy_scripts(self):
        """Copy each script listed in 'self.scripts'; if it's marked as a
        Python script in the Unix way (first line matches 'first_line_re',
        ie. starts with "#!" and contains "python"), then adjust the first
        line to refer to the current Python interpreter as we copy.

        Returns the list of output file names.
        """
        self.mkpath(self.build_dir)
        outfiles = []
        for script in self.scripts:
            adjust = False
            script = convert_path(script)
            outfile = os.path.join(self.build_dir, os.path.basename(script))
            outfiles.append(outfile)

            if not self.force and not newer(script, outfile):
                logger.debug("not copying %s (up-to-date)", script)
                continue

            # Always open the file, but ignore failures in dry-run mode --
            # that way, we'll get accurate feedback if we can read the
            # script.
            try:
                f = open(script, "r")
            except IOError:
                if not self.dry_run:
                    raise
                f = None

            # The 'finally' below guarantees 'f' is closed on every path;
            # the original code leaked it when the script was empty.
            try:
                if f is not None:
                    first_line = f.readline()
                    if not first_line:
                        logger.warning('%s: %s is an empty file (skipping)',
                                       self.get_command_name(), script)
                        continue

                    match = first_line_re.match(first_line)
                    if match:
                        adjust = True
                        post_interp = match.group(1) or ''

                if adjust:
                    logger.info("copying and adjusting %s -> %s", script,
                                self.build_dir)
                    if not self.dry_run:
                        # Pick the interpreter path for the new shebang:
                        # the configured executable, or -- when running
                        # from an uninstalled Python build -- the built
                        # interpreter inside BINDIR.
                        if not sysconfig.is_python_build():
                            executable = self.executable
                        else:
                            executable = os.path.join(
                                sysconfig.get_config_var("BINDIR"),
                                "python%s%s" % (
                                    sysconfig.get_config_var("VERSION"),
                                    sysconfig.get_config_var("EXE")))
                        # 'with' closes the output file even if a write
                        # fails (the original left it open on error).
                        with open(outfile, "w") as outf:
                            outf.write("#!%s%s\n" % (executable, post_interp))
                            outf.writelines(f.readlines())
                else:
                    self.copy_file(script, outfile)
            finally:
                if f is not None:
                    f.close()

        if os.name == 'posix':
            for file in outfiles:
                if self.dry_run:
                    logger.info("changing mode of %s", file)
                else:
                    # make the copied scripts world-readable/executable,
                    # preserving any extra permission bits already set
                    oldmode = os.stat(file).st_mode & 0o7777
                    newmode = (oldmode | 0o555) & 0o7777
                    if newmode != oldmode:
                        logger.info("changing mode of %s from %o to %o",
                                    file, oldmode, newmode)
                        os.chmod(file, newmode)
        return outfiles
|
||||
88
Lib/packaging/command/check.py
Normal file
88
Lib/packaging/command/check.py
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
"""Check PEP compliance of metadata."""
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingSetupError
|
||||
from packaging.util import resolve_name
|
||||
|
||||
class check(Command):
    """Verify that the distribution's metadata complies with the PEPs."""

    description = "check PEP compliance of metadata"

    user_options = [('metadata', 'm', 'Verify metadata'),
                    ('all', 'a',
                     ('runs extended set of checks')),
                    ('strict', 's',
                     'Will exit with an error if a check fails')]

    boolean_options = ['metadata', 'all', 'strict']

    def initialize_options(self):
        """Sets default values for options."""
        self.all = False
        self.metadata = True
        self.strict = False
        self._warnings = []

    def finalize_options(self):
        pass

    def warn(self, msg, *args):
        """Wrapper around logging that also remembers messages."""
        # XXX we could use a special handler for this, but would need to test
        # if it works even if the logger has a too high level
        self._warnings.append((msg, args))
        # Fixed: the original concatenated the command name and the message
        # with no separator, producing e.g. "checkmissing required metadata".
        return logger.warning('%s: %s' % (self.get_command_name(), msg),
                              *args)

    def run(self):
        """Runs the command."""
        # perform the various tests
        if self.metadata:
            self.check_metadata()
        if self.all:
            self.check_restructuredtext()
            self.check_hooks_resolvable()

        # let's raise an error in strict mode, if we have at least
        # one warning
        if self.strict and len(self._warnings) > 0:
            msg = '\n'.join(msg % args for msg, args in self._warnings)
            raise PackagingSetupError(msg)

    def check_metadata(self):
        """Ensures that all required elements of metadata are supplied.

        name, version, URL, author

        Warns if any are missing.
        """
        missing, warnings = self.distribution.metadata.check(strict=True)
        if missing:
            self.warn('missing required metadata: %s', ', '.join(missing))
        for warning in warnings:
            self.warn(warning)

    def check_restructuredtext(self):
        """Checks if the long string fields are reST-compliant."""
        missing, warnings = self.distribution.metadata.check(
            restructuredtext=True)
        if self.distribution.metadata.docutils_support:
            for warning in warnings:
                line = warning[-1].get('line')
                if line is None:
                    warning = warning[1]
                else:
                    warning = '%s (line %s)' % (warning[1], line)
                self.warn(warning)
        elif self.strict:
            raise PackagingSetupError('The docutils package is needed.')

    def check_hooks_resolvable(self):
        """Warn about pre/post command hooks that cannot be imported."""
        for options in self.distribution.command_options.values():
            for hook_kind in ("pre_hook", "post_hook"):
                if hook_kind not in options:
                    # Fixed: was 'break', which skipped checking post_hook
                    # whenever pre_hook was absent for a command.
                    continue
                for hook_name in options[hook_kind][1].values():
                    try:
                        resolve_name(hook_name)
                    except ImportError:
                        self.warn('name %r cannot be resolved', hook_name)
|
||||
76
Lib/packaging/command/clean.py
Normal file
76
Lib/packaging/command/clean.py
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
"""Clean up temporary files created by the build command."""
|
||||
|
||||
# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
|
||||
|
||||
import os
|
||||
from shutil import rmtree
|
||||
from packaging.command.cmd import Command
|
||||
from packaging import logger
|
||||
|
||||
class clean(Command):
    """Remove the temporary files that the 'build' command leaves behind."""

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b',
         "base build directory (default: 'build.build-base')"),
        ('build-lib=', None,
         "build directory for all modules (default: 'build.build-lib')"),
        ('build-temp=', 't',
         "temporary build directory (default: 'build.build-temp')"),
        ('build-scripts=', None,
         "build directory for scripts (default: 'build.build-scripts')"),
        ('bdist-base=', None,
         "temporary directory for built distributions"),
        ('all', 'a',
         "remove all build output, not just temporary by-products")
    ]

    boolean_options = ['all']

    def initialize_options(self):
        # every option starts out undefined; finalize_options fills in
        # the build-related ones from the 'build' and 'bdist' commands
        for attr in ('build_base', 'build_lib', 'build_temp',
                     'build_scripts', 'bdist_base', 'all'):
            setattr(self, attr, None)

    def finalize_options(self):
        self.set_undefined_options('build', 'build_base', 'build_lib',
                                   'build_scripts', 'build_temp')
        self.set_undefined_options('bdist', 'bdist_base')

    def run(self):
        # The build/temp.<plat> tree is always fair game (unless it is
        # already gone).
        if not os.path.exists(self.build_temp):
            logger.debug("'%s' does not exist -- can't clean it",
                         self.build_temp)
        elif self.dry_run:
            logger.info('removing %s', self.build_temp)
        else:
            rmtree(self.build_temp)

        if self.all:
            # with --all, the finished build output goes too
            for directory in (self.build_lib,
                              self.bdist_base,
                              self.build_scripts):
                if not os.path.exists(directory):
                    logger.warning("'%s' does not exist -- can't clean it",
                                   directory)
                elif self.dry_run:
                    logger.info('removing %s', directory)
                else:
                    rmtree(directory)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
            except OSError:
                pass
            else:
                logger.info("removing '%s'", self.build_base)
|
||||
440
Lib/packaging/command/cmd.py
Normal file
440
Lib/packaging/command/cmd.py
Normal file
|
|
@ -0,0 +1,440 @@
|
|||
"""Base class for commands."""
|
||||
|
||||
import os
|
||||
import re
|
||||
from shutil import copyfile, move, make_archive
|
||||
from packaging import util
|
||||
from packaging import logger
|
||||
from packaging.errors import PackagingOptionError
|
||||
|
||||
|
||||
class Command:
|
||||
"""Abstract base class for defining command classes, the "worker bees"
|
||||
of the Packaging. A useful analogy for command classes is to think of
|
||||
them as subroutines with local variables called "options". The options
|
||||
are "declared" in 'initialize_options()' and "defined" (given their
|
||||
final values, aka "finalized") in 'finalize_options()', both of which
|
||||
must be defined by every command class. The distinction between the
|
||||
two is necessary because option values might come from the outside
|
||||
world (command line, config file, ...), and any options dependent on
|
||||
other options must be computed *after* these outside influences have
|
||||
been processed -- hence 'finalize_options()'. The "body" of the
|
||||
subroutine, where it does all its work based on the values of its
|
||||
options, is the 'run()' method, which must also be implemented by every
|
||||
command class.
|
||||
"""
|
||||
|
||||
# 'sub_commands' formalizes the notion of a "family" of commands,
|
||||
# eg. "install_dist" as the parent with sub-commands "install_lib",
|
||||
# "install_headers", etc. The parent of a family of commands
|
||||
# defines 'sub_commands' as a class attribute; it's a list of
|
||||
# (command_name : string, predicate : unbound_method | string | None)
|
||||
# tuples, where 'predicate' is a method of the parent command that
|
||||
# determines whether the corresponding command is applicable in the
|
||||
# current situation. (Eg. we "install_headers" is only applicable if
|
||||
# we have any C header files to install.) If 'predicate' is None,
|
||||
# that command is always applicable.
|
||||
#
|
||||
# 'sub_commands' is usually defined at the *end* of a class, because
|
||||
# predicates can be unbound methods, so they must already have been
|
||||
# defined. The canonical example is the "install_dist" command.
|
||||
sub_commands = []
|
||||
|
||||
# Pre and post command hooks are run just before or just after the command
|
||||
# itself. They are simple functions that receive the command instance. They
|
||||
# are specified as callable objects or dotted strings (for lazy loading).
|
||||
pre_hook = None
|
||||
post_hook = None
|
||||
|
||||
# -- Creation/initialization methods -------------------------------
|
||||
|
||||
def __init__(self, dist):
|
||||
"""Create and initialize a new Command object. Most importantly,
|
||||
invokes the 'initialize_options()' method, which is the real
|
||||
initializer and depends on the actual command being instantiated.
|
||||
"""
|
||||
# late import because of mutual dependence between these classes
|
||||
from packaging.dist import Distribution
|
||||
|
||||
if not isinstance(dist, Distribution):
|
||||
raise TypeError("dist must be a Distribution instance")
|
||||
if self.__class__ is Command:
|
||||
raise RuntimeError("Command is an abstract class")
|
||||
|
||||
self.distribution = dist
|
||||
self.initialize_options()
|
||||
|
||||
# Per-command versions of the global flags, so that the user can
|
||||
# customize Packaging' behaviour command-by-command and let some
|
||||
# commands fall back on the Distribution's behaviour. None means
|
||||
# "not defined, check self.distribution's copy", while 0 or 1 mean
|
||||
# false and true (duh). Note that this means figuring out the real
|
||||
# value of each flag is a touch complicated -- hence "self._dry_run"
|
||||
# will be handled by a property, below.
|
||||
# XXX This needs to be fixed. [I changed it to a property--does that
|
||||
# "fix" it?]
|
||||
self._dry_run = None
|
||||
|
||||
# Some commands define a 'self.force' option to ignore file
|
||||
# timestamps, but methods defined *here* assume that
|
||||
# 'self.force' exists for all commands. So define it here
|
||||
# just to be safe.
|
||||
self.force = None
|
||||
|
||||
# The 'help' flag is just used for command line parsing, so
|
||||
# none of that complicated bureaucracy is needed.
|
||||
self.help = False
|
||||
|
||||
# 'finalized' records whether or not 'finalize_options()' has been
|
||||
# called. 'finalize_options()' itself should not pay attention to
|
||||
# this flag: it is the business of 'ensure_finalized()', which
|
||||
# always calls 'finalize_options()', to respect/update it.
|
||||
self.finalized = False
|
||||
|
||||
# XXX A more explicit way to customize dry_run would be better.
|
||||
@property
|
||||
def dry_run(self):
|
||||
if self._dry_run is None:
|
||||
return getattr(self.distribution, 'dry_run')
|
||||
else:
|
||||
return self._dry_run
|
||||
|
||||
def ensure_finalized(self):
|
||||
if not self.finalized:
|
||||
self.finalize_options()
|
||||
self.finalized = True
|
||||
|
||||
# Subclasses must define:
|
||||
# initialize_options()
|
||||
# provide default values for all options; may be customized by
|
||||
# setup script, by options from config file(s), or by command-line
|
||||
# options
|
||||
# finalize_options()
|
||||
# decide on the final values for all options; this is called
|
||||
# after all possible intervention from the outside world
|
||||
# (command line, option file, etc.) has been processed
|
||||
# run()
|
||||
# run the command: do whatever it is we're here to do,
|
||||
# controlled by the command's various option values
|
||||
|
||||
def initialize_options(self):
|
||||
"""Set default values for all the options that this command
|
||||
supports. Note that these defaults may be overridden by other
|
||||
commands, by the setup script, by config files, or by the
|
||||
command line. Thus, this is not the place to code dependencies
|
||||
between options; generally, 'initialize_options()' implementations
|
||||
are just a bunch of "self.foo = None" assignments.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__)
|
||||
|
||||
def finalize_options(self):
|
||||
"""Set final values for all the options that this command supports.
|
||||
This is always called as late as possible, ie. after any option
|
||||
assignments from the command line or from other commands have been
|
||||
done. Thus, this is the place to code option dependencies: if
|
||||
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
|
||||
long as 'foo' still has the same value it was assigned in
|
||||
'initialize_options()'.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__)
|
||||
|
||||
    def dump_options(self, header=None, indent=""):
        """Log (via logger.info) the current value of every user option.

        'header' defaults to "command options for '<name>':"; 'indent'
        is prepended to every emitted line.
        """
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        logger.info(indent + header)
        indent = indent + "  "
        # Options listed in 'negative_opt' invert another option, so they
        # carry no value of their own and are skipped.
        negative_opt = getattr(self, 'negative_opt', ())
        for option, _, _ in self.user_options:
            if option in negative_opt:
                continue
            option = option.replace('-', '_')
            # A trailing '=' marks an option that takes an argument;
            # strip it to recover the attribute name.
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            logger.info(indent + "%s = %s", option, value)
|
||||
|
||||
def run(self):
|
||||
"""A command's raison d'etre: carry out the action it exists to
|
||||
perform, controlled by the options initialized in
|
||||
'initialize_options()', customized by other commands, the setup
|
||||
script, the command line and config files, and finalized in
|
||||
'finalize_options()'. All terminal output and filesystem
|
||||
interaction should be done by 'run()'.
|
||||
|
||||
This method must be implemented by all command classes.
|
||||
"""
|
||||
raise RuntimeError(
|
||||
"abstract method -- subclass %s must override" % self.__class__)
|
||||
|
||||
# -- External interface --------------------------------------------
|
||||
# (called by outsiders)
|
||||
|
||||
def get_source_files(self):
|
||||
"""Return the list of files that are used as inputs to this command,
|
||||
i.e. the files used to generate the output files. The result is used
|
||||
by the `sdist` command in determining the set of default files.
|
||||
|
||||
Command classes should implement this method if they operate on files
|
||||
from the source tree.
|
||||
"""
|
||||
return []
|
||||
|
||||
def get_outputs(self):
|
||||
"""Return the list of files that would be produced if this command
|
||||
were actually run. Not affected by the "dry-run" flag or whether
|
||||
any other commands have been run.
|
||||
|
||||
Command classes should implement this method if they produce any
|
||||
output files that get consumed by another command. e.g., `build_ext`
|
||||
returns the list of built extension modules, but not any temporary
|
||||
files used in the compilation process.
|
||||
"""
|
||||
return []
|
||||
|
||||
# -- Option validation methods -------------------------------------
|
||||
# (these are very handy in writing the 'finalize_options()' method)
|
||||
#
|
||||
# NB. the general philosophy here is to ensure that a particular option
|
||||
# value meets certain type and value constraints. If not, we try to
|
||||
# force it into conformance (eg. if we expect a list but have a string,
|
||||
# split the string on comma and/or whitespace). If we can't force the
|
||||
# option into conformance, raise PackagingOptionError. Thus, command
|
||||
# classes need do nothing more than (eg.)
|
||||
# self.ensure_string_list('foo')
|
||||
# and they can be guaranteed that thereafter, self.foo will be
|
||||
# a list of strings.
|
||||
|
||||
def _ensure_stringlike(self, option, what, default=None):
|
||||
val = getattr(self, option)
|
||||
if val is None:
|
||||
setattr(self, option, default)
|
||||
return default
|
||||
elif not isinstance(val, str):
|
||||
raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
|
||||
(option, what, val))
|
||||
return val
|
||||
|
||||
    def ensure_string(self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.  Raises PackagingOptionError when the current value
        is neither None nor a string.
        """
        self._ensure_stringlike(option, "string", default)
|
||||
|
||||
def ensure_string_list(self, option):
|
||||
r"""Ensure that 'option' is a list of strings. If 'option' is
|
||||
currently a string, we split it either on /,\s*/ or /\s+/, so
|
||||
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
|
||||
["foo", "bar", "baz"].
|
||||
"""
|
||||
val = getattr(self, option)
|
||||
if val is None:
|
||||
return
|
||||
elif isinstance(val, str):
|
||||
setattr(self, option, re.split(r',\s*|\s+', val))
|
||||
else:
|
||||
if isinstance(val, list):
|
||||
# checks if all elements are str
|
||||
ok = True
|
||||
for element in val:
|
||||
if not isinstance(element, str):
|
||||
ok = False
|
||||
break
|
||||
else:
|
||||
ok = False
|
||||
|
||||
if not ok:
|
||||
raise PackagingOptionError(
|
||||
"'%s' must be a list of strings (got %r)" % (option, val))
|
||||
|
||||
    def _ensure_tested_string(self, option, tester,
                              what, error_fmt, default=None):
        # Like _ensure_stringlike, but additionally validate the string
        # with 'tester'.  'error_fmt' must contain one '%s' (it receives
        # the rejected value); the option name fills the leading '%s' of
        # the assembled message.
        val = self._ensure_stringlike(option, what, default)
        if val is not None and not tester(val):
            raise PackagingOptionError(
                ("error in '%s' option: " + error_fmt) % (option, val))
|
||||
|
||||
    def ensure_filename(self, option):
        """Ensure that 'option' is the name of an existing file.

        Raises PackagingOptionError otherwise (None is left unchanged).
        """
        self._ensure_tested_string(option, os.path.isfile,
                                   "filename",
                                   "'%s' does not exist or is not a file")
|
||||
|
||||
    def ensure_dirname(self, option):
        """Ensure that 'option' is the name of an existing directory.

        Raises PackagingOptionError otherwise (None is left unchanged).
        """
        self._ensure_tested_string(option, os.path.isdir,
                                   "directory name",
                                   "'%s' does not exist or is not a directory")
|
||||
|
||||
# -- Convenience methods for commands ------------------------------
|
||||
|
||||
@classmethod
|
||||
def get_command_name(cls):
|
||||
if hasattr(cls, 'command_name'):
|
||||
return cls.command_name
|
||||
else:
|
||||
return cls.__name__
|
||||
|
||||
def set_undefined_options(self, src_cmd, *options):
|
||||
"""Set values of undefined options from another command.
|
||||
|
||||
Undefined options are options set to None, which is the convention
|
||||
used to indicate that an option has not been changed between
|
||||
'initialize_options()' and 'finalize_options()'. This method is
|
||||
usually called from 'finalize_options()' for options that depend on
|
||||
some other command rather than another option of the same command,
|
||||
typically subcommands.
|
||||
|
||||
The 'src_cmd' argument is the other command from which option values
|
||||
will be taken (a command object will be created for it if necessary);
|
||||
the remaining positional arguments are strings that give the name of
|
||||
the option to set. If the name is different on the source and target
|
||||
command, you can pass a tuple with '(name_on_source, name_on_dest)' so
|
||||
that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
|
||||
"""
|
||||
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
|
||||
src_cmd_obj.ensure_finalized()
|
||||
for obj in options:
|
||||
if isinstance(obj, tuple):
|
||||
src_option, dst_option = obj
|
||||
else:
|
||||
src_option, dst_option = obj, obj
|
||||
if getattr(self, dst_option) is None:
|
||||
setattr(self, dst_option,
|
||||
getattr(src_cmd_obj, src_option))
|
||||
|
||||
def get_finalized_command(self, command, create=True):
|
||||
"""Wrapper around Distribution's 'get_command_obj()' method: find
|
||||
(create if necessary and 'create' is true) the command object for
|
||||
'command', call its 'ensure_finalized()' method, and return the
|
||||
finalized command object.
|
||||
"""
|
||||
cmd_obj = self.distribution.get_command_obj(command, create)
|
||||
cmd_obj.ensure_finalized()
|
||||
return cmd_obj
|
||||
|
||||
    def get_reinitialized_command(self, command, reinit_subcommands=False):
        """Return a reinitialized command object via the distribution.

        See Distribution.get_reinitialized_command for the semantics of
        'reinit_subcommands'.
        """
        return self.distribution.get_reinitialized_command(
            command, reinit_subcommands)
|
||||
|
||||
    def run_command(self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.
        """
        # Delegating keeps command creation/finalization in one place
        # (the distribution) -- presumably including "already run"
        # bookkeeping; confirm in Distribution.run_command.
        self.distribution.run_command(command)
|
||||
|
||||
def get_sub_commands(self):
|
||||
"""Determine the sub-commands that are relevant in the current
|
||||
distribution (ie., that need to be run). This is based on the
|
||||
'sub_commands' class attribute: each tuple in that list may include
|
||||
a method that we call to determine if the subcommand needs to be
|
||||
run for the current distribution. Return a list of command names.
|
||||
"""
|
||||
commands = []
|
||||
for sub_command in self.sub_commands:
|
||||
if len(sub_command) == 2:
|
||||
cmd_name, method = sub_command
|
||||
if method is None or method(self):
|
||||
commands.append(cmd_name)
|
||||
else:
|
||||
commands.append(sub_command)
|
||||
return commands
|
||||
|
||||
# -- External world manipulation -----------------------------------
|
||||
|
||||
    def execute(self, func, args, msg=None, level=1):
        # Run 'func(*args)' through util.execute, which honors the
        # command's dry-run flag.  NOTE(review): 'msg' handling happens
        # inside util.execute, and 'level' is accepted but never
        # forwarded -- confirm intent in packaging.util.
        util.execute(func, args, msg, dry_run=self.dry_run)
|
||||
|
||||
def mkpath(self, name, mode=0o777, dry_run=None, verbose=0):
|
||||
if dry_run is None:
|
||||
dry_run = self.dry_run
|
||||
name = os.path.normpath(name)
|
||||
if os.path.isdir(name) or name == '':
|
||||
return
|
||||
if dry_run:
|
||||
head = ''
|
||||
for part in name.split(os.sep):
|
||||
logger.info("created directory %s%s", head, part)
|
||||
head += part + os.sep
|
||||
return
|
||||
os.makedirs(name, mode)
|
||||
|
||||
def copy_file(self, infile, outfile,
|
||||
preserve_mode=True, preserve_times=True, link=None, level=1):
|
||||
"""Copy a file respecting verbose, dry-run and force flags. (The
|
||||
former two default to whatever is in the Distribution object, and
|
||||
the latter defaults to false for commands that don't define it.)"""
|
||||
if self.dry_run:
|
||||
# XXX add a comment
|
||||
return
|
||||
if os.path.isdir(outfile):
|
||||
outfile = os.path.join(outfile, os.path.split(infile)[-1])
|
||||
copyfile(infile, outfile)
|
||||
return outfile, None # XXX
|
||||
|
||||
    def copy_tree(self, infile, outfile, preserve_mode=True,
                  preserve_times=True, preserve_symlinks=False, level=1):
        """Copy an entire directory tree respecting verbose, dry-run,
        and force flags.
        """
        if self.dry_run:
            return  # see if we want to display something

        # NOTE(review): 'not self.force' presumably maps to
        # util.copy_tree's "update only when newer" flag -- confirm
        # against packaging.util.copy_tree's signature.
        return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
            preserve_symlinks, not self.force, dry_run=self.dry_run)
|
||||
|
||||
def move_file(self, src, dst, level=1):
|
||||
"""Move a file respecting the dry-run flag."""
|
||||
if self.dry_run:
|
||||
return # XXX log ?
|
||||
return move(src, dst)
|
||||
|
||||
    def spawn(self, cmd, search_path=True, level=1):
        """Spawn an external command respecting dry-run flag."""
        # Imported at function level rather than at the top of the
        # module -- presumably to avoid a circular import with
        # packaging.util; confirm.
        from packaging.util import spawn
        spawn(cmd, search_path, dry_run=self.dry_run)
|
||||
|
||||
    def make_archive(self, base_name, format, root_dir=None, base_dir=None,
                     owner=None, group=None):
        # Delegate to the module-level make_archive (presumably imported
        # from packaging.util or shutil at the top of this file --
        # confirm), forwarding the command's dry-run flag.
        return make_archive(base_name, format, root_dir,
                            base_dir, dry_run=self.dry_run,
                            owner=owner, group=group)
|
||||
|
||||
def make_file(self, infiles, outfile, func, args,
|
||||
exec_msg=None, skip_msg=None, level=1):
|
||||
"""Special case of 'execute()' for operations that process one or
|
||||
more input files and generate one output file. Works just like
|
||||
'execute()', except the operation is skipped and a different
|
||||
message printed if 'outfile' already exists and is newer than all
|
||||
files listed in 'infiles'. If the command defined 'self.force',
|
||||
and it is true, then the command is unconditionally run -- does no
|
||||
timestamp checks.
|
||||
"""
|
||||
if skip_msg is None:
|
||||
skip_msg = "skipping %s (inputs unchanged)" % outfile
|
||||
|
||||
# Allow 'infiles' to be a single string
|
||||
if isinstance(infiles, str):
|
||||
infiles = (infiles,)
|
||||
elif not isinstance(infiles, (list, tuple)):
|
||||
raise TypeError(
|
||||
"'infiles' must be a string, or a list or tuple of strings")
|
||||
|
||||
if exec_msg is None:
|
||||
exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
|
||||
|
||||
# If 'outfile' must be regenerated (either because it doesn't
|
||||
# exist, is out-of-date, or the 'force' flag is true) then
|
||||
# perform the action that presumably regenerates it
|
||||
if self.force or util.newer_group(infiles, outfile):
|
||||
self.execute(func, args, exec_msg, level)
|
||||
|
||||
# Otherwise, print the "skip" message
|
||||
else:
|
||||
logger.debug(skip_msg)
|
||||
35
Lib/packaging/command/command_template
Normal file
35
Lib/packaging/command/command_template
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
"""Do X and Y."""
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
|
||||
class x(Command):
|
||||
|
||||
# Brief (40-50 characters) description of the command
|
||||
description = ""
|
||||
|
||||
# List of option tuples: long name, short name (None if no short
|
||||
# name), and help string.
|
||||
user_options = [
|
||||
('', '', # long option, short option (one letter) or None
|
||||
""), # help text
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self. = None
|
||||
self. = None
|
||||
self. = None
|
||||
|
||||
def finalize_options(self):
|
||||
if self.x is None:
|
||||
self.x = ...
|
||||
|
||||
def run(self):
|
||||
...
|
||||
logger.info(...)
|
||||
|
||||
if not self.dry_run:
|
||||
...
|
||||
|
||||
self.execute(..., dry_run=self.dry_run)
|
||||
351
Lib/packaging/command/config.py
Normal file
351
Lib/packaging/command/config.py
Normal file
|
|
@ -0,0 +1,351 @@
|
|||
"""Prepare the build.
|
||||
|
||||
This module provides config, a (mostly) empty command class
|
||||
that exists mainly to be sub-classed by specific module distributions and
|
||||
applications. The idea is that while every "config" command is different,
|
||||
at least they're all named the same, and users always see "config" in the
|
||||
list of standard commands. Also, this is a good place to put common
|
||||
configure-like tasks: "try to compile this C code", or "figure out where
|
||||
this header file lives".
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingExecError
|
||||
from packaging.compiler import customize_compiler
|
||||
from packaging import logger
|
||||
|
||||
LANG_EXT = {'c': '.c', 'c++': '.cxx'}
|
||||
|
||||
class config(Command):
    """Command probing the build environment (compiler, headers, libs).

    Running the command itself does nothing; it exists to be subclassed
    by distributions needing autoconf-style checks.  The try_*/check_*
    helpers compile, link and optionally run small generated C/C++
    programs and report success or failure.
    """

    description = "prepare the build"

    user_options = [
        ('compiler=', None,
         "specify the compiler type"),
        ('cc=', None,
         "specify the compiler executable"),
        ('include-dirs=', 'I',
         "list of directories to search for header files"),
        ('define=', 'D',
         "C preprocessor macros to define"),
        ('undef=', 'U',
         "C preprocessor macros to undefine"),
        ('libraries=', 'l',
         "external C libraries to link with"),
        ('library-dirs=', 'L',
         "directories to search for external C libraries"),

        ('noisy', None,
         "show every action (compile, link, run, ...) taken"),
        ('dump-source', None,
         "dump generated source files before attempting to compile them"),
        ]


    # The three standard command methods: since the "config" command
    # does nothing by default, these are empty.

    def initialize_options(self):
        self.compiler = None
        self.cc = None
        self.include_dirs = None
        self.libraries = None
        self.library_dirs = None

        # maximal output for now
        self.noisy = True
        self.dump_source = True

        # list of temporary files generated along-the-way that we have
        # to clean at some point
        self.temp_files = []

    def finalize_options(self):
        # Normalize path/list options: the command line supplies
        # os.pathsep-separated strings, setup scripts supply lists.
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        elif isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        if self.libraries is None:
            self.libraries = []
        elif isinstance(self.libraries, str):
            self.libraries = [self.libraries]

        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

    def run(self):
        pass


    # Utility methods for actual "config" commands.  The interfaces are
    # loosely based on Autoconf macros of similar names.  Sub-classes
    # may use these freely.

    def _check_compiler(self):
        """Check that 'self.compiler' really is a CCompiler object;
        if not, make it one.
        """
        # We do this late, and only on-demand, because this is an expensive
        # import.
        from packaging.compiler.ccompiler import CCompiler
        from packaging.compiler import new_compiler
        if not isinstance(self.compiler, CCompiler):
            self.compiler = new_compiler(compiler=self.compiler,
                                         dry_run=self.dry_run, force=True)
            customize_compiler(self.compiler)
            if self.include_dirs:
                self.compiler.set_include_dirs(self.include_dirs)
            if self.libraries:
                self.compiler.set_libraries(self.libraries)
            if self.library_dirs:
                self.compiler.set_library_dirs(self.library_dirs)

    def _gen_temp_sourcefile(self, body, headers, lang):
        """Write a throwaway source file containing 'body', preceded by
        '#include' lines for 'headers', and return its name.
        """
        filename = "_configtest" + LANG_EXT[lang]
        # 'with' guarantees the file is closed even if a write fails
        # (the previous open/close pair leaked the handle on error).
        with open(filename, "w") as file:
            if headers:
                for header in headers:
                    file.write("#include <%s>\n" % header)
                file.write("\n")
            file.write(body)
            if body[-1] != "\n":
                file.write("\n")
        return filename

    def _preprocess(self, body, headers, include_dirs, lang):
        """Run the generated source through the preprocessor; return the
        (source, output) file names, registered for later cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        out = "_configtest.i"
        self.temp_files.extend((src, out))
        self.compiler.preprocess(src, out, include_dirs=include_dirs)
        return src, out

    def _compile(self, body, headers, include_dirs, lang):
        """Compile the generated source; return (source, object) names,
        both registered for cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        if self.dump_source:
            dump_file(src, "compiling '%s':" % src)
        obj = self.compiler.object_filenames([src])[0]
        self.temp_files.extend((src, obj))
        self.compiler.compile([src], include_dirs=include_dirs)
        return src, obj

    def _link(self, body, headers, include_dirs, libraries, library_dirs,
              lang):
        """Compile and link the generated source into an executable;
        return (source, object, program) names, registered for cleanup.
        """
        src, obj = self._compile(body, headers, include_dirs, lang)
        prog = os.path.splitext(os.path.basename(src))[0]
        self.compiler.link_executable([obj], prog,
                                      libraries=libraries,
                                      library_dirs=library_dirs,
                                      target_lang=lang)

        if self.compiler.exe_extension is not None:
            prog = prog + self.compiler.exe_extension
        self.temp_files.append(prog)

        return src, obj, prog

    def _clean(self, *filenames):
        """Remove the given files, or all recorded temp files when
        called with no arguments; missing files are silently ignored.
        """
        if not filenames:
            filenames = self.temp_files
            self.temp_files = []
        logger.info("removing: %s", ' '.join(filenames))
        for filename in filenames:
            try:
                os.remove(filename)
            except OSError:
                pass


    # XXX these ignore the dry-run flag: what to do, what to do? even if
    # you want a dry-run build, you still need some sort of configuration
    # info.  My inclination is to make it up to the real config command to
    # consult 'dry_run', and assume a default (minimal) configuration if
    # true.  The problem with trying to do it here is that you'd have to
    # return either true or false from all the 'try' methods, neither of
    # which is correct.

    # XXX need access to the header search path and maybe default macros.

    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file from 'body' (a string containing lines
        of C/C++ code) and 'headers' (a list of header files to include)
        and run it through the preprocessor.  Return true if the
        preprocessor succeeded, false if there were any errors.
        ('body' probably isn't of much use, but what the heck.)
        """
        from packaging.compiler.ccompiler import CompileError
        self._check_compiler()
        ok = True
        try:
            self._preprocess(body, headers, include_dirs, lang)
        except CompileError:
            ok = False

        self._clean()
        return ok

    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
                   lang="c"):
        """Construct a source file (just like 'try_cpp()'), run it through
        the preprocessor, and return true if any line of the output matches
        'pattern'.  'pattern' should either be a compiled regex object or a
        string containing a regex.  If both 'body' and 'headers' are None,
        preprocesses an empty file -- which can be useful to determine the
        symbols the preprocessor and compiler set by default.
        """
        self._check_compiler()
        src, out = self._preprocess(body, headers, include_dirs, lang)

        if isinstance(pattern, str):
            pattern = re.compile(pattern)

        # Iterate the preprocessor output line by line; 'with' closes
        # the file even when an error occurs mid-scan.
        match = False
        with open(out) as file:
            for line in file:
                if pattern.search(line):
                    match = True
                    break

        self._clean()
        return match

    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
        """Try to compile a source file built from 'body' and 'headers'.
        Return true on success, false otherwise.
        """
        from packaging.compiler.ccompiler import CompileError
        self._check_compiler()
        try:
            self._compile(body, headers, include_dirs, lang)
            ok = True
        except CompileError:
            ok = False

        logger.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_link(self, body, headers=None, include_dirs=None, libraries=None,
                 library_dirs=None, lang="c"):
        """Try to compile and link a source file, built from 'body' and
        'headers', to executable form.  Return true on success, false
        otherwise.
        """
        from packaging.compiler.ccompiler import CompileError, LinkError
        self._check_compiler()
        try:
            self._link(body, headers, include_dirs,
                       libraries, library_dirs, lang)
            ok = True
        except (CompileError, LinkError):
            ok = False

        logger.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_run(self, body, headers=None, include_dirs=None, libraries=None,
                library_dirs=None, lang="c"):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'.  Return true on success, false
        otherwise.
        """
        from packaging.compiler.ccompiler import CompileError, LinkError
        self._check_compiler()
        try:
            src, obj, exe = self._link(body, headers, include_dirs,
                                       libraries, library_dirs, lang)
            self.spawn([exe])
            ok = True
        except (CompileError, LinkError, PackagingExecError):
            ok = False

        logger.info("success!" if ok else "failure.")
        self._clean()
        return ok


    # -- High-level methods --------------------------------------------
    # (these are the ones that are actually likely to be useful
    # when implementing a real-world config command!)

    def check_func(self, func, headers=None, include_dirs=None,
                   libraries=None, library_dirs=None, decl=False, call=False):

        """Determine if function 'func' is available by constructing a
        source file that refers to 'func', and compiles and links it.
        If everything succeeds, returns true; otherwise returns false.

        The constructed source file starts out by including the header
        files listed in 'headers'.  If 'decl' is true, it then declares
        'func' (as "int func()"); you probably shouldn't supply 'headers'
        and set 'decl' true in the same call, or you might get errors about
        a conflicting declarations for 'func'.  Finally, the constructed
        'main()' function either references 'func' or (if 'call' is true)
        calls it.  'libraries' and 'library_dirs' are used when
        linking.
        """

        self._check_compiler()
        body = []
        if decl:
            body.append("int %s ();" % func)
        body.append("int main () {")
        if call:
            body.append("  %s();" % func)
        else:
            body.append("  %s;" % func)
        body.append("}")
        body = "\n".join(body) + "\n"

        return self.try_link(body, headers, include_dirs,
                             libraries, library_dirs)

    def check_lib(self, library, library_dirs=None, headers=None,
                  include_dirs=None, other_libraries=None):
        """Determine if 'library' is available to be linked against,
        without actually checking that any particular symbols are provided
        by it.  'headers' will be used in constructing the source file to
        be compiled, but the only effect of this is to check if all the
        header files listed are available.  Any libraries listed in
        'other_libraries' will be included in the link, in case 'library'
        has symbols that depend on other libraries.
        """
        self._check_compiler()
        # None replaces the old mutable default argument ([]) and means
        # "no extra libraries"; callers passing a list are unaffected.
        if other_libraries is None:
            other_libraries = []
        return self.try_link("int main (void) { }",
                             headers, include_dirs,
                             [library] + other_libraries, library_dirs)

    def check_header(self, header, include_dirs=None, library_dirs=None,
                     lang="c"):
        """Determine if the system header file named by 'header_file'
        exists and can be found by the preprocessor; return true if so,
        false otherwise.
        """
        return self.try_cpp(body="/* No body */", headers=[header],
                            include_dirs=include_dirs)
|
||||
|
||||
|
||||
def dump_file(filename, head=None):
    """Log the content of 'filename' through logger.info.

    When 'head' is given it is logged before the file content;
    otherwise the file name itself is logged first.
    """
    logger.info(head if head is not None else filename)
    with open(filename) as fp:
        logger.info(fp.read())
|
||||
79
Lib/packaging/command/install_data.py
Normal file
79
Lib/packaging/command/install_data.py
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
"""Install platform-independent data files."""
|
||||
|
||||
# Contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from shutil import Error
|
||||
from sysconfig import get_paths, format_value
|
||||
from packaging import logger
|
||||
from packaging.util import convert_path
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
|
||||
class install_data(Command):
    """Copy the distribution's data files to their install locations.

    Destinations may contain sysconfig-style {category} placeholders,
    expanded by 'expand_categories()'.
    """

    description = "install platform-independent data files"

    user_options = [
        ('install-dir=', 'd',
         "base directory for installing data files "
         "(default: installation base dir)"),
        ('root=', None,
         "install everything relative to this alternate root directory"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ]

    boolean_options = ['force']

    def initialize_options(self):
        """Set every option to its pre-configuration default."""
        self.install_dir = None
        # Absolute destination paths written so far (see get_outputs()).
        self.outfiles = []
        # (source, expanded destination) pairs (see get_resources_out()).
        self.data_files_out = []
        self.root = None
        self.force = False
        # Mapping of source file to destination template, taken from the
        # distribution; presumably {str: str} -- confirm against the
        # setup code that populates Distribution.data_files.
        self.data_files = self.distribution.data_files
        self.warn_dir = True

    def finalize_options(self):
        """Inherit unset options from the 'install_dist' command."""
        self.set_undefined_options('install_dist',
                                   ('install_data', 'install_dir'),
                                   'root', 'force')

    def run(self):
        """Copy every data file to its category-expanded destination."""
        self.mkpath(self.install_dir)
        for _file in self.data_files.items():
            # _file is a (source_path, destination_template) pair.
            destination = convert_path(self.expand_categories(_file[1]))
            dir_dest = os.path.abspath(os.path.dirname(destination))

            self.mkpath(dir_dest)
            try:
                out = self.copy_file(_file[0], dir_dest)[0]
            except Error as e:
                # shutil.Error from the copy: warn and record the
                # intended destination instead of the real output.
                logger.warning('%s: %s', self.get_command_name(), e)
                out = destination

            self.outfiles.append(out)
            self.data_files_out.append((_file[0], destination))

    def expand_categories(self, path_with_categories):
        """Expand {category} placeholders using sysconfig install paths.

        format_value is applied twice -- presumably so placeholders
        inside category values get resolved too; leftover braces only
        trigger a warning, the partially-expanded path is still returned.
        """
        local_vars = get_paths()
        local_vars['distribution.name'] = self.distribution.metadata['Name']
        expanded_path = format_value(path_with_categories, local_vars)
        expanded_path = format_value(expanded_path, local_vars)
        if '{' in expanded_path and '}' in expanded_path:
            logger.warning(
                '%s: unable to expand %s, some categories may be missing',
                self.get_command_name(), path_with_categories)
        return expanded_path

    def get_source_files(self):
        # Iterating the dict yields its keys: the source file names.
        return list(self.data_files)

    def get_inputs(self):
        # Same as get_source_files; kept for compatibility with the
        # distutils install_data interface.
        return list(self.data_files)

    def get_outputs(self):
        # Destination paths recorded by run().
        return self.outfiles

    def get_resources_out(self):
        # (source, destination) pairs recorded by run().
        return self.data_files_out
|
||||
625
Lib/packaging/command/install_dist.py
Normal file
625
Lib/packaging/command/install_dist.py
Normal file
|
|
@ -0,0 +1,625 @@
|
|||
"""Main install command, which calls the other install_* commands."""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
import sysconfig
|
||||
from sysconfig import get_config_vars, get_paths, get_path, get_config_var
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingPlatformError
|
||||
from packaging.util import write_file
|
||||
from packaging.util import convert_path, change_root, get_platform
|
||||
from packaging.errors import PackagingOptionError
|
||||
|
||||
|
||||
HAS_USER_SITE = True
|
||||
|
||||
|
||||
class install_dist(Command):
    """Main install command: computes the installation scheme and
    directories, then delegates the actual work to the install_*
    sub-commands listed in ``sub_commands``."""

    description = "install everything from build directory"

    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None,
         "installation prefix"),
        ('exec-prefix=', None,
         "(Unix only) prefix for platform-specific files"),
        ('home=', None,
         "(Unix only) home directory to install under"),

        # Or just set the base director(y|ies)
        ('install-base=', None,
         "base installation directory (instead of --prefix or --home)"),
        ('install-platbase=', None,
         "base installation directory for platform-specific files " +
         "(instead of --exec-prefix or --home)"),
        ('root=', None,
         "install everything relative to this alternate root directory"),

        # Or explicitly set the installation scheme
        ('install-purelib=', None,
         "installation directory for pure Python module distributions"),
        ('install-platlib=', None,
         "installation directory for non-pure module distributions"),
        ('install-lib=', None,
         "installation directory for all module distributions " +
         "(overrides --install-purelib and --install-platlib)"),

        ('install-headers=', None,
         "installation directory for C/C++ headers"),
        ('install-scripts=', None,
         "installation directory for Python scripts"),
        ('install-data=', None,
         "installation directory for data files"),

        # Byte-compilation options -- see install_lib.py for details, as
        # these are duplicated from there (but only install_lib does
        # anything with them).
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         'also compile with optimization: -O1 for "python -O", '
         '-O2 for "python -OO", and -O0 to disable [default: -O0]'),

        # Miscellaneous control options
        ('force', 'f',
         "force installation (overwrite any existing files)"),
        ('skip-build', None,
         "skip rebuilding everything (for testing/debugging)"),

        # Where to install documentation (eventually!)
        #('doc-format=', None, "format of documentation to generate"),
        #('install-man=', None, "directory for Unix man pages"),
        #('install-html=', None, "directory for HTML documentation"),
        #('install-info=', None, "directory for GNU info files"),

        # XXX use a name that makes clear this is the old format
        ('record=', None,
         "filename in which to record a list of installed files "
         "(not PEP 376-compliant)"),
        ('resources=', None,
         "data files mapping"),

        # .dist-info related arguments, read by install_dist_info
        ('no-distinfo', None,
         "do not create a .dist-info directory"),
        ('installer=', None,
         "the name of the installer"),
        ('requested', None,
         # BUG FIX: the original help string was truncated ("(i.e.")
         "generate a REQUESTED file (i.e. not installed as a dependency)"),
        ('no-requested', None,
         "do not generate a REQUESTED file"),
        ('no-record', None,
         "do not generate a RECORD file"),
        ]

    boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo',
                       'requested', 'no-record']

    if HAS_USER_SITE:
        user_options.append(
            ('user', None,
             "install in user site-packages directory [%s]" %
             get_path('purelib', '%s_user' % os.name)))

        boolean_options.append('user')

    negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}
|
||||
    def initialize_options(self):
        """Set every option to its undecided default.

        The concrete values are computed later by finalize_options(),
        which applies the installation-scheme policy.
        """
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = None
        self.exec_prefix = None
        self.home = None
        if HAS_USER_SITE:
            self.user = False

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = None
        self.install_platbase = None
        self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None     # for pure module distributions
        self.install_platlib = None     # non-pure (dists w/ extensions)
        self.install_headers = None     # for C/C++ headers
        self.install_lib = None         # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        if HAS_USER_SITE:
            self.install_userbase = get_config_var('userbase')
            self.install_usersite = get_path('purelib', '%s_user' % os.name)

        self.compile = None
        self.optimize = None

        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file.  (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!)  Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = True

        # 'force' forces installation, even if target files are not
        # out-of-date.  'skip_build' skips running the "build" command,
        # handy if you know it's not necessary.  'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = False
        self.skip_build = False
        self.warn_dir = True

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work.  ('build_base' isn't
        # actually used anywhere, but it might be useful in future.)  They
        # are not user options, because if the user told the install
        # command where the build directory is, that wouldn't affect the
        # build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        #self.install_man = None
        #self.install_html = None
        #self.install_info = None

        self.record = None
        self.resources = None

        # .dist-info related options
        self.no_distinfo = None
        self.installer = None
        self.requested = None
        self.no_record = None
        self.no_resources = None
||||
# -- Option finalizing methods -------------------------------------
|
||||
# (This is rather more involved than for most commands,
|
||||
# because this is where the policy for installing third-
|
||||
# party Python modules on various platforms given a wide
|
||||
# array of user input is decided. Yes, it's quite complex!)
|
||||
|
||||
    def finalize_options(self):
        """Validate option combinations and compute all install dirs.

        Raises PackagingOptionError on conflicting option sets, then
        delegates platform policy to finalize_unix()/finalize_other(),
        expands config vars and '~', and prepends --root if given.
        """
        # This method (and its pliant slaves, like 'finalize_unix()',
        # 'finalize_other()', and 'select_scheme()') is where the default
        # installation directories for modules, extension modules, and
        # anything else we care to install from a Python module
        # distribution.  Thus, this code makes a pretty important policy
        # statement about how third-party stuff is added to a Python
        # installation!  Note that the actual work of installation is done
        # by the relatively simple 'install_*' commands; they just take
        # their orders from the installation directory options determined
        # here.

        # Check for errors/inconsistencies in the options; first, stuff
        # that's wrong on any platform.

        if ((self.prefix or self.exec_prefix or self.home) and
            (self.install_base or self.install_platbase)):
            raise PackagingOptionError(
                "must supply either prefix/exec-prefix/home or "
                "install-base/install-platbase -- not both")

        if self.home and (self.prefix or self.exec_prefix):
            raise PackagingOptionError(
                "must supply either home or prefix/exec-prefix -- not both")

        if HAS_USER_SITE and self.user and (
                self.prefix or self.exec_prefix or self.home or
                self.install_base or self.install_platbase):
            raise PackagingOptionError(
                "can't combine user with prefix/exec_prefix/home or "
                "install_base/install_platbase")

        # Next, stuff that's wrong (or dubious) only on certain platforms.
        if os.name != "posix":
            if self.exec_prefix:
                logger.warning(
                    '%s: exec-prefix option ignored on this platform',
                    self.get_command_name())
                self.exec_prefix = None

        # Now the interesting logic -- so interesting that we farm it out
        # to other methods.  The goal of these methods is to set the final
        # values for the install_{lib,scripts,data,...} options, using as
        # input a heady brew of prefix, exec_prefix, home, install_base,
        # install_platbase, user-supplied versions of
        # install_{purelib,platlib,lib,scripts,data,...}, and the
        # INSTALL_SCHEME dictionary above.  Phew!

        self.dump_dirs("pre-finalize_{unix,other}")

        if os.name == 'posix':
            self.finalize_unix()
        else:
            self.finalize_other()

        self.dump_dirs("post-finalize_{unix,other}()")

        # Expand configuration variables, tilde, etc. in self.install_base
        # and self.install_platbase -- that way, we can use $base or
        # $platbase in the other installation directories and not worry
        # about needing recursive variable expansion (shudder).

        py_version = sys.version.split()[0]
        prefix, exec_prefix, srcdir, projectbase = get_config_vars(
            'prefix', 'exec_prefix', 'srcdir', 'projectbase')

        metadata = self.distribution.metadata
        # Substitution table used by _expand_attrs/expand_dirs below.
        self.config_vars = {
            'dist_name': metadata['Name'],
            'dist_version': metadata['Version'],
            'dist_fullname': metadata.get_fullname(),
            'py_version': py_version,
            'py_version_short': py_version[:3],
            'py_version_nodot': py_version[:3:2],
            'sys_prefix': prefix,
            'prefix': prefix,
            'sys_exec_prefix': exec_prefix,
            'exec_prefix': exec_prefix,
            'srcdir': srcdir,
            'projectbase': projectbase,
        }

        if HAS_USER_SITE:
            self.config_vars['userbase'] = self.install_userbase
            self.config_vars['usersite'] = self.install_usersite

        self.expand_basedirs()

        self.dump_dirs("post-expand_basedirs()")

        # Now define config vars for the base directories so we can expand
        # everything else.
        self.config_vars['base'] = self.install_base
        self.config_vars['platbase'] = self.install_platbase

        # Expand "~" and configuration variables in the installation
        # directories.
        self.expand_dirs()

        self.dump_dirs("post-expand_dirs()")

        # Create directories in the home dir:
        if HAS_USER_SITE and self.user:
            self.create_home_path()

        # Pick the actual directory to install all modules to: either
        # install_purelib or install_platlib, depending on whether this
        # module distribution is pure or not.  Of course, if the user
        # already specified install_lib, use their selection.
        if self.install_lib is None:
            if self.distribution.ext_modules:  # has extensions: non-pure
                self.install_lib = self.install_platlib
            else:
                self.install_lib = self.install_purelib

        # Convert directories from Unix /-separated syntax to the local
        # convention.
        self.convert_paths('lib', 'purelib', 'platlib',
                           'scripts', 'data', 'headers')
        if HAS_USER_SITE:
            self.convert_paths('userbase', 'usersite')

        # Well, we're not actually fully completely finalized yet: we still
        # have to deal with 'extra_path', which is the hack for allowing
        # non-packagized module distributions (hello, Numerical Python!) to
        # get their own directories.
        self.handle_extra_path()
        self.install_libbase = self.install_lib  # needed for .pth file
        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

        # If a new root directory was supplied, make all the installation
        # dirs relative to it.
        if self.root is not None:
            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
                              'scripts', 'data', 'headers')

        self.dump_dirs("after prepending root")

        # Find out the build directories, ie. where to install from.
        self.set_undefined_options('build', 'build_base', 'build_lib')

        # Punt on doc directories for now -- after all, we're punting on
        # documentation completely!

        if self.no_distinfo is None:
            self.no_distinfo = False
||||
    def finalize_unix(self):
        """Finalize options for posix platforms.

        Chooses one of the posix_{user,home,prefix} schemes, or accepts
        a fully explicit scheme when install-base/platbase was given.
        """
        if self.install_base is not None or self.install_platbase is not None:
            # Explicit base supplied: the user must then supply a complete
            # set of install directories themselves.
            if ((self.install_lib is None and
                 self.install_purelib is None and
                 self.install_platlib is None) or
                self.install_headers is None or
                self.install_scripts is None or
                self.install_data is None):
                raise PackagingOptionError(
                    "install-base or install-platbase supplied, but "
                    "installation scheme is incomplete")
            return

        if HAS_USER_SITE and self.user:
            if self.install_userbase is None:
                raise PackagingPlatformError(
                    "user base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme("posix_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                # --exec-prefix alone makes no sense.
                if self.exec_prefix is not None:
                    raise PackagingOptionError(
                        "must not supply exec-prefix without prefix")

                self.prefix = os.path.normpath(sys.prefix)
                self.exec_prefix = os.path.normpath(sys.exec_prefix)

            else:
                if self.exec_prefix is None:
                    self.exec_prefix = self.prefix

            self.install_base = self.prefix
            self.install_platbase = self.exec_prefix
            self.select_scheme("posix_prefix")
|
||||
    def finalize_other(self):
        """Finalize options for non-posix platforms"""
        if HAS_USER_SITE and self.user:
            if self.install_userbase is None:
                raise PackagingPlatformError(
                    "user base directory is not specified")
            self.install_base = self.install_platbase = self.install_userbase
            self.select_scheme(os.name + "_user")
        elif self.home is not None:
            self.install_base = self.install_platbase = self.home
            # NOTE(review): falls back to the posix_home scheme even on
            # non-posix platforms -- presumably intentional; confirm.
            self.select_scheme("posix_home")
        else:
            if self.prefix is None:
                self.prefix = os.path.normpath(sys.prefix)

            self.install_base = self.install_platbase = self.prefix
            try:
                # Scheme named after the OS (e.g. "nt").
                self.select_scheme(os.name)
            except KeyError:
                raise PackagingPlatformError(
                    "no support for installation on '%s'" % os.name)
|
||||
def dump_dirs(self, msg):
|
||||
"""Dump the list of user options."""
|
||||
logger.debug(msg + ":")
|
||||
for opt in self.user_options:
|
||||
opt_name = opt[0]
|
||||
if opt_name[-1] == "=":
|
||||
opt_name = opt_name[0:-1]
|
||||
if opt_name in self.negative_opt:
|
||||
opt_name = self.negative_opt[opt_name]
|
||||
opt_name = opt_name.replace('-', '_')
|
||||
val = not getattr(self, opt_name)
|
||||
else:
|
||||
opt_name = opt_name.replace('-', '_')
|
||||
val = getattr(self, opt_name)
|
||||
logger.debug(" %s: %s", opt_name, val)
|
||||
|
||||
def select_scheme(self, name):
|
||||
"""Set the install directories by applying the install schemes."""
|
||||
# it's the caller's problem if they supply a bad name!
|
||||
scheme = get_paths(name, expand=False)
|
||||
for key, value in scheme.items():
|
||||
if key == 'platinclude':
|
||||
key = 'headers'
|
||||
value = os.path.join(value, self.distribution.metadata['Name'])
|
||||
attrname = 'install_' + key
|
||||
if hasattr(self, attrname):
|
||||
if getattr(self, attrname) is None:
|
||||
setattr(self, attrname, value)
|
||||
|
||||
def _expand_attrs(self, attrs):
|
||||
for attr in attrs:
|
||||
val = getattr(self, attr)
|
||||
if val is not None:
|
||||
if os.name == 'posix' or os.name == 'nt':
|
||||
val = os.path.expanduser(val)
|
||||
# see if we want to push this work in sysconfig XXX
|
||||
val = sysconfig._subst_vars(val, self.config_vars)
|
||||
setattr(self, attr, val)
|
||||
|
||||
def expand_basedirs(self):
|
||||
"""Call `os.path.expanduser` on install_{base,platbase} and root."""
|
||||
self._expand_attrs(['install_base', 'install_platbase', 'root'])
|
||||
|
||||
def expand_dirs(self):
|
||||
"""Call `os.path.expanduser` on install dirs."""
|
||||
self._expand_attrs(['install_purelib', 'install_platlib',
|
||||
'install_lib', 'install_headers',
|
||||
'install_scripts', 'install_data'])
|
||||
|
||||
def convert_paths(self, *names):
|
||||
"""Call `convert_path` over `names`."""
|
||||
for name in names:
|
||||
attr = "install_" + name
|
||||
setattr(self, attr, convert_path(getattr(self, attr)))
|
||||
|
||||
    def handle_extra_path(self):
        """Set `path_file` and `extra_dirs` using `extra_path`.

        'extra_path' may be a 1- or 2-element sequence (or a
        comma-separated string of 1 or 2 elements): the .pth file name
        and, optionally, a distinct extra directory to install into.
        """
        if self.extra_path is None:
            self.extra_path = self.distribution.extra_path

        if self.extra_path is not None:
            if isinstance(self.extra_path, str):
                self.extra_path = self.extra_path.split(',')

            if len(self.extra_path) == 1:
                # Same value serves as both .pth name and subdirectory.
                path_file = extra_dirs = self.extra_path[0]
            elif len(self.extra_path) == 2:
                path_file, extra_dirs = self.extra_path
            else:
                raise PackagingOptionError(
                    "'extra_path' option must be a list, tuple, or "
                    "comma-separated string with 1 or 2 elements")

            # convert to local form in case Unix notation used (as it
            # should be in setup scripts)
            extra_dirs = convert_path(extra_dirs)
        else:
            path_file = None
            extra_dirs = ''

        # XXX should we warn if path_file and not extra_dirs? (in which
        # case the path file would be harmless but pointless)
        self.path_file = path_file
        self.extra_dirs = extra_dirs
|
||||
def change_roots(self, *names):
|
||||
"""Change the install direcories pointed by name using root."""
|
||||
for name in names:
|
||||
attr = "install_" + name
|
||||
setattr(self, attr, change_root(self.root, getattr(self, attr)))
|
||||
|
||||
def create_home_path(self):
|
||||
"""Create directories under ~."""
|
||||
if HAS_USER_SITE and not self.user:
|
||||
return
|
||||
home = convert_path(os.path.expanduser("~"))
|
||||
for name, path in self.config_vars.items():
|
||||
if path.startswith(home) and not os.path.isdir(path):
|
||||
os.makedirs(path, 0o700)
|
||||
|
||||
# -- Command execution methods -------------------------------------
|
||||
|
||||
    def run(self):
        """Runs the command.

        Builds (unless --skip-build), runs the needed install_*
        sub-commands, optionally writes a .pth file and the legacy
        --record file, and warns when installing outside sys.path.
        """
        # Obviously have to build before we can install
        if not self.skip_build:
            self.run_command('build')
            # If we built for any other platform, we can't install.
            build_plat = self.distribution.get_command_obj('build').plat_name
            # check warn_dir - it is a clue that the 'install_dist' is happening
            # internally, and not to sys.path, so we don't check the platform
            # matches what we are running.
            if self.warn_dir and build_plat != get_platform():
                raise PackagingPlatformError("Can't install when "
                                             "cross-compiling")

        # Run all sub-commands (at least those that need to be run)
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        if self.path_file:
            self.create_path_file()

        # write list of installed files, if requested.
        if self.record:
            outputs = self.get_outputs()
            if self.root:               # strip any package prefix
                root_len = len(self.root)
                for counter in range(len(outputs)):
                    outputs[counter] = outputs[counter][root_len:]
            self.execute(write_file,
                         (self.record, outputs),
                         "writing list of installed files to '%s'" %
                         self.record)

        # Warn (at debug level) when modules land outside sys.path and no
        # .pth file will make them importable.
        normpath, normcase = os.path.normpath, os.path.normcase
        sys_path = [normcase(normpath(p)) for p in sys.path]
        install_lib = normcase(normpath(self.install_lib))
        if (self.warn_dir and
            not (self.path_file and self.install_path_file) and
            install_lib not in sys_path):
            logger.debug(("modules installed to '%s', which is not in "
                          "Python's module search path (sys.path) -- "
                          "you'll have to change the search path yourself"),
                         self.install_lib)
||||
def create_path_file(self):
|
||||
"""Creates the .pth file"""
|
||||
filename = os.path.join(self.install_libbase,
|
||||
self.path_file + ".pth")
|
||||
if self.install_path_file:
|
||||
self.execute(write_file,
|
||||
(filename, [self.extra_dirs]),
|
||||
"creating %s" % filename)
|
||||
else:
|
||||
logger.warning('%s: path file %r not created',
|
||||
self.get_command_name(), filename)
|
||||
|
||||
# -- Reporting methods ---------------------------------------------
|
||||
|
||||
def get_outputs(self):
|
||||
"""Assembles the outputs of all the sub-commands."""
|
||||
outputs = []
|
||||
for cmd_name in self.get_sub_commands():
|
||||
cmd = self.get_finalized_command(cmd_name)
|
||||
# Add the contents of cmd.get_outputs(), ensuring
|
||||
# that outputs doesn't contain duplicate entries
|
||||
for filename in cmd.get_outputs():
|
||||
if filename not in outputs:
|
||||
outputs.append(filename)
|
||||
|
||||
if self.path_file and self.install_path_file:
|
||||
outputs.append(os.path.join(self.install_libbase,
|
||||
self.path_file + ".pth"))
|
||||
|
||||
return outputs
|
||||
|
||||
def get_inputs(self):
|
||||
"""Returns the inputs of all the sub-commands"""
|
||||
# XXX gee, this looks familiar ;-(
|
||||
inputs = []
|
||||
for cmd_name in self.get_sub_commands():
|
||||
cmd = self.get_finalized_command(cmd_name)
|
||||
inputs.extend(cmd.get_inputs())
|
||||
|
||||
return inputs
|
||||
|
||||
# -- Predicates for sub-command list -------------------------------
|
||||
|
||||
def has_lib(self):
|
||||
"""Returns true if the current distribution has any Python
|
||||
modules to install."""
|
||||
return (self.distribution.has_pure_modules() or
|
||||
self.distribution.has_ext_modules())
|
||||
|
||||
def has_headers(self):
|
||||
"""Returns true if the current distribution has any headers to
|
||||
install."""
|
||||
return self.distribution.has_headers()
|
||||
|
||||
def has_scripts(self):
|
||||
"""Returns true if the current distribution has any scripts to.
|
||||
install."""
|
||||
return self.distribution.has_scripts()
|
||||
|
||||
def has_data(self):
|
||||
"""Returns true if the current distribution has any data to.
|
||||
install."""
|
||||
return self.distribution.has_data_files()
|
||||
|
||||
    # 'sub_commands': a list of commands this command might have to run to
    # get its work done.  See cmd.py for more info.  Each entry pairs a
    # command name with a predicate (called with self) deciding whether
    # that sub-command needs to run.
    sub_commands = [('install_lib', has_lib),
                    ('install_headers', has_headers),
                    ('install_scripts', has_scripts),
                    ('install_data', has_data),
                    # keep install_distinfo last, as it needs the record
                    # with files to be completely generated
                    ('install_distinfo', lambda self: not self.no_distinfo),
                   ]
|
||||
175
Lib/packaging/command/install_distinfo.py
Normal file
175
Lib/packaging/command/install_distinfo.py
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
"""Create the PEP 376-compliant .dist-info directory."""
|
||||
|
||||
# Forked from the former install_egg_info command by Josip Djolonga
|
||||
|
||||
import csv
|
||||
import os
|
||||
import re
|
||||
import hashlib
|
||||
|
||||
from packaging.command.cmd import Command
|
||||
from packaging import logger
|
||||
from shutil import rmtree
|
||||
|
||||
|
||||
class install_distinfo(Command):
    """Create the PEP 376-compliant .dist-info directory
    (METADATA, INSTALLER, REQUESTED, RESOURCES, RECORD)."""

    description = 'create a .dist-info directory for the distribution'

    user_options = [
        ('distinfo-dir=', None,
         # typo fix: was "where the the .dist-info"
         "directory where the .dist-info directory will be installed"),
        ('installer=', None,
         "the name of the installer"),
        ('requested', None,
         "generate a REQUESTED file"),
        ('no-requested', None,
         "do not generate a REQUESTED file"),
        ('no-record', None,
         "do not generate a RECORD file"),
        ('no-resources', None,
         # typo fix: was "a RESSOURCES list installed file"
         "do not generate a RESOURCES file")
    ]

    boolean_options = ['requested', 'no-record', 'no-resources']

    negative_opt = {'no-requested': 'requested'}
|
||||
def initialize_options(self):
|
||||
self.distinfo_dir = None
|
||||
self.installer = None
|
||||
self.requested = None
|
||||
self.no_record = None
|
||||
self.no_resources = None
|
||||
|
||||
    def finalize_options(self):
        """Inherit settings from install_dist/install_lib and compute the
        final NAME-VERSION.dist-info directory path."""
        self.set_undefined_options('install_dist',
                                   'installer', 'requested', 'no_record')

        self.set_undefined_options('install_lib',
                                   ('install_dir', 'distinfo_dir'))

        if self.installer is None:
            # FIXME distutils or packaging?
            # + document default in the option help text above and in install
            self.installer = 'distutils'
        if self.requested is None:
            self.requested = True
        if self.no_record is None:
            self.no_record = False
        if self.no_resources is None:
            self.no_resources = False

        metadata = self.distribution.metadata

        # Escaped "NAME-VERSION.dist-info" per PEP 376.
        basename = "%s-%s.dist-info" % (
            to_filename(safe_name(metadata['Name'])),
            to_filename(safe_version(metadata['Version'])))

        self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
        # Files created by run(), reported by get_outputs().
        self.outputs = []
|
||||
def run(self):
|
||||
# FIXME dry-run should be used at a finer level, so that people get
|
||||
# useful logging output and can have an idea of what the command would
|
||||
# have done
|
||||
if not self.dry_run:
|
||||
target = self.distinfo_dir
|
||||
|
||||
if os.path.isdir(target) and not os.path.islink(target):
|
||||
rmtree(target)
|
||||
elif os.path.exists(target):
|
||||
self.execute(os.unlink, (self.distinfo_dir,),
|
||||
"removing " + target)
|
||||
|
||||
self.execute(os.makedirs, (target,), "creating " + target)
|
||||
|
||||
metadata_path = os.path.join(self.distinfo_dir, 'METADATA')
|
||||
logger.info('creating %s', metadata_path)
|
||||
self.distribution.metadata.write(metadata_path)
|
||||
self.outputs.append(metadata_path)
|
||||
|
||||
installer_path = os.path.join(self.distinfo_dir, 'INSTALLER')
|
||||
logger.info('creating %s', installer_path)
|
||||
with open(installer_path, 'w') as f:
|
||||
f.write(self.installer)
|
||||
self.outputs.append(installer_path)
|
||||
|
||||
if self.requested:
|
||||
requested_path = os.path.join(self.distinfo_dir, 'REQUESTED')
|
||||
logger.info('creating %s', requested_path)
|
||||
open(requested_path, 'w').close()
|
||||
self.outputs.append(requested_path)
|
||||
|
||||
|
||||
if not self.no_resources:
|
||||
install_data = self.get_finalized_command('install_data')
|
||||
if install_data.get_resources_out() != []:
|
||||
resources_path = os.path.join(self.distinfo_dir,
|
||||
'RESOURCES')
|
||||
logger.info('creating %s', resources_path)
|
||||
with open(resources_path, 'wb') as f:
|
||||
writer = csv.writer(f, delimiter=',',
|
||||
lineterminator=os.linesep,
|
||||
quotechar='"')
|
||||
for tuple in install_data.get_resources_out():
|
||||
writer.writerow(tuple)
|
||||
|
||||
self.outputs.append(resources_path)
|
||||
|
||||
if not self.no_record:
|
||||
record_path = os.path.join(self.distinfo_dir, 'RECORD')
|
||||
logger.info('creating %s', record_path)
|
||||
with open(record_path, 'w', encoding='utf-8') as f:
|
||||
writer = csv.writer(f, delimiter=',',
|
||||
lineterminator=os.linesep,
|
||||
quotechar='"')
|
||||
|
||||
install = self.get_finalized_command('install_dist')
|
||||
|
||||
for fpath in install.get_outputs():
|
||||
if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
|
||||
# do not put size and md5 hash, as in PEP-376
|
||||
writer.writerow((fpath, '', ''))
|
||||
else:
|
||||
size = os.path.getsize(fpath)
|
||||
with open(fpath, 'r') as fp:
|
||||
hash = hashlib.md5()
|
||||
hash.update(fp.read().encode())
|
||||
md5sum = hash.hexdigest()
|
||||
writer.writerow((fpath, md5sum, size))
|
||||
|
||||
# add the RECORD file itself
|
||||
writer.writerow((record_path, '', ''))
|
||||
self.outputs.append(record_path)
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outputs
|
||||
|
||||
|
||||
# The following functions are taken from setuptools' pkg_resources module.
|
||||
|
||||
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any runs of non-alphanumeric/. characters are replaced with a
    single '-'.
    """
    return '-'.join(re.split(r'[^A-Za-z0-9.]+', name))
|
||||
|
||||
def safe_version(version):
    """Convert an arbitrary string to a standard version string.

    Spaces become dots, and all other non-alphanumeric characters
    become dashes, with runs of multiple dashes condensed to one.
    """
    return re.sub(r'[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))
|
||||
|
||||
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
|
||||
43
Lib/packaging/command/install_headers.py
Normal file
43
Lib/packaging/command/install_headers.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
"""Install C/C++ header files to the Python include directory."""
|
||||
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
|
||||
# XXX force is never used
|
||||
class install_headers(Command):
    """Copy the distribution's declared C/C++ headers into the
    Python include directory."""

    description = "install C/C++ header files"

    user_options = [
        ('install-dir=', 'd', "directory to install header files to"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ]

    boolean_options = ['force']

    def initialize_options(self):
        self.install_dir = None
        self.force = False
        self.outfiles = []

    def finalize_options(self):
        # Inherit the target directory and force flag from install_dist.
        self.set_undefined_options('install_dist',
                                   ('install_headers', 'install_dir'),
                                   'force')

    def run(self):
        headers = self.distribution.headers
        if not headers:
            return

        self.mkpath(self.install_dir)
        for header in headers:
            copied = self.copy_file(header, self.install_dir)[0]
            self.outfiles.append(copied)

    def get_inputs(self):
        return self.distribution.headers or []

    def get_outputs(self):
        return self.outfiles
||||
222
Lib/packaging/command/install_lib.py
Normal file
222
Lib/packaging/command/install_lib.py
Normal file
|
|
@ -0,0 +1,222 @@
|
|||
"""Install all modules (extensions and pure Python)."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.errors import PackagingOptionError
|
||||
|
||||
|
||||
# Extension for Python source files.  'os.extsep' always exists on
# Python 3 (the hasattr() guard was for long-obsolete platforms), so
# the ".py" fallback branch was dead code.
PYTHON_SOURCE_EXTENSION = os.extsep + "py"
|
||||
|
||||
class install_lib(Command):
    """Install all modules (extensions and pure Python) and optionally
    byte-compile the installed .py files.
    """

    description = "install all modules (extensions and pure Python)"

    # The byte-compilation options are a tad confusing.  Here are the
    # possible scenarios:
    #   1) no compilation at all (--no-compile --no-optimize)
    #   2) compile .pyc only (--compile --no-optimize; default)
    #   3) compile .pyc and "level 1" .pyo (--compile --optimize)
    #   4) compile "level 1" .pyo only (--no-compile --optimize)
    #   5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
    #   6) compile "level 2" .pyo only (--no-compile --optimize-more)
    #
    # The UI for this is two option, 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyo files and what level of
    # optimization to use.

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
        ('build-dir=','b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('skip-build', None, "skip the build steps"),
        ]

    boolean_options = ['force', 'compile', 'skip-build']
    negative_opt = {'no-compile' : 'compile'}

    def initialize_options(self):
        # let the 'install_dist' command dictate our installation directory
        self.install_dir = None
        self.build_dir = None
        self.force = False
        self.compile = None
        self.optimize = None
        self.skip_build = None

    def finalize_options(self):
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install_dist' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options('install_dist',
                                   ('build_lib', 'build_dir'),
                                   ('install_lib', 'install_dir'),
                                   'force', 'compile', 'optimize',
                                   'skip_build')

        if self.compile is None:
            self.compile = True
        if self.optimize is None:
            self.optimize = 0

        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if self.optimize not in (0, 1, 2):
                    raise AssertionError
            except (ValueError, AssertionError):
                raise PackagingOptionError("optimize must be 0, 1, or 2")

    def run(self):
        # Make sure we have built everything we need first
        self.build()

        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()

        # (Optionally) compile .py to .pyc
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)

    # -- Top-level worker functions ------------------------------------
    # (called from 'run()')

    def build(self):
        """Run the build commands needed by the modules being installed."""
        if not self.skip_build:
            if self.distribution.has_pure_modules():
                self.run_command('build_py')
            if self.distribution.has_ext_modules():
                self.run_command('build_ext')

    def install(self):
        """Copy the build tree into the install tree.

        Return the list of installed files, or None when the build
        directory does not exist.
        """
        if os.path.isdir(self.build_dir):
            outfiles = self.copy_tree(self.build_dir, self.install_dir)
        else:
            logger.warning(
                '%s: %r does not exist -- no Python modules to install',
                self.get_command_name(), self.build_dir)
            return
        return outfiles

    def byte_compile(self, files):
        """Byte-compile *files* according to the 'compile' and 'optimize'
        options, unless bytecode writing is disabled for this interpreter.
        """
        # Fixed: 'sys.dont_write_bytecode' always exists, so the previous
        # no-default getattr() was both pointless and would have raised
        # AttributeError instead of degrading gracefully.
        if sys.dont_write_bytecode:
            # XXX do we want this? because a Python runs without bytecode
            # doesn't mean that the *dists should not contain bytecode
            #--or does it?
            logger.warning('%s: byte-compiling is disabled, skipping.',
                           self.get_command_name())
            return

        from packaging.util import byte_compile

        # Get the "--root" directory supplied to the "install_dist" command,
        # and use it as a prefix to strip off the purported filename
        # encoded in bytecode files.  This is far from complete, but it
        # should at least generate usable bytecode in RPM distributions.
        install_root = self.get_finalized_command('install_dist').root

        # Temporary kludge until we remove the verbose arguments and use
        # logging everywhere
        # NOTE(review): 'getEffectiveLevel() >= logging.DEBUG' is true for
        # almost every configured level (DEBUG is the *lowest* value) --
        # the comparison looks inverted; confirm the intent before relying
        # on it.
        verbose = logger.getEffectiveLevel() >= logging.DEBUG

        if self.compile:
            # Fixed: pass 'verbose' here too; previously only the
            # optimized call below received it, which was inconsistent.
            byte_compile(files, optimize=0,
                         force=self.force, prefix=install_root,
                         verbose=verbose,
                         dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=install_root,
                         verbose=verbose,
                         dry_run=self.dry_run)

    # -- Utility methods -----------------------------------------------

    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
        # Map the output files of *build_cmd* into *output_dir* by
        # stripping the build directory prefix from each path.
        if not has_any:
            return []

        build_cmd = self.get_finalized_command(build_cmd)
        build_files = build_cmd.get_outputs()
        build_dir = getattr(build_cmd, cmd_option)

        prefix_len = len(build_dir) + len(os.sep)
        outputs = []
        for file in build_files:
            outputs.append(os.path.join(output_dir, file[prefix_len:]))

        return outputs

    def _bytecode_filenames(self, py_filenames):
        # Return the bytecode file names that would be produced for the
        # .py files among *py_filenames*.
        bytecode_files = []
        for py_file in py_filenames:
            # Since build_py handles package data installation, the
            # list of outputs can contain more than just .py files.
            # Make sure we only report bytecode for the .py files.
            ext = os.path.splitext(os.path.normcase(py_file))[1]
            if ext != PYTHON_SOURCE_EXTENSION:
                continue
            if self.compile:
                bytecode_files.append(py_file + "c")
            if self.optimize > 0:
                bytecode_files.append(py_file + "o")

        return bytecode_files

    # -- External interface --------------------------------------------
    # (called by outsiders)

    def get_outputs(self):
        """Return the list of files that would be installed if this command
        were actually run.  Not affected by the "dry-run" flag or whether
        modules have actually been built yet.
        """
        pure_outputs = \
            self._mutate_outputs(self.distribution.has_pure_modules(),
                                 'build_py', 'build_lib',
                                 self.install_dir)
        if self.compile:
            bytecode_outputs = self._bytecode_filenames(pure_outputs)
        else:
            bytecode_outputs = []

        ext_outputs = \
            self._mutate_outputs(self.distribution.has_ext_modules(),
                                 'build_ext', 'build_lib',
                                 self.install_dir)

        return pure_outputs + bytecode_outputs + ext_outputs

    def get_inputs(self):
        """Get the list of files that are input to this command, ie. the
        files that get installed as they are named in the build tree.
        The files in this list correspond one-to-one to the output
        filenames returned by 'get_outputs()'.
        """
        inputs = []

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            inputs.extend(build_py.get_outputs())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            inputs.extend(build_ext.get_outputs())

        return inputs
|
||||
59
Lib/packaging/command/install_scripts.py
Normal file
59
Lib/packaging/command/install_scripts.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
"""Install scripts."""
|
||||
|
||||
# Contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from packaging.command.cmd import Command
|
||||
from packaging import logger
|
||||
|
||||
class install_scripts(Command):
    """Install scripts from the build tree, making them executable on
    POSIX systems."""

    description = "install scripts (Python or otherwise)"

    user_options = [
        ('install-dir=', 'd', "directory to install scripts to"),
        ('build-dir=','b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'skip-build']


    def initialize_options(self):
        # Options are inherited from 'build' and 'install_dist' in
        # 'finalize_options' when left unset.
        self.install_dir = None
        self.force = False
        self.build_dir = None
        self.skip_build = None

    def finalize_options(self):
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options('install_dist',
                                   ('install_scripts', 'install_dir'),
                                   'force', 'skip_build')

    def run(self):
        if not self.skip_build:
            self.run_command('build_scripts')

        if not os.path.exists(self.build_dir):
            # Nothing was built, so there is nothing to copy.
            self.outfiles = []
            return

        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
        if os.name != 'posix':
            return
        # Set the executable bits (owner, group, and world) on
        # all the scripts we just installed.
        for script in self.get_outputs():
            if self.dry_run:
                logger.info("changing mode of %s", script)
            else:
                new_mode = (os.stat(script).st_mode | 0o555) & 0o7777
                logger.info("changing mode of %s to %o", script, new_mode)
                os.chmod(script, new_mode)

    def get_inputs(self):
        # Scripts declared in the setup script.
        return self.distribution.scripts or []

    def get_outputs(self):
        # Files actually copied by 'run'.
        return self.outfiles or []
|
||||
282
Lib/packaging/command/register.py
Normal file
282
Lib/packaging/command/register.py
Normal file
|
|
@ -0,0 +1,282 @@
|
|||
"""Register a release with a project index."""
|
||||
|
||||
# Contributed by Richard Jones
|
||||
|
||||
import io
|
||||
import getpass
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
from packaging import logger
|
||||
from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
|
||||
DEFAULT_REALM, get_pypirc_path)
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
class register(Command):
    """Register a release with a package index (PyPI by default).

    Depending on options, either verifies the metadata server-side
    (--dry-run), lists Trove classifiers, or performs the actual
    registration, interactively asking for credentials when no
    .pypirc configuration is available.
    """

    description = "register a release with PyPI"
    user_options = [
        ('repository=', 'r',
         "repository URL [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         "display full response text from server"),
        ('list-classifiers', None,
         "list valid Trove classifiers"),
        ('strict', None ,
         "stop the registration if the metadata is not fully compliant")
        ]

    boolean_options = ['show-response', 'list-classifiers', 'strict']

    def initialize_options(self):
        self.repository = None
        self.realm = None
        self.show_response = False
        self.list_classifiers = False
        self.strict = False

    def finalize_options(self):
        if self.repository is None:
            self.repository = DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = DEFAULT_REALM

    def run(self):
        self._set_config()

        # Check the package metadata
        check = self.distribution.get_command_obj('check')
        if check.strict != self.strict and not check.all:
            # If check was already run but with different options,
            # re-run it
            check.strict = self.strict
            check.all = True
            self.distribution.have_run.pop('check', None)
            self.run_command('check')

        if self.dry_run:
            self.verify_metadata()
        elif self.list_classifiers:
            self.classifiers()
        else:
            self.send_metadata()

    def _set_config(self):
        ''' Reads the configuration file and set attributes.
        '''
        # Sets username/password/repository/realm from .pypirc when a
        # matching section exists; otherwise only normalizes the
        # 'pypi' shorthand and records that no config was found.
        config = read_pypirc(self.repository, self.realm)
        if config != {}:
            self.username = config['username']
            self.password = config['password']
            self.repository = config['repository']
            self.realm = config['realm']
            self.has_config = True
        else:
            if self.repository not in ('pypi', DEFAULT_REPOSITORY):
                raise ValueError('%s not found in .pypirc' % self.repository)
            if self.repository == 'pypi':
                self.repository = DEFAULT_REPOSITORY
            self.has_config = False

    def classifiers(self):
        ''' Fetch the list of classifiers from the server.
        '''
        response = urllib.request.urlopen(self.repository+'?:action=list_classifiers')
        # NOTE(review): response.read() returns bytes; logging it directly
        # prints the repr -- probably should be decoded first.  Confirm.
        logger.info(response.read())

    def verify_metadata(self):
        ''' Send the metadata to the package index server to be checked.
        '''
        # send the info to the server and report the result
        code, result = self.post_to_server(self.build_post_data('verify'))
        logger.info('server response (%s): %s', code, result)

    def send_metadata(self):
        ''' Send the metadata to the package index server.

        Well, do the following:
        1. figure who the user is, and then
        2. send the data as a Basic auth'ed POST.

        First we try to read the username/password from $HOME/.pypirc,
        which is a ConfigParser-formatted file with a section
        [distutils] containing username and password entries (both
        in clear text). Eg:

            [distutils]
            index-servers =
                pypi

            [pypi]
            username: fred
            password: sekrit

        Otherwise, to figure who the user is, we offer the user three
        choices:

         1. use existing login,
         2. register as a new user, or
         3. set the password to a random string and email the user.
        '''
        # TODO factor registration out into another method
        # TODO use print to print, not logging

        # see if we can short-cut and get the username/password from the
        # config
        if self.has_config:
            choice = '1'
            username = self.username
            password = self.password
        else:
            choice = 'x'
            username = password = ''

        # get the user's login info
        choices = '1 2 3 4'.split()
        while choice not in choices:
            logger.info('''\
We need to know who you are, so please choose either:
 1. use your existing login,
 2. register as a new user,
 3. have the server generate a new password for you (and email it to you), or
 4. quit
Your selection [default 1]: ''')

            choice = input()
            if not choice:
                choice = '1'
            elif choice not in choices:
                print('Please choose one of the four options!')

        if choice == '1':
            # get the username and password
            while not username:
                username = input('Username: ')
            while not password:
                password = getpass.getpass('Password: ')

            # set up the authentication
            auth = urllib.request.HTTPPasswordMgr()
            host = urllib.parse.urlparse(self.repository)[1]
            auth.add_password(self.realm, host, username, password)
            # send the info to the server and report the result
            code, result = self.post_to_server(self.build_post_data('submit'),
                auth)
            logger.info('Server response (%s): %s', code, result)

            # possibly save the login
            if code == 200:
                if self.has_config:
                    # sharing the password in the distribution instance
                    # so the upload command can reuse it
                    self.distribution.password = password
                else:
                    logger.info(
                        'I can store your PyPI login so future submissions '
                        'will be faster.\n(the login will be stored in %s)',
                        get_pypirc_path())
                    choice = 'X'
                    while choice.lower() not in 'yn':
                        choice = input('Save your login (y/N)?')
                        if not choice:
                            choice = 'n'
                    if choice.lower() == 'y':
                        generate_pypirc(username, password)

        elif choice == '2':
            # Register a new user: collect name, matching passwords and
            # an email address, then POST the ':action': 'user' request.
            data = {':action': 'user'}
            data['name'] = data['password'] = data['email'] = ''
            data['confirm'] = None
            while not data['name']:
                data['name'] = input('Username: ')
            while data['password'] != data['confirm']:
                while not data['password']:
                    data['password'] = getpass.getpass('Password: ')
                while not data['confirm']:
                    data['confirm'] = getpass.getpass(' Confirm: ')
                if data['password'] != data['confirm']:
                    data['password'] = ''
                    data['confirm'] = None
                    print("Password and confirm don't match!")
            while not data['email']:
                data['email'] = input(' EMail: ')
            code, result = self.post_to_server(data)
            if code != 200:
                logger.info('server response (%s): %s', code, result)
            else:
                logger.info('you will receive an email shortly; follow the '
                            'instructions in it to complete registration.')
        elif choice == '3':
            # Password reset: the server emails a new password.
            data = {':action': 'password_reset'}
            data['email'] = ''
            while not data['email']:
                data['email'] = input('Your email address: ')
            code, result = self.post_to_server(data)
            logger.info('server response (%s): %s', code, result)

    def build_post_data(self, action):
        # figure the data to send - the metadata plus some additional
        # information used by the package server
        data = self.distribution.metadata.todict()
        data[':action'] = action
        return data

    # XXX to be refactored with upload.upload_file
    def post_to_server(self, data, auth=None):
        ''' Post a query to the server, and return a string response.
        '''
        if 'name' in data:
            logger.info('Registering %s to %s', data['name'], self.repository)
        # Build up the MIME payload for the urllib2 POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = io.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if not isinstance(value, (tuple, list)):
                value = [value]

            for value in value:
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write("\n\n")
                # NOTE(review): assumes every value is a str; a non-string
                # metadata value would raise here.  Confirm upstream
                # guarantees.
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()

        # build the Request
        headers = {
            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
            'Content-length': str(len(body))
        }
        # NOTE(review): 'body' is a str; urllib.request on Python 3
        # requires the POST data to be bytes -- this looks like it would
        # raise a TypeError when actually sent.  Verify and encode.
        req = urllib.request.Request(self.repository, body, headers)

        # handle HTTP and include the Basic Auth handler
        opener = urllib.request.build_opener(
            urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
        )
        data = ''
        try:
            result = opener.open(req)
        except urllib.error.HTTPError as e:
            if self.show_response:
                data = e.fp.read()
            result = e.code, e.msg
        except urllib.error.URLError as e:
            result = 500, str(e)
        else:
            if self.show_response:
                data = result.read()
            result = 200, 'OK'
        if self.show_response:
            dashes = '-' * 75
            logger.info('%s%s%s', dashes, data, dashes)

        return result
|
||||
375
Lib/packaging/command/sdist.py
Normal file
375
Lib/packaging/command/sdist.py
Normal file
|
|
@ -0,0 +1,375 @@
|
|||
"""Create a source distribution."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
from io import StringIO
|
||||
from glob import glob
|
||||
from shutil import get_archive_formats, rmtree
|
||||
|
||||
from packaging import logger
|
||||
from packaging.util import resolve_name
|
||||
from packaging.errors import (PackagingPlatformError, PackagingOptionError,
|
||||
PackagingModuleError, PackagingFileError)
|
||||
from packaging.command import get_command_names
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.manifest import Manifest
|
||||
|
||||
|
||||
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from packaging.fancy_getopt import FancyGetopt
    format_rows = [('formats=' + name, None, desc)
                   for name, desc in get_archive_formats()]
    format_rows.sort()
    FancyGetopt(format_rows).print_help(
        "List of available source distribution formats:")
|
||||
|
||||
# a \ followed by some spaces + EOL
# NOTE(review): the pattern below actually matches a backslash followed
# by a single word character and a newline -- it disagrees with the
# comment above ("some spaces + EOL").  Neither constant is used in this
# module's visible code; confirm intended behavior before relying on it.
_COLLAPSE_PATTERN = re.compile('\\\w\n', re.M)
# Matches whole lines that are comments ('#...') or consist only of
# word characters.
_COMMENTED_LINE = re.compile('^#.*\n$|^\w*\n$', re.M)
|
||||
|
||||
|
||||
class sdist(Command):
    """Create a source distribution archive (tarball, zip file, etc.)
    from the files selected by the manifest machinery.
    """

    description = "create a source distribution (tarball, zip file, etc.)"

    user_options = [
        ('manifest=', 'm',
         "name of manifest file [default: MANIFEST]"),
        ('use-defaults', None,
         "include the default file set in the manifest "
         "[default; disable with --no-defaults]"),
        ('no-defaults', None,
         "don't include the default file set"),
        ('prune', None,
         "specifically exclude files/directories that should not be "
         "distributed (build tree, RCS/CVS dirs, etc.) "
         "[default; disable with --no-prune]"),
        ('no-prune', None,
         "don't automatically exclude anything"),
        ('manifest-only', 'o',
         "just regenerate the manifest and then stop "),
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
        ('check-metadata', None,
         "Ensure that all required elements of metadata "
         "are supplied. Warn if any missing. [default]"),
        ('owner=', 'u',
         "Owner name used when creating a tar file [default: current user]"),
        ('group=', 'g',
         "Group name used when creating a tar file [default: current group]"),
        ('manifest-builders=', None,
         "manifest builders (comma-separated list)"),
        ]

    boolean_options = ['use-defaults', 'prune',
                       'manifest-only', 'keep-temp', 'check-metadata']

    help_options = [
        ('help-formats', None,
         "list available distribution formats", show_formats),
        ]

    negative_opt = {'no-defaults': 'use-defaults',
                    'no-prune': 'prune'}

    # Default archive format per platform when --formats is not given.
    default_format = {'posix': 'gztar',
                      'nt': 'zip'}

    def initialize_options(self):
        self.manifest = None
        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = True
        self.prune = True
        self.manifest_only = False
        self.formats = None
        self.keep_temp = False
        self.dist_dir = None

        self.archive_files = None
        # NOTE(review): the command-line option is spelled
        # 'check-metadata' (which would set an attribute named
        # 'check_metadata'), but the code reads 'metadata_check' -- the
        # option appears to have no effect.  Confirm and reconcile.
        self.metadata_check = True
        self.owner = None
        self.group = None
        self.filelist = None
        self.manifest_builders = None

    def _check_archive_formats(self, formats):
        # Return the first format name not supported by shutil, or None
        # when all requested formats are available.
        supported_formats = [name for name, desc in get_archive_formats()]
        for format in formats:
            if format not in supported_formats:
                return format
        return None

    def finalize_options(self):
        if self.manifest is None:
            self.manifest = "MANIFEST"

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise PackagingPlatformError("don't know how to create source "
                                             "distributions on platform %s" % os.name)

        bad_format = self._check_archive_formats(self.formats)
        if bad_format:
            raise PackagingOptionError("unknown archive format '%s'" \
                                       % bad_format)

        if self.dist_dir is None:
            self.dist_dir = "dist"

        if self.filelist is None:
            self.filelist = Manifest()

        # 'manifest_builders' may arrive as a comma-separated string of
        # dotted names; resolve each one to a callable.
        if self.manifest_builders is None:
            self.manifest_builders = []
        else:
            if isinstance(self.manifest_builders, str):
                self.manifest_builders = self.manifest_builders.split(',')
            builders = []
            for builder in self.manifest_builders:
                builder = builder.strip()
                if builder == '':
                    continue
                try:
                    builder = resolve_name(builder)
                except ImportError as e:
                    raise PackagingModuleError(e)

                builders.append(builder)

            self.manifest_builders = builders

    def run(self):
        # 'filelist' contains the list of files that will make up the
        # manifest
        self.filelist.clear()

        # Check the package metadata
        if self.metadata_check:
            self.run_command('check')

        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is accumulated in 'self.filelist'.
        self.get_file_list()

        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return

        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution()

    def get_file_list(self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.filelist'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options.
        """
        template_exists = len(self.distribution.extra_files) > 0
        if not template_exists:
            logger.warning('%s: using default file list',
                           self.get_command_name())
        self.filelist.findall()

        if self.use_defaults:
            self.add_defaults()
        if template_exists:
            template = '\n'.join(self.distribution.extra_files)
            self.filelist.read_template(StringIO(template))

        # call manifest builders, if any.
        for builder in self.manifest_builders:
            builder(self.distribution, self.filelist)

        if self.prune:
            self.prune_file_list()

        self.filelist.write(self.manifest)

    def add_defaults(self):
        """Add all the default files to self.filelist:
          - README or README.txt
          - test/test*.py
          - all pure Python modules mentioned in setup script
          - all files pointed by package_data (build_py)
          - all files defined in data_files.
          - all files defined as scripts.
          - all C sources listed as part of extensions or C libraries
            in the setup script (doesn't catch C headers!)
        Warns if (README or README.txt) or setup.py are missing; everything
        else is optional.
        """
        standards = [('README', 'README.txt')]
        for fn in standards:
            if isinstance(fn, tuple):
                # A tuple means "any one of these alternatives".
                alts = fn
                got_it = False
                for fn in alts:
                    if os.path.exists(fn):
                        got_it = True
                        self.filelist.append(fn)
                        break

                if not got_it:
                    logger.warning(
                        '%s: standard file not found: should have one of %s',
                        self.get_command_name(), ', '.join(alts))
            else:
                if os.path.exists(fn):
                    self.filelist.append(fn)
                else:
                    logger.warning('%s: standard file %r not found',
                                   self.get_command_name(), fn)

        optional = ['test/test*.py', 'setup.cfg']
        for pattern in optional:
            files = [f for f in glob(pattern) if os.path.isfile(f)]
            if files:
                self.filelist.extend(files)

        # Let every command contribute its own source files.
        for cmd_name in get_command_names():
            try:
                cmd_obj = self.get_finalized_command(cmd_name)
            except PackagingOptionError:
                pass
            else:
                self.filelist.extend(cmd_obj.get_source_files())

    def prune_file_list(self):
        """Prune off branches that might slip into the file list as created
        by 'read_template()', but really don't belong there:
          * the build tree (typically "build")
          * the release tree itself (only an issue if we ran "sdist"
            previously with --keep-temp, or it aborted)
          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
        """
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()

        self.filelist.exclude_pattern(None, prefix=build.build_base)
        self.filelist.exclude_pattern(None, prefix=base_dir)

        # pruning out vcs directories
        # both separators are used under win32
        if sys.platform == 'win32':
            seps = r'/|\\'
        else:
            seps = '/'

        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                    '_darcs']
        vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
        self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)

    def make_release_tree(self, base_dir, files):
        """Create the directory tree that will become the source
        distribution archive.  All directories implied by the filenames in
        'files' are created under 'base_dir', and then we hard link or copy
        (if hard linking is unavailable) those files into place.
        Essentially, this duplicates the developer's source tree, but in a
        directory named after the distribution, containing only the files
        to be distributed.
        """
        # Create all the directories under 'base_dir' necessary to
        # put 'files' there; the 'mkpath()' is just so we don't die
        # if the manifest happens to be empty.
        self.mkpath(base_dir)
        self.create_tree(base_dir, files, dry_run=self.dry_run)

        # And walk over the list of files, either making a hard link (if
        # os.link exists) to each one that doesn't already exist in its
        # corresponding location under 'base_dir', or copying each file
        # that's out-of-date in 'base_dir'.  (Usually, all files will be
        # out-of-date, because by default we blow away 'base_dir' when
        # we're done making the distribution archives.)

        if hasattr(os, 'link'):        # can make hard links on this system
            link = 'hard'
            msg = "making hard links in %s..." % base_dir
        else:                          # nope, have to copy
            link = None
            msg = "copying files to %s..." % base_dir

        if not files:
            logger.warning("no files to distribute -- empty manifest?")
        else:
            logger.info(msg)

        # Every file listed in metadata 'requires_files' must have been
        # selected for distribution, otherwise abort.
        for file in self.distribution.metadata.requires_files:
            if file not in files:
                msg = "'%s' must be included explicitly in 'extra_files'" \
                      % file
                raise PackagingFileError(msg)

        for file in files:
            if not os.path.isfile(file):
                logger.warning("'%s' not a regular file -- skipping", file)
            else:
                dest = os.path.join(base_dir, file)
                self.copy_file(file, dest, link=link)

        self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO'))

    def make_distribution(self):
        """Create the source distribution(s).  First, we create the release
        tree with 'make_release_tree()'; then, we create all required
        archive files (according to 'self.formats') from the release tree.
        Finally, we clean up by blowing away the release tree (unless
        'self.keep_temp' is true).  The list of archive files created is
        stored so it can be retrieved later by 'get_archive_files()'.
        """
        # Don't warn about missing metadata here -- should be (and is!)
        # done elsewhere.
        base_dir = self.distribution.get_fullname()
        base_name = os.path.join(self.dist_dir, base_dir)

        self.make_release_tree(base_dir, self.filelist.files)
        archive_files = []              # remember names of files we create
        # tar archive must be created last to avoid overwrite and remove
        if 'tar' in self.formats:
            self.formats.append(self.formats.pop(self.formats.index('tar')))

        for fmt in self.formats:
            file = self.make_archive(base_name, fmt, base_dir=base_dir,
                                     owner=self.owner, group=self.group)
            archive_files.append(file)
            self.distribution.dist_files.append(('sdist', '', file))

        self.archive_files = archive_files

        if not self.keep_temp:
            if self.dry_run:
                logger.info('removing %s', base_dir)
            else:
                rmtree(base_dir)

    def get_archive_files(self):
        """Return the list of archive files created when the command
        was run, or None if the command hasn't run yet.
        """
        return self.archive_files

    def create_tree(self, base_dir, files, mode=0o777, verbose=1,
                    dry_run=False):
        # Create every directory needed to hold 'files' under 'base_dir'.
        need_dir = set()
        for file in files:
            need_dir.add(os.path.join(base_dir, os.path.dirname(file)))

        # Now create them
        for dir in sorted(need_dir):
            self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
|
||||
81
Lib/packaging/command/test.py
Normal file
81
Lib/packaging/command/test.py
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
"""Run the project's test suite."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import unittest
|
||||
|
||||
from packaging import logger
|
||||
from packaging.command.cmd import Command
|
||||
from packaging.database import get_distribution
|
||||
from packaging.errors import PackagingOptionError
|
||||
from packaging.util import resolve_name
|
||||
|
||||
|
||||
class test(Command):
|
||||
|
||||
description = "run the project's test suite"
|
||||
|
||||
user_options = [
|
||||
('suite=', 's',
|
||||
"test suite to run (for example: 'some_module.test_suite')"),
|
||||
('runner=', None,
|
||||
"test runner to be called."),
|
||||
('tests-require=', None,
|
||||
"list of distributions required to run the test suite."),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.suite = None
|
||||
self.runner = None
|
||||
self.tests_require = []
|
||||
|
||||
def finalize_options(self):
|
||||
self.build_lib = self.get_finalized_command("build").build_lib
|
||||
for requirement in self.tests_require:
|
||||
if get_distribution(requirement) is None:
|
||||
logger.warning("test dependency %s is not installed, "
|
||||
"tests may fail", requirement)
|
||||
if (not self.suite and not self.runner and
|
||||
self.get_ut_with_discovery() is None):
|
||||
raise PackagingOptionError(
|
||||
"no test discovery available, please give a 'suite' or "
|
||||
"'runner' option or install unittest2")
|
||||
|
||||
def get_ut_with_discovery(self):
|
||||
if hasattr(unittest.TestLoader, "discover"):
|
||||
return unittest
|
||||
else:
|
||||
try:
|
||||
import unittest2
|
||||
return unittest2
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
def run(self):
|
||||
prev_syspath = sys.path[:]
|
||||
try:
|
||||
# build release
|
||||
build = self.get_reinitialized_command('build')
|
||||
self.run_command('build')
|
||||
sys.path.insert(0, build.build_lib)
|
||||
|
||||
# Temporary kludge until we remove the verbose arguments and use
|
||||
# logging everywhere
|
||||
logger = logging.getLogger('packaging')
|
||||
verbose = logger.getEffectiveLevel() >= logging.DEBUG
|
||||
verbosity = verbose + 1
|
||||
|
||||
# run the tests
|
||||
if self.runner:
|
||||
resolve_name(self.runner)()
|
||||
elif self.suite:
|
||||
runner = unittest.TextTestRunner(verbosity=verbosity)
|
||||
runner.run(resolve_name(self.suite)())
|
||||
elif self.get_ut_with_discovery():
|
||||
ut = self.get_ut_with_discovery()
|
||||
test_suite = ut.TestLoader().discover(os.curdir)
|
||||
runner = ut.TextTestRunner(verbosity=verbosity)
|
||||
runner.run(test_suite)
|
||||
finally:
|
||||
sys.path[:] = prev_syspath
|
||||
201
Lib/packaging/command/upload.py
Normal file
201
Lib/packaging/command/upload.py
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
"""Upload a distribution to a project index."""
|
||||
|
||||
import os
|
||||
import socket
|
||||
import logging
|
||||
import platform
|
||||
import urllib.parse
|
||||
from io import BytesIO
|
||||
from base64 import standard_b64encode
|
||||
from hashlib import md5
|
||||
from urllib.error import HTTPError
|
||||
from urllib.request import urlopen, Request
|
||||
|
||||
from packaging import logger
|
||||
from packaging.errors import PackagingOptionError
|
||||
from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY,
|
||||
DEFAULT_REALM)
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
|
||||
class upload(Command):
|
||||
|
||||
description = "upload distribution to PyPI"
|
||||
|
||||
user_options = [
|
||||
('repository=', 'r',
|
||||
"repository URL [default: %s]" % DEFAULT_REPOSITORY),
|
||||
('show-response', None,
|
||||
"display full response text from server"),
|
||||
('sign', 's',
|
||||
"sign files to upload using gpg"),
|
||||
('identity=', 'i',
|
||||
"GPG identity used to sign files"),
|
||||
('upload-docs', None,
|
||||
"upload documentation too"),
|
||||
]
|
||||
|
||||
boolean_options = ['show-response', 'sign']
|
||||
|
||||
def initialize_options(self):
|
||||
self.repository = None
|
||||
self.realm = None
|
||||
self.show_response = False
|
||||
self.username = ''
|
||||
self.password = ''
|
||||
self.show_response = False
|
||||
self.sign = False
|
||||
self.identity = None
|
||||
self.upload_docs = False
|
||||
|
||||
def finalize_options(self):
|
||||
if self.repository is None:
|
||||
self.repository = DEFAULT_REPOSITORY
|
||||
if self.realm is None:
|
||||
self.realm = DEFAULT_REALM
|
||||
if self.identity and not self.sign:
|
||||
raise PackagingOptionError(
|
||||
"Must use --sign for --identity to have meaning")
|
||||
config = read_pypirc(self.repository, self.realm)
|
||||
if config != {}:
|
||||
self.username = config['username']
|
||||
self.password = config['password']
|
||||
self.repository = config['repository']
|
||||
self.realm = config['realm']
|
||||
|
||||
# getting the password from the distribution
|
||||
# if previously set by the register command
|
||||
if not self.password and self.distribution.password:
|
||||
self.password = self.distribution.password
|
||||
|
||||
def run(self):
|
||||
if not self.distribution.dist_files:
|
||||
raise PackagingOptionError(
|
||||
"No dist file created in earlier command")
|
||||
for command, pyversion, filename in self.distribution.dist_files:
|
||||
self.upload_file(command, pyversion, filename)
|
||||
if self.upload_docs:
|
||||
upload_docs = self.get_finalized_command("upload_docs")
|
||||
upload_docs.repository = self.repository
|
||||
upload_docs.username = self.username
|
||||
upload_docs.password = self.password
|
||||
upload_docs.run()
|
||||
|
||||
# XXX to be refactored with register.post_to_server
|
||||
def upload_file(self, command, pyversion, filename):
|
||||
# Makes sure the repository URL is compliant
|
||||
scheme, netloc, url, params, query, fragments = \
|
||||
urllib.parse.urlparse(self.repository)
|
||||
if params or query or fragments:
|
||||
raise AssertionError("Incompatible url %s" % self.repository)
|
||||
|
||||
if scheme not in ('http', 'https'):
|
||||
raise AssertionError("unsupported scheme " + scheme)
|
||||
|
||||
# Sign if requested
|
||||
if self.sign:
|
||||
gpg_args = ["gpg", "--detach-sign", "-a", filename]
|
||||
if self.identity:
|
||||
gpg_args[2:2] = ["--local-user", self.identity]
|
||||
spawn(gpg_args,
|
||||
dry_run=self.dry_run)
|
||||
|
||||
# Fill in the data - send all the metadata in case we need to
|
||||
# register a new release
|
||||
with open(filename, 'rb') as f:
|
||||
content = f.read()
|
||||
|
||||
data = self.distribution.metadata.todict()
|
||||
|
||||
# extra upload infos
|
||||
data[':action'] = 'file_upload'
|
||||
data['protcol_version'] = '1'
|
||||
data['content'] = (os.path.basename(filename), content)
|
||||
data['filetype'] = command
|
||||
data['pyversion'] = pyversion
|
||||
data['md5_digest'] = md5(content).hexdigest()
|
||||
|
||||
if command == 'bdist_dumb':
|
||||
data['comment'] = 'built for %s' % platform.platform(terse=True)
|
||||
|
||||
if self.sign:
|
||||
with open(filename + '.asc') as fp:
|
||||
sig = fp.read()
|
||||
data['gpg_signature'] = [
|
||||
(os.path.basename(filename) + ".asc", sig)]
|
||||
|
||||
# set up the authentication
|
||||
# The exact encoding of the authentication string is debated.
|
||||
# Anyway PyPI only accepts ascii for both username or password.
|
||||
user_pass = (self.username + ":" + self.password).encode('ascii')
|
||||
auth = b"Basic " + standard_b64encode(user_pass)
|
||||
|
||||
# Build up the MIME payload for the POST data
|
||||
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
||||
sep_boundary = b'\n--' + boundary
|
||||
end_boundary = sep_boundary + b'--'
|
||||
body = BytesIO()
|
||||
|
||||
file_fields = ('content', 'gpg_signature')
|
||||
|
||||
for key, value in data.items():
|
||||
# handle multiple entries for the same name
|
||||
if not isinstance(value, tuple):
|
||||
value = [value]
|
||||
|
||||
content_dispo = '\nContent-Disposition: form-data; name="%s"' % key
|
||||
|
||||
if key in file_fields:
|
||||
filename_, content = value
|
||||
filename_ = ';filename="%s"' % filename_
|
||||
body.write(sep_boundary)
|
||||
body.write(content_dispo.encode('utf-8'))
|
||||
body.write(filename_.encode('utf-8'))
|
||||
body.write(b"\n\n")
|
||||
body.write(content)
|
||||
else:
|
||||
for value in value:
|
||||
value = str(value).encode('utf-8')
|
||||
body.write(sep_boundary)
|
||||
body.write(content_dispo.encode('utf-8'))
|
||||
body.write(b"\n\n")
|
||||
body.write(value)
|
||||
if value and value.endswith(b'\r'):
|
||||
# write an extra newline (lurve Macs)
|
||||
body.write(b'\n')
|
||||
|
||||
body.write(end_boundary)
|
||||
body.write(b"\n")
|
||||
body = body.getvalue()
|
||||
|
||||
logger.info("Submitting %s to %s", filename, self.repository)
|
||||
|
||||
# build the Request
|
||||
headers = {'Content-type':
|
||||
'multipart/form-data; boundary=%s' %
|
||||
boundary.decode('ascii'),
|
||||
'Content-length': str(len(body)),
|
||||
'Authorization': auth}
|
||||
|
||||
request = Request(self.repository, data=body,
|
||||
headers=headers)
|
||||
# send the data
|
||||
try:
|
||||
result = urlopen(request)
|
||||
status = result.code
|
||||
reason = result.msg
|
||||
except socket.error as e:
|
||||
logger.error(e)
|
||||
return
|
||||
except HTTPError as e:
|
||||
status = e.code
|
||||
reason = e.msg
|
||||
|
||||
if status == 200:
|
||||
logger.info('Server response (%s): %s', status, reason)
|
||||
else:
|
||||
logger.error('Upload failed (%s): %s', status, reason)
|
||||
|
||||
if self.show_response and logger.isEnabledFor(logging.INFO):
|
||||
sep = '-' * 75
|
||||
logger.info('%s\n%s\n%s', sep, result.read().decode(), sep)
|
||||
173
Lib/packaging/command/upload_docs.py
Normal file
173
Lib/packaging/command/upload_docs.py
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
"""Upload HTML documentation to a project index."""
|
||||
|
||||
import os
|
||||
import base64
|
||||
import socket
|
||||
import zipfile
|
||||
import logging
|
||||
import http.client
|
||||
import urllib.parse
|
||||
from io import BytesIO
|
||||
|
||||
from packaging import logger
|
||||
from packaging.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM
|
||||
from packaging.errors import PackagingFileError
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
|
||||
def zip_dir(directory):
|
||||
"""Compresses recursively contents of directory into a BytesIO object"""
|
||||
destination = BytesIO()
|
||||
zip_file = zipfile.ZipFile(destination, "w")
|
||||
for root, dirs, files in os.walk(directory):
|
||||
for name in files:
|
||||
full = os.path.join(root, name)
|
||||
relative = root[len(directory):].lstrip(os.path.sep)
|
||||
dest = os.path.join(relative, name)
|
||||
zip_file.write(full, dest)
|
||||
zip_file.close()
|
||||
return destination
|
||||
|
||||
|
||||
# grabbed from
|
||||
# http://code.activestate.com/recipes/
|
||||
# 146306-http-client-to-post-using-multipartform-data/
|
||||
# TODO factor this out for use by install and command/upload
|
||||
|
||||
def encode_multipart(fields, files, boundary=None):
|
||||
"""
|
||||
*fields* is a sequence of (name: str, value: str) elements for regular
|
||||
form fields, *files* is a sequence of (name: str, filename: str, value:
|
||||
bytes) elements for data to be uploaded as files.
|
||||
|
||||
Returns (content_type: bytes, body: bytes) ready for http.client.HTTP.
|
||||
"""
|
||||
if boundary is None:
|
||||
boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
||||
elif not isinstance(boundary, bytes):
|
||||
raise TypeError('boundary is not bytes but %r' % type(boundary))
|
||||
|
||||
l = []
|
||||
for key, value in fields:
|
||||
l.extend((
|
||||
b'--' + boundary,
|
||||
('Content-Disposition: form-data; name="%s"' %
|
||||
key).encode('utf-8'),
|
||||
b'',
|
||||
value.encode('utf-8')))
|
||||
|
||||
for key, filename, value in files:
|
||||
l.extend((
|
||||
b'--' + boundary,
|
||||
('Content-Disposition: form-data; name="%s"; filename="%s"' %
|
||||
(key, filename)).encode('utf-8'),
|
||||
b'',
|
||||
value))
|
||||
l.append(b'--' + boundary + b'--')
|
||||
l.append(b'')
|
||||
|
||||
body = b'\r\n'.join(l)
|
||||
|
||||
content_type = b'multipart/form-data; boundary=' + boundary
|
||||
return content_type, body
|
||||
|
||||
|
||||
class upload_docs(Command):
|
||||
|
||||
description = "upload HTML documentation to PyPI"
|
||||
|
||||
user_options = [
|
||||
('repository=', 'r',
|
||||
"repository URL [default: %s]" % DEFAULT_REPOSITORY),
|
||||
('show-response', None,
|
||||
"display full response text from server"),
|
||||
('upload-dir=', None,
|
||||
"directory to upload"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.repository = None
|
||||
self.realm = None
|
||||
self.show_response = False
|
||||
self.upload_dir = None
|
||||
self.username = ''
|
||||
self.password = ''
|
||||
|
||||
def finalize_options(self):
|
||||
if self.repository is None:
|
||||
self.repository = DEFAULT_REPOSITORY
|
||||
if self.realm is None:
|
||||
self.realm = DEFAULT_REALM
|
||||
if self.upload_dir is None:
|
||||
build = self.get_finalized_command('build')
|
||||
self.upload_dir = os.path.join(build.build_base, "docs")
|
||||
if not os.path.isdir(self.upload_dir):
|
||||
self.upload_dir = os.path.join(build.build_base, "doc")
|
||||
logger.info('Using upload directory %s', self.upload_dir)
|
||||
self.verify_upload_dir(self.upload_dir)
|
||||
config = read_pypirc(self.repository, self.realm)
|
||||
if config != {}:
|
||||
self.username = config['username']
|
||||
self.password = config['password']
|
||||
self.repository = config['repository']
|
||||
self.realm = config['realm']
|
||||
|
||||
def verify_upload_dir(self, upload_dir):
|
||||
self.ensure_dirname('upload_dir')
|
||||
index_location = os.path.join(upload_dir, "index.html")
|
||||
if not os.path.exists(index_location):
|
||||
mesg = "No 'index.html found in docs directory (%s)"
|
||||
raise PackagingFileError(mesg % upload_dir)
|
||||
|
||||
def run(self):
|
||||
name = self.distribution.metadata['Name']
|
||||
version = self.distribution.metadata['Version']
|
||||
zip_file = zip_dir(self.upload_dir)
|
||||
|
||||
fields = [(':action', 'doc_upload'),
|
||||
('name', name), ('version', version)]
|
||||
files = [('content', name, zip_file.getvalue())]
|
||||
content_type, body = encode_multipart(fields, files)
|
||||
|
||||
credentials = self.username + ':' + self.password
|
||||
auth = b"Basic " + base64.encodebytes(credentials.encode()).strip()
|
||||
|
||||
logger.info("Submitting documentation to %s", self.repository)
|
||||
|
||||
scheme, netloc, url, params, query, fragments = urllib.parse.urlparse(
|
||||
self.repository)
|
||||
if scheme == "http":
|
||||
conn = http.client.HTTPConnection(netloc)
|
||||
elif scheme == "https":
|
||||
conn = http.client.HTTPSConnection(netloc)
|
||||
else:
|
||||
raise AssertionError("unsupported scheme %r" % scheme)
|
||||
|
||||
try:
|
||||
conn.connect()
|
||||
conn.putrequest("POST", url)
|
||||
conn.putheader('Content-type', content_type)
|
||||
conn.putheader('Content-length', str(len(body)))
|
||||
conn.putheader('Authorization', auth)
|
||||
conn.endheaders()
|
||||
conn.send(body)
|
||||
|
||||
except socket.error as e:
|
||||
logger.error(e)
|
||||
return
|
||||
|
||||
r = conn.getresponse()
|
||||
|
||||
if r.status == 200:
|
||||
logger.info('Server response (%s): %s', r.status, r.reason)
|
||||
elif r.status == 301:
|
||||
location = r.getheader('Location')
|
||||
if location is None:
|
||||
location = 'http://packages.python.org/%s/' % name
|
||||
logger.info('Upload successful. Visit %s', location)
|
||||
else:
|
||||
logger.error('Upload failed (%s): %s', r.status, r.reason)
|
||||
|
||||
if self.show_response and logger.isEnabledFor(logging.INFO):
|
||||
sep = '-' * 75
|
||||
logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep)
|
||||
BIN
Lib/packaging/command/wininst-10.0-amd64.exe
Normal file
BIN
Lib/packaging/command/wininst-10.0-amd64.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-10.0.exe
Normal file
BIN
Lib/packaging/command/wininst-10.0.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-6.0.exe
Normal file
BIN
Lib/packaging/command/wininst-6.0.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-7.1.exe
Normal file
BIN
Lib/packaging/command/wininst-7.1.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-8.0.exe
Normal file
BIN
Lib/packaging/command/wininst-8.0.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-9.0-amd64.exe
Normal file
BIN
Lib/packaging/command/wininst-9.0-amd64.exe
Normal file
Binary file not shown.
BIN
Lib/packaging/command/wininst-9.0.exe
Normal file
BIN
Lib/packaging/command/wininst-9.0.exe
Normal file
Binary file not shown.
57
Lib/packaging/compat.py
Normal file
57
Lib/packaging/compat.py
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
"""Compatibility helpers.
|
||||
|
||||
This module provides classes, variables and imports which are used to
|
||||
support packaging across Python 2.x and 3.x.
|
||||
"""
|
||||
|
||||
from packaging import logger
|
||||
|
||||
|
||||
# XXX Having two classes with the same name is not a good thing.
|
||||
# XXX 2to3-related code should move from util to this module
|
||||
|
||||
# TODO Move common code here: PY3 (bool indicating if we're on 3.x), any, etc.
|
||||
|
||||
try:
|
||||
from packaging.util import Mixin2to3 as _Mixin2to3
|
||||
_CONVERT = True
|
||||
_KLASS = _Mixin2to3
|
||||
except ImportError:
|
||||
_CONVERT = False
|
||||
_KLASS = object
|
||||
|
||||
__all__ = ['Mixin2to3']
|
||||
|
||||
|
||||
class Mixin2to3(_KLASS):
|
||||
""" The base class which can be used for refactoring. When run under
|
||||
Python 3.0, the run_2to3 method provided by Mixin2to3 is overridden.
|
||||
When run on Python 2.x, it merely creates a class which overrides run_2to3,
|
||||
yet does nothing in particular with it.
|
||||
"""
|
||||
if _CONVERT:
|
||||
|
||||
def _run_2to3(self, files, doctests=[], fixers=[]):
|
||||
""" Takes a list of files and doctests, and performs conversion
|
||||
on those.
|
||||
- First, the files which contain the code(`files`) are converted.
|
||||
- Second, the doctests in `files` are converted.
|
||||
- Thirdly, the doctests in `doctests` are converted.
|
||||
"""
|
||||
if fixers:
|
||||
self.fixer_names = fixers
|
||||
|
||||
logger.info('converting Python code')
|
||||
_KLASS.run_2to3(self, files)
|
||||
|
||||
logger.info('converting doctests in Python files')
|
||||
_KLASS.run_2to3(self, files, doctests_only=True)
|
||||
|
||||
if doctests != []:
|
||||
logger.info('converting doctest in text files')
|
||||
_KLASS.run_2to3(self, doctests, doctests_only=True)
|
||||
else:
|
||||
# If run on Python 2.x, there is nothing to do.
|
||||
|
||||
def _run_2to3(self, files, doctests=[], fixers=[]):
|
||||
pass
|
||||
282
Lib/packaging/compiler/__init__.py
Normal file
282
Lib/packaging/compiler/__init__.py
Normal file
|
|
@ -0,0 +1,282 @@
|
|||
"""Compiler abstraction model used by packaging.
|
||||
|
||||
An abstract base class is defined in the ccompiler submodule, and
|
||||
concrete implementations suitable for various platforms are defined in
|
||||
the other submodules. The extension module is also placed in this
|
||||
package.
|
||||
|
||||
In general, code should not instantiate compiler classes directly but
|
||||
use the new_compiler and customize_compiler functions provided in this
|
||||
module.
|
||||
|
||||
The compiler system has a registration API: get_default_compiler,
|
||||
set_compiler, show_compilers.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
import sysconfig
|
||||
from packaging.util import resolve_name
|
||||
from packaging.errors import PackagingPlatformError
|
||||
|
||||
|
||||
def customize_compiler(compiler):
|
||||
"""Do any platform-specific customization of a CCompiler instance.
|
||||
|
||||
Mainly needed on Unix, so we can plug in the information that
|
||||
varies across Unices and is stored in Python's Makefile.
|
||||
"""
|
||||
if compiler.name == "unix":
|
||||
cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = (
|
||||
sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
|
||||
'CCSHARED', 'LDSHARED', 'SO', 'AR',
|
||||
'ARFLAGS'))
|
||||
|
||||
if 'CC' in os.environ:
|
||||
cc = os.environ['CC']
|
||||
if 'CXX' in os.environ:
|
||||
cxx = os.environ['CXX']
|
||||
if 'LDSHARED' in os.environ:
|
||||
ldshared = os.environ['LDSHARED']
|
||||
if 'CPP' in os.environ:
|
||||
cpp = os.environ['CPP']
|
||||
else:
|
||||
cpp = cc + " -E" # not always
|
||||
if 'LDFLAGS' in os.environ:
|
||||
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
|
||||
if 'CFLAGS' in os.environ:
|
||||
cflags = opt + ' ' + os.environ['CFLAGS']
|
||||
ldshared = ldshared + ' ' + os.environ['CFLAGS']
|
||||
if 'CPPFLAGS' in os.environ:
|
||||
cpp = cpp + ' ' + os.environ['CPPFLAGS']
|
||||
cflags = cflags + ' ' + os.environ['CPPFLAGS']
|
||||
ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
|
||||
if 'AR' in os.environ:
|
||||
ar = os.environ['AR']
|
||||
if 'ARFLAGS' in os.environ:
|
||||
archiver = ar + ' ' + os.environ['ARFLAGS']
|
||||
else:
|
||||
if ar_flags is not None:
|
||||
archiver = ar + ' ' + ar_flags
|
||||
else:
|
||||
# see if its the proper default value
|
||||
# mmm I don't want to backport the makefile
|
||||
archiver = ar + ' rc'
|
||||
|
||||
cc_cmd = cc + ' ' + cflags
|
||||
compiler.set_executables(
|
||||
preprocessor=cpp,
|
||||
compiler=cc_cmd,
|
||||
compiler_so=cc_cmd + ' ' + ccshared,
|
||||
compiler_cxx=cxx,
|
||||
linker_so=ldshared,
|
||||
linker_exe=cc,
|
||||
archiver=archiver)
|
||||
|
||||
compiler.shared_lib_extension = so_ext
|
||||
|
||||
|
||||
# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
|
||||
# type for that platform. Keys are interpreted as re match
|
||||
# patterns. Order is important; platform mappings are preferred over
|
||||
# OS names.
|
||||
_default_compilers = (
|
||||
|
||||
# Platform string mappings
|
||||
|
||||
# on a cygwin built python we can use gcc like an ordinary UNIXish
|
||||
# compiler
|
||||
('cygwin.*', 'unix'),
|
||||
('os2emx', 'emx'),
|
||||
|
||||
# OS name mappings
|
||||
('posix', 'unix'),
|
||||
('nt', 'msvc'),
|
||||
|
||||
)
|
||||
|
||||
def get_default_compiler(osname=None, platform=None):
|
||||
""" Determine the default compiler to use for the given platform.
|
||||
|
||||
osname should be one of the standard Python OS names (i.e. the
|
||||
ones returned by os.name) and platform the common value
|
||||
returned by sys.platform for the platform in question.
|
||||
|
||||
The default values are os.name and sys.platform in case the
|
||||
parameters are not given.
|
||||
|
||||
"""
|
||||
if osname is None:
|
||||
osname = os.name
|
||||
if platform is None:
|
||||
platform = sys.platform
|
||||
for pattern, compiler in _default_compilers:
|
||||
if re.match(pattern, platform) is not None or \
|
||||
re.match(pattern, osname) is not None:
|
||||
return compiler
|
||||
# Defaults to Unix compiler
|
||||
return 'unix'
|
||||
|
||||
|
||||
# compiler mapping
|
||||
# XXX useful to expose them? (i.e. get_compiler_names)
|
||||
_COMPILERS = {
|
||||
'unix': 'packaging.compiler.unixccompiler.UnixCCompiler',
|
||||
'msvc': 'packaging.compiler.msvccompiler.MSVCCompiler',
|
||||
'cygwin': 'packaging.compiler.cygwinccompiler.CygwinCCompiler',
|
||||
'mingw32': 'packaging.compiler.cygwinccompiler.Mingw32CCompiler',
|
||||
'bcpp': 'packaging.compiler.bcppcompiler.BCPPCompiler',
|
||||
}
|
||||
|
||||
def set_compiler(location):
|
||||
"""Add or change a compiler"""
|
||||
cls = resolve_name(location)
|
||||
# XXX we want to check the class here
|
||||
_COMPILERS[cls.name] = cls
|
||||
|
||||
|
||||
def show_compilers():
|
||||
"""Print list of available compilers (used by the "--help-compiler"
|
||||
options to "build", "build_ext", "build_clib").
|
||||
"""
|
||||
from packaging.fancy_getopt import FancyGetopt
|
||||
compilers = []
|
||||
|
||||
for name, cls in _COMPILERS.items():
|
||||
if isinstance(cls, str):
|
||||
cls = resolve_name(cls)
|
||||
_COMPILERS[name] = cls
|
||||
|
||||
compilers.append(("compiler=" + name, None, cls.description))
|
||||
|
||||
compilers.sort()
|
||||
pretty_printer = FancyGetopt(compilers)
|
||||
pretty_printer.print_help("List of available compilers:")
|
||||
|
||||
|
||||
def new_compiler(plat=None, compiler=None, verbose=0, dry_run=False,
|
||||
force=False):
|
||||
"""Generate an instance of some CCompiler subclass for the supplied
|
||||
platform/compiler combination. 'plat' defaults to 'os.name'
|
||||
(eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
|
||||
for that platform. Currently only 'posix' and 'nt' are supported, and
|
||||
the default compilers are "traditional Unix interface" (UnixCCompiler
|
||||
class) and Visual C++ (MSVCCompiler class). Note that it's perfectly
|
||||
possible to ask for a Unix compiler object under Windows, and a
|
||||
Microsoft compiler object under Unix -- if you supply a value for
|
||||
'compiler', 'plat' is ignored.
|
||||
"""
|
||||
if plat is None:
|
||||
plat = os.name
|
||||
|
||||
try:
|
||||
if compiler is None:
|
||||
compiler = get_default_compiler(plat)
|
||||
|
||||
cls = _COMPILERS[compiler]
|
||||
except KeyError:
|
||||
msg = "don't know how to compile C/C++ code on platform '%s'" % plat
|
||||
if compiler is not None:
|
||||
msg = msg + " with '%s' compiler" % compiler
|
||||
raise PackagingPlatformError(msg)
|
||||
|
||||
if isinstance(cls, str):
|
||||
cls = resolve_name(cls)
|
||||
_COMPILERS[compiler] = cls
|
||||
|
||||
|
||||
# XXX The None is necessary to preserve backwards compatibility
|
||||
# with classes that expect verbose to be the first positional
|
||||
# argument.
|
||||
return cls(None, dry_run, force)
|
||||
|
||||
|
||||
def gen_preprocess_options(macros, include_dirs):
|
||||
"""Generate C pre-processor options (-D, -U, -I) as used by at least
|
||||
two types of compilers: the typical Unix compiler and Visual C++.
|
||||
'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
|
||||
means undefine (-U) macro 'name', and (name,value) means define (-D)
|
||||
macro 'name' to 'value'. 'include_dirs' is just a list of directory
|
||||
names to be added to the header file search path (-I). Returns a list
|
||||
of command-line options suitable for either Unix compilers or Visual
|
||||
C++.
|
||||
"""
|
||||
# XXX it would be nice (mainly aesthetic, and so we don't generate
|
||||
# stupid-looking command lines) to go over 'macros' and eliminate
|
||||
# redundant definitions/undefinitions (ie. ensure that only the
|
||||
# latest mention of a particular macro winds up on the command
|
||||
# line). I don't think it's essential, though, since most (all?)
|
||||
# Unix C compilers only pay attention to the latest -D or -U
|
||||
# mention of a macro on their command line. Similar situation for
|
||||
# 'include_dirs'. I'm punting on both for now. Anyways, weeding out
|
||||
# redundancies like this should probably be the province of
|
||||
# CCompiler, since the data structures used are inherited from it
|
||||
# and therefore common to all CCompiler classes.
|
||||
|
||||
pp_opts = []
|
||||
for macro in macros:
|
||||
|
||||
if not isinstance(macro, tuple) and 1 <= len(macro) <= 2:
|
||||
raise TypeError(
|
||||
"bad macro definition '%s': each element of 'macros'"
|
||||
"list must be a 1- or 2-tuple" % macro)
|
||||
|
||||
if len(macro) == 1: # undefine this macro
|
||||
pp_opts.append("-U%s" % macro[0])
|
||||
elif len(macro) == 2:
|
||||
if macro[1] is None: # define with no explicit value
|
||||
pp_opts.append("-D%s" % macro[0])
|
||||
else:
|
||||
# XXX *don't* need to be clever about quoting the
|
||||
# macro value here, because we're going to avoid the
|
||||
# shell at all costs when we spawn the command!
|
||||
pp_opts.append("-D%s=%s" % macro)
|
||||
|
||||
for dir in include_dirs:
|
||||
pp_opts.append("-I%s" % dir)
|
||||
|
||||
return pp_opts
|
||||
|
||||
|
||||
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
|
||||
"""Generate linker options for searching library directories and
|
||||
linking with specific libraries.
|
||||
|
||||
'libraries' and 'library_dirs' are, respectively, lists of library names
|
||||
(not filenames!) and search directories. Returns a list of command-line
|
||||
options suitable for use with some compiler (depending on the two format
|
||||
strings passed in).
|
||||
"""
|
||||
lib_opts = []
|
||||
|
||||
for dir in library_dirs:
|
||||
lib_opts.append(compiler.library_dir_option(dir))
|
||||
|
||||
for dir in runtime_library_dirs:
|
||||
opt = compiler.runtime_library_dir_option(dir)
|
||||
if isinstance(opt, list):
|
||||
lib_opts.extend(opt)
|
||||
else:
|
||||
lib_opts.append(opt)
|
||||
|
||||
# XXX it's important that we *not* remove redundant library mentions!
|
||||
# sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
|
||||
# resolve all symbols. I just hope we never have to say "-lfoo obj.o
|
||||
# -lbar" to get things to work -- that's certainly a possibility, but a
|
||||
# pretty nasty way to arrange your C code.
|
||||
|
||||
for lib in libraries:
|
||||
lib_dir, lib_name = os.path.split(lib)
|
||||
if lib_dir != '':
|
||||
lib_file = compiler.find_library_file([lib_dir], lib_name)
|
||||
if lib_file is not None:
|
||||
lib_opts.append(lib_file)
|
||||
else:
|
||||
compiler.warn("no library file corresponding to "
|
||||
"'%s' found (skipping)" % lib)
|
||||
else:
|
||||
lib_opts.append(compiler.library_option(lib))
|
||||
|
||||
return lib_opts
|
||||
356
Lib/packaging/compiler/bcppcompiler.py
Normal file
356
Lib/packaging/compiler/bcppcompiler.py
Normal file
|
|
@ -0,0 +1,356 @@
|
|||
"""CCompiler implementation for the Borland C++ compiler."""
|
||||
|
||||
# This implementation by Lyle Johnson, based on the original msvccompiler.py
|
||||
# module and using the directions originally published by Gordon Williams.
|
||||
|
||||
# XXX looks like there's a LOT of overlap between these two classes:
|
||||
# someone should sit down and factor out the common code as
|
||||
# WindowsCCompiler! --GPW
|
||||
|
||||
import os
|
||||
|
||||
from packaging.errors import (PackagingExecError, CompileError, LibError,
|
||||
LinkError, UnknownFileError)
|
||||
from packaging.compiler.ccompiler import CCompiler
|
||||
from packaging.compiler import gen_preprocess_options
|
||||
from packaging.file_util import write_file
|
||||
from packaging.dep_util import newer
|
||||
from packaging import logger
|
||||
|
||||
|
||||
class BCPPCompiler(CCompiler):
    """Concrete class that implements an interface to the Borland C/C++
    compiler, as defined by the CCompiler abstract class.
    """

    name = 'bcpp'
    description = 'Borland C++ Compiler'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = _c_extensions + _cpp_extensions
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=False, force=False):
        """Initialize with hard-coded Borland tool names and default flags."""
        CCompiler.__init__(self, verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.
        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r']

    # -- Worker methods ------------------------------------------------

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=False,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile each file in 'sources'; return the list of object files.

        '.res' files are passed through unchanged, '.rc' files are run
        through brcc32, everything else goes to bcc32.  Raises CompileError
        when a spawned tool fails.
        """
        macros, objects, extra_postargs, pp_opts, build = \
            self._setup_compile(output_dir, macros, include_dirs, sources,
                                depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append('-c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Not scheduled for (re)compilation by _setup_compile.
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue  # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn(["brcc32", "-fo", obj, src])
                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue  # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                # '-P' forces bcc32 to compile the file as C++.
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options.  The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs + [src])
            except PackagingExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(self, objects, output_libname, output_dir=None,
                          debug=False, target_lang=None):
        """Bundle 'objects' into a static library using tlib.exe.

        Skips the step when the library is already newer than its inputs.
        Raises LibError when tlib fails.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            # '/u' adds (or replaces) each object in the library.
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except PackagingExecError as msg:
                raise LibError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link objects into an executable or DLL with ilink32.exe.

        Builds the comma-delimited ilink32 command line (objects, output
        name, map file slot, libraries, .def file, resources) and spawns
        the linker.  Raises LinkError on failure.
        """
        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            logger.warning("don't know what to do with "
                           "'runtime_library_dirs': %r", runtime_library_dirs)

        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                # DLL target: different startup object and flags.
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]

            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split(output_filename)
                modname, ext = os.path.splitext(tail)
                temp_dir = os.path.dirname(objects[0])  # preserve tree structure
                def_file = os.path.join(temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    contents.append('  %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = [os.path.normpath(o) for o in objects]
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                base, ext = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)

            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.")  # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited.  This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward.  Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them.  Arghghh!.  Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend((',', output_filename))
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append('import32')
            ld_args.append('cw32mt')

            # def file for export symbols
            ld_args.extend((',', def_file))
            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except PackagingExecError as msg:
                raise LinkError(msg)

        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------

    def find_library_file(self, dirs, lib, debug=False):
        """Return the path of the best matching library file in 'dirs',
        or None if no candidate exists.
        """
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Packaging hackers, I suspect
        # ;-).  The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = (lib + "_d")
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)

        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames(self, source_filenames, strip_dir=False,
                         output_dir=''):
        """Map source filenames to the object filenames they produce.

        '.res' names pass through, '.rc' names map to '.res', everything
        else maps to self.obj_extension.  Raises UnknownFileError for
        unsupported extensions.
        """
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename(base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append(os.path.join(output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append(os.path.join(output_dir, base + '.res'))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names

    def preprocess(self, source, output_file=None, macros=None,
                   include_dirs=None, extra_preargs=None,
                   extra_postargs=None):
        """Preprocess 'source' with cpp32.exe, writing to 'output_file'
        (stdout if None).  Raises CompileError on failure.
        """
        _, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except PackagingExecError as msg:
                # NOTE(review): this print looks like a debug leftover;
                # the raised CompileError already carries the message.
                print(msg)
                raise CompileError(msg)
868
Lib/packaging/compiler/ccompiler.py
Normal file
868
Lib/packaging/compiler/ccompiler.py
Normal file
|
|
@ -0,0 +1,868 @@
|
|||
"""Abstract base class for compilers.
|
||||
|
||||
This modules contains CCompiler, an abstract base class that defines the
|
||||
interface for the compiler abstraction model used by packaging.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from shutil import move
|
||||
from packaging import logger
|
||||
from packaging.util import split_quoted, execute, newer_group, spawn
|
||||
from packaging.errors import (CompileError, LinkError, UnknownFileError)
|
||||
from packaging.compiler import gen_preprocess_options
|
||||
|
||||
|
||||
class CCompiler:
|
||||
"""Abstract base class to define the interface that must be implemented
|
||||
by real compiler classes. Also has some utility methods used by
|
||||
several compiler classes.
|
||||
|
||||
The basic idea behind a compiler abstraction class is that each
|
||||
instance can be used for all the compile/link steps in building a
|
||||
single project. Thus, attributes common to all of those compile and
|
||||
link steps -- include directories, macros to define, libraries to link
|
||||
against, etc. -- are attributes of the compiler instance. To allow for
|
||||
variability in how individual files are treated, most of those
|
||||
attributes may be varied on a per-compilation or per-link basis.
|
||||
"""
|
||||
|
||||
# 'name' is a class attribute that identifies this class. It
|
||||
# keeps code that wants to know what kind of compiler it's dealing with
|
||||
# from having to import all possible compiler classes just to do an
|
||||
# 'isinstance'.
|
||||
name = None
|
||||
description = None
|
||||
|
||||
# XXX things not handled by this compiler abstraction model:
|
||||
# * client can't provide additional options for a compiler,
|
||||
# e.g. warning, optimization, debugging flags. Perhaps this
|
||||
# should be the domain of concrete compiler abstraction classes
|
||||
# (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
|
||||
# class should have methods for the common ones.
|
||||
# * can't completely override the include or library searchg
|
||||
# path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
|
||||
# I'm not sure how widely supported this is even by Unix
|
||||
# compilers, much less on other platforms. And I'm even less
|
||||
# sure how useful it is; maybe for cross-compiling, but
|
||||
# support for that is a ways off. (And anyways, cross
|
||||
# compilers probably have a dedicated binary with the
|
||||
# right paths compiled in. I hope.)
|
||||
# * can't do really freaky things with the library list/library
|
||||
# dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
|
||||
# different versions of libfoo.a in different locations. I
|
||||
# think this is useless without the ability to null out the
|
||||
# library search path anyways.
|
||||
|
||||
|
||||
# Subclasses that rely on the standard filename generation methods
|
||||
# implemented below should override these; see the comment near
|
||||
# those methods ('object_filenames()' et. al.) for details:
|
||||
src_extensions = None # list of strings
|
||||
obj_extension = None # string
|
||||
static_lib_extension = None
|
||||
shared_lib_extension = None # string
|
||||
static_lib_format = None # format string
|
||||
shared_lib_format = None # prob. same as static_lib_format
|
||||
exe_extension = None # string
|
||||
|
||||
# Default language settings. language_map is used to detect a source
|
||||
# file or Extension target language, checking source filenames.
|
||||
# language_order is used to detect the language precedence, when deciding
|
||||
# what language to use when mixing source types. For example, if some
|
||||
# extension has two files with ".c" extension, and one with ".cpp", it
|
||||
# is still linked as c++.
|
||||
language_map = {".c": "c",
|
||||
".cc": "c++",
|
||||
".cpp": "c++",
|
||||
".cxx": "c++",
|
||||
".m": "objc",
|
||||
}
|
||||
language_order = ["c++", "objc", "c"]
|
||||
|
||||
    def __init__(self, verbose=0, dry_run=False, force=False):
        """Set up per-instance bookkeeping shared by all compile/link steps.

        'dry_run' suppresses actual command execution, 'force' makes
        dependency checks always rebuild.  Reads the 'executables' class
        attribute, which concrete subclasses must define.
        """
        self.dry_run = dry_run
        self.force = force
        self.verbose = verbose

        # 'output_dir': a common output directory for object, library,
        # shared object, and shared library files
        self.output_dir = None

        # 'macros': a list of macro definitions (or undefinitions).  A
        # macro definition is a 2-tuple (name, value), where the value is
        # either a string or None (no explicit value).  A macro
        # undefinition is a 1-tuple (name,).
        self.macros = []

        # 'include_dirs': a list of directories to search for include files
        self.include_dirs = []

        # 'libraries': a list of libraries to include in any link
        # (library names, not filenames: eg. "foo" not "libfoo.a")
        self.libraries = []

        # 'library_dirs': a list of directories to search for libraries
        self.library_dirs = []

        # 'runtime_library_dirs': a list of directories to search for
        # shared libraries/objects at runtime
        self.runtime_library_dirs = []

        # 'objects': a list of object files (or similar, such as explicitly
        # named library files) to include on any link
        self.objects = []

        # Seed per-instance executable attributes from the subclass's
        # 'executables' mapping (command strings get split into argv lists).
        for key, value in self.executables.items():
            self.set_executable(key, value)
||||
    def set_executables(self, **args):
        """Define the executables (and options for them) that will be run
        to perform the various stages of compilation.  The exact set of
        executables that may be specified here depends on the compiler
        class (via the 'executables' class attribute), but most will have:
          compiler      the C/C++ compiler
          linker_so     linker used to create shared objects and libraries
          linker_exe    linker used to create binary executables
          archiver      static library creator

        On platforms with a command line (Unix, DOS/Windows), each of these
        is a string that will be split into executable name and (optional)
        list of arguments.  (Splitting the string is done similarly to how
        Unix shells operate: words are delimited by spaces, but quotes and
        backslashes can override this.  See
        'distutils.util.split_quoted()'.)
        """
        # Note that some CCompiler implementation classes will define class
        # attributes 'cpp', 'cc', etc. with hard-coded executable names;
        # this is appropriate when a compiler class is for exactly one
        # compiler/OS combination (eg. MSVCCompiler).  Other compiler
        # classes (UnixCCompiler, in particular) are driven by information
        # discovered at run-time, since there are many different ways to do
        # basically the same things with Unix C compilers.

        for key, value in args.items():
            # Reject names the subclass did not declare in 'executables',
            # so typos fail loudly instead of silently setting attributes.
            if key not in self.executables:
                raise ValueError("unknown executable '%s' for class %s" % \
                      (key, self.__class__.__name__))
            self.set_executable(key, value)
def set_executable(self, key, value):
|
||||
if isinstance(value, str):
|
||||
setattr(self, key, split_quoted(value))
|
||||
else:
|
||||
setattr(self, key, value)
|
||||
|
||||
def _find_macro(self, name):
|
||||
i = 0
|
||||
for defn in self.macros:
|
||||
if defn[0] == name:
|
||||
return i
|
||||
i = i + 1
|
||||
return None
|
||||
|
||||
def _check_macro_definitions(self, definitions):
|
||||
"""Ensures that every element of 'definitions' is a valid macro
|
||||
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
|
||||
nothing if all definitions are OK, raise TypeError otherwise.
|
||||
"""
|
||||
for defn in definitions:
|
||||
if not (isinstance(defn, tuple) and
|
||||
(len(defn) == 1 or
|
||||
(len(defn) == 2 and
|
||||
(isinstance(defn[1], str) or defn[1] is None))) and
|
||||
isinstance(defn[0], str)):
|
||||
raise TypeError(("invalid macro definition '%s': " % defn) + \
|
||||
"must be tuple (string,), (string, string), or " + \
|
||||
"(string, None)")
|
||||
|
||||
|
||||
# -- Bookkeeping methods -------------------------------------------
|
||||
|
||||
def define_macro(self, name, value=None):
|
||||
"""Define a preprocessor macro for all compilations driven by this
|
||||
compiler object. The optional parameter 'value' should be a
|
||||
string; if it is not supplied, then the macro will be defined
|
||||
without an explicit value and the exact outcome depends on the
|
||||
compiler used (XXX true? does ANSI say anything about this?)
|
||||
"""
|
||||
# Delete from the list of macro definitions/undefinitions if
|
||||
# already there (so that this one will take precedence).
|
||||
i = self._find_macro(name)
|
||||
if i is not None:
|
||||
del self.macros[i]
|
||||
|
||||
defn = (name, value)
|
||||
self.macros.append(defn)
|
||||
|
||||
def undefine_macro(self, name):
|
||||
"""Undefine a preprocessor macro for all compilations driven by
|
||||
this compiler object. If the same macro is defined by
|
||||
'define_macro()' and undefined by 'undefine_macro()' the last call
|
||||
takes precedence (including multiple redefinitions or
|
||||
undefinitions). If the macro is redefined/undefined on a
|
||||
per-compilation basis (ie. in the call to 'compile()'), then that
|
||||
takes precedence.
|
||||
"""
|
||||
# Delete from the list of macro definitions/undefinitions if
|
||||
# already there (so that this one will take precedence).
|
||||
i = self._find_macro(name)
|
||||
if i is not None:
|
||||
del self.macros[i]
|
||||
|
||||
undefn = (name,)
|
||||
self.macros.append(undefn)
|
||||
|
||||
def add_include_dir(self, dir):
|
||||
"""Add 'dir' to the list of directories that will be searched for
|
||||
header files. The compiler is instructed to search directories in
|
||||
the order in which they are supplied by successive calls to
|
||||
'add_include_dir()'.
|
||||
"""
|
||||
self.include_dirs.append(dir)
|
||||
|
||||
def set_include_dirs(self, dirs):
|
||||
"""Set the list of directories that will be searched to 'dirs' (a
|
||||
list of strings). Overrides any preceding calls to
|
||||
'add_include_dir()'; subsequence calls to 'add_include_dir()' add
|
||||
to the list passed to 'set_include_dirs()'. This does not affect
|
||||
any list of standard include directories that the compiler may
|
||||
search by default.
|
||||
"""
|
||||
self.include_dirs = dirs[:]
|
||||
|
||||
def add_library(self, libname):
|
||||
"""Add 'libname' to the list of libraries that will be included in
|
||||
all links driven by this compiler object. Note that 'libname'
|
||||
should *not* be the name of a file containing a library, but the
|
||||
name of the library itself: the actual filename will be inferred by
|
||||
the linker, the compiler, or the compiler class (depending on the
|
||||
platform).
|
||||
|
||||
The linker will be instructed to link against libraries in the
|
||||
order they were supplied to 'add_library()' and/or
|
||||
'set_libraries()'. It is perfectly valid to duplicate library
|
||||
names; the linker will be instructed to link against libraries as
|
||||
many times as they are mentioned.
|
||||
"""
|
||||
self.libraries.append(libname)
|
||||
|
||||
def set_libraries(self, libnames):
|
||||
"""Set the list of libraries to be included in all links driven by
|
||||
this compiler object to 'libnames' (a list of strings). This does
|
||||
not affect any standard system libraries that the linker may
|
||||
include by default.
|
||||
"""
|
||||
self.libraries = libnames[:]
|
||||
|
||||
|
||||
def add_library_dir(self, dir):
|
||||
"""Add 'dir' to the list of directories that will be searched for
|
||||
libraries specified to 'add_library()' and 'set_libraries()'. The
|
||||
linker will be instructed to search for libraries in the order they
|
||||
are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
|
||||
"""
|
||||
self.library_dirs.append(dir)
|
||||
|
||||
def set_library_dirs(self, dirs):
|
||||
"""Set the list of library search directories to 'dirs' (a list of
|
||||
strings). This does not affect any standard library search path
|
||||
that the linker may search by default.
|
||||
"""
|
||||
self.library_dirs = dirs[:]
|
||||
|
||||
def add_runtime_library_dir(self, dir):
|
||||
"""Add 'dir' to the list of directories that will be searched for
|
||||
shared libraries at runtime.
|
||||
"""
|
||||
self.runtime_library_dirs.append(dir)
|
||||
|
||||
def set_runtime_library_dirs(self, dirs):
|
||||
"""Set the list of directories to search for shared libraries at
|
||||
runtime to 'dirs' (a list of strings). This does not affect any
|
||||
standard search path that the runtime linker may search by
|
||||
default.
|
||||
"""
|
||||
self.runtime_library_dirs = dirs[:]
|
||||
|
||||
def add_link_object(self, object):
|
||||
"""Add 'object' to the list of object files (or analogues, such as
|
||||
explicitly named library files or the output of "resource
|
||||
compilers") to be included in every link driven by this compiler
|
||||
object.
|
||||
"""
|
||||
self.objects.append(object)
|
||||
|
||||
def set_link_objects(self, objects):
|
||||
"""Set the list of object files (or analogues) to be included in
|
||||
every link to 'objects'. This does not affect any standard object
|
||||
files that the linker may include by default (such as system
|
||||
libraries).
|
||||
"""
|
||||
self.objects = objects[:]
|
||||
|
||||
|
||||
# -- Private utility methods --------------------------------------
|
||||
# (here for the convenience of subclasses)
|
||||
|
||||
# Helper method to prep compiler in subclass compile() methods
|
||||
def _setup_compile(self, outdir, macros, incdirs, sources, depends,
|
||||
extra):
|
||||
"""Process arguments and decide which source files to compile."""
|
||||
if outdir is None:
|
||||
outdir = self.output_dir
|
||||
elif not isinstance(outdir, str):
|
||||
raise TypeError("'output_dir' must be a string or None")
|
||||
|
||||
if macros is None:
|
||||
macros = self.macros
|
||||
elif isinstance(macros, list):
|
||||
macros = macros + (self.macros or [])
|
||||
else:
|
||||
raise TypeError("'macros' (if supplied) must be a list of tuples")
|
||||
|
||||
if incdirs is None:
|
||||
incdirs = self.include_dirs
|
||||
elif isinstance(incdirs, (list, tuple)):
|
||||
incdirs = list(incdirs) + (self.include_dirs or [])
|
||||
else:
|
||||
raise TypeError(
|
||||
"'include_dirs' (if supplied) must be a list of strings")
|
||||
|
||||
if extra is None:
|
||||
extra = []
|
||||
|
||||
# Get the list of expected output (object) files
|
||||
objects = self.object_filenames(sources,
|
||||
strip_dir=False,
|
||||
output_dir=outdir)
|
||||
assert len(objects) == len(sources)
|
||||
|
||||
pp_opts = gen_preprocess_options(macros, incdirs)
|
||||
|
||||
build = {}
|
||||
for i in range(len(sources)):
|
||||
src = sources[i]
|
||||
obj = objects[i]
|
||||
ext = os.path.splitext(src)[1]
|
||||
self.mkpath(os.path.dirname(obj))
|
||||
build[obj] = (src, ext)
|
||||
|
||||
return macros, objects, extra, pp_opts, build
|
||||
|
||||
def _get_cc_args(self, pp_opts, debug, before):
|
||||
# works for unixccompiler, emxccompiler, cygwinccompiler
|
||||
cc_args = pp_opts + ['-c']
|
||||
if debug:
|
||||
cc_args[:0] = ['-g']
|
||||
if before:
|
||||
cc_args[:0] = before
|
||||
return cc_args
|
||||
|
||||
def _fix_compile_args(self, output_dir, macros, include_dirs):
|
||||
"""Typecheck and fix-up some of the arguments to the 'compile()'
|
||||
method, and return fixed-up values. Specifically: if 'output_dir'
|
||||
is None, replaces it with 'self.output_dir'; ensures that 'macros'
|
||||
is a list, and augments it with 'self.macros'; ensures that
|
||||
'include_dirs' is a list, and augments it with 'self.include_dirs'.
|
||||
Guarantees that the returned values are of the correct type,
|
||||
i.e. for 'output_dir' either string or None, and for 'macros' and
|
||||
'include_dirs' either list or None.
|
||||
"""
|
||||
if output_dir is None:
|
||||
output_dir = self.output_dir
|
||||
elif not isinstance(output_dir, str):
|
||||
raise TypeError("'output_dir' must be a string or None")
|
||||
|
||||
if macros is None:
|
||||
macros = self.macros
|
||||
elif isinstance(macros, list):
|
||||
macros = macros + (self.macros or [])
|
||||
else:
|
||||
raise TypeError("'macros' (if supplied) must be a list of tuples")
|
||||
|
||||
if include_dirs is None:
|
||||
include_dirs = self.include_dirs
|
||||
elif isinstance(include_dirs, (list, tuple)):
|
||||
include_dirs = list(include_dirs) + (self.include_dirs or [])
|
||||
else:
|
||||
raise TypeError(
|
||||
"'include_dirs' (if supplied) must be a list of strings")
|
||||
|
||||
return output_dir, macros, include_dirs
|
||||
|
||||
def _fix_object_args(self, objects, output_dir):
|
||||
"""Typecheck and fix up some arguments supplied to various methods.
|
||||
Specifically: ensure that 'objects' is a list; if output_dir is
|
||||
None, replace with self.output_dir. Return fixed versions of
|
||||
'objects' and 'output_dir'.
|
||||
"""
|
||||
if not isinstance(objects, (list, tuple)):
|
||||
raise TypeError("'objects' must be a list or tuple of strings")
|
||||
objects = list(objects)
|
||||
|
||||
if output_dir is None:
|
||||
output_dir = self.output_dir
|
||||
elif not isinstance(output_dir, str):
|
||||
raise TypeError("'output_dir' must be a string or None")
|
||||
|
||||
return objects, output_dir
|
||||
|
||||
def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
|
||||
"""Typecheck and fix up some of the arguments supplied to the
|
||||
'link_*' methods. Specifically: ensure that all arguments are
|
||||
lists, and augment them with their permanent versions
|
||||
(eg. 'self.libraries' augments 'libraries'). Return a tuple with
|
||||
fixed versions of all arguments.
|
||||
"""
|
||||
if libraries is None:
|
||||
libraries = self.libraries
|
||||
elif isinstance(libraries, (list, tuple)):
|
||||
libraries = list(libraries) + (self.libraries or [])
|
||||
else:
|
||||
raise TypeError(
|
||||
"'libraries' (if supplied) must be a list of strings")
|
||||
|
||||
if library_dirs is None:
|
||||
library_dirs = self.library_dirs
|
||||
elif isinstance(library_dirs, (list, tuple)):
|
||||
library_dirs = list(library_dirs) + (self.library_dirs or [])
|
||||
else:
|
||||
raise TypeError(
|
||||
"'library_dirs' (if supplied) must be a list of strings")
|
||||
|
||||
if runtime_library_dirs is None:
|
||||
runtime_library_dirs = self.runtime_library_dirs
|
||||
elif isinstance(runtime_library_dirs, (list, tuple)):
|
||||
runtime_library_dirs = (list(runtime_library_dirs) +
|
||||
(self.runtime_library_dirs or []))
|
||||
else:
|
||||
raise TypeError("'runtime_library_dirs' (if supplied) "
|
||||
"must be a list of strings")
|
||||
|
||||
return libraries, library_dirs, runtime_library_dirs
|
||||
|
||||
def _need_link(self, objects, output_file):
    """Return true if 'output_file' must be relinked from the files
    listed in 'objects'.
    """
    # A forced build always relinks.
    if self.force:
        return True
    if self.dry_run:
        # In dry-run mode the object files may not actually exist;
        # treat missing files as newer so the (pretend) link happens.
        return newer_group(objects, output_file, missing='newer')
    return newer_group(objects, output_file)
|
||||
|
||||
def detect_language(self, sources):
    """Detect the language of a given file, or list of files.

    Uses 'self.language_map' (extension -> language) and
    'self.language_order' (languages, highest priority first); the
    highest-priority language found among 'sources' wins.
    """
    if not isinstance(sources, list):
        sources = [sources]
    best = None
    best_rank = len(self.language_order)
    for src in sources:
        ext = os.path.splitext(src)[1]
        candidate = self.language_map.get(ext)
        try:
            rank = self.language_order.index(candidate)
        except ValueError:
            # Unknown extension or language: ignore this source file.
            continue
        if rank < best_rank:
            best, best_rank = candidate, rank
    return best
|
||||
|
||||
# -- Worker methods ------------------------------------------------
|
||||
# (must be implemented by subclasses)
|
||||
|
||||
def preprocess(self, source, output_file=None, macros=None,
               include_dirs=None, extra_preargs=None, extra_postargs=None):
    """Preprocess a single C/C++ source file, named in 'source'.

    Output is written to 'output_file', or stdout if 'output_file' is
    not supplied.  'macros' is a list of macro definitions as for
    'compile()', augmenting those set with 'define_macro()' and
    'undefine_macro()'.  'include_dirs' is a list of directory names
    added to the default list.

    Concrete implementations raise PreprocessError on failure; this
    default implementation does nothing.
    """
    pass
|
||||
|
||||
def compile(self, sources, output_dir=None, macros=None,
            include_dirs=None, debug=False, extra_preargs=None,
            extra_postargs=None, depends=None):
    """Compile one or more source files; return the object filenames.

    'sources' must be a list of filenames, most likely C/C++ files but
    really anything handled by the concrete compiler class (eg.
    MSVCCompiler can handle resource files).  One object filename is
    returned per source filename; depending on the implementation not
    every source is necessarily recompiled, but all corresponding
    object filenames are returned.

    If 'output_dir' is given, object files go under it while retaining
    their original path component: "foo/bar.c" compiles to
    "build/foo/bar.o" when 'output_dir' is "build".

    'macros', if given, is a list of macro definitions: a
    (name, value) 2-tuple defines a macro (value None means no explicit
    value), a (name,) 1-tuple undefines one.  Later entries take
    precedence.

    'include_dirs', if given, is a list of extra include directories
    for this compilation only.

    'debug' is a boolean; if true, emit debug symbols in (or alongside)
    the object file(s).

    'extra_preargs' and 'extra_postargs' are implementation-dependent;
    on command-line platforms they are lists of strings to prepend or
    append to the compiler command line.  They are an escape hatch for
    when the abstract framework doesn't cut the mustard.

    'depends', if given, is a list of filenames all targets depend on:
    a source older than any of them is recompiled (coarse-grained
    dependency tracking).

    Raises CompileError on failure.
    """
    # A concrete compiler class can either override this method
    # entirely or implement _compile().
    macros, objects, extra_postargs, pp_opts, build = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

    for obj in objects:
        entry = build.get(obj)
        if entry is None:
            # Already up to date -- nothing to rebuild for this object.
            continue
        src, ext = entry
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    # Return *all* object filenames, not just the ones we just built.
    return objects
|
||||
|
||||
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
|
||||
"""Compile 'src' to product 'obj'."""
|
||||
|
||||
# A concrete compiler class that does not override compile()
|
||||
# should implement _compile().
|
||||
pass
|
||||
|
||||
def create_static_lib(self, objects, output_libname, output_dir=None,
                      debug=False, target_lang=None):
    """Link a bunch of stuff together to create a static library file.

    The "bunch of stuff" is the object files in 'objects' plus the
    extra objects from 'add_link_object()'/'set_link_objects()' and the
    libraries from 'add_library()'/'set_libraries()'.

    'output_libname' is a library name, not a filename; the filename is
    inferred from it.  'output_dir' is where the library file is put.

    'debug' is a boolean kept mostly for consistency -- on most
    platforms the compile step is where debug info matters.

    'target_lang' is the target language of the objects, allowing
    language-specific linkage treatment.

    Concrete implementations raise LibError on failure; this default
    implementation does nothing.
    """
    pass
|
||||
|
||||
# Values for the 'target_desc' parameter of link(); concrete compiler
# classes dispatch on these to choose the right linker invocation.
SHARED_OBJECT = "shared_object"    # e.g. a Python extension module
SHARED_LIBRARY = "shared_library"  # a conventional shared library
EXECUTABLE = "executable"          # a standalone program
|
||||
|
||||
def link(self, target_desc, objects, output_filename, output_dir=None,
         libraries=None, library_dirs=None, runtime_library_dirs=None,
         export_symbols=None, debug=False, extra_preargs=None,
         extra_postargs=None, build_temp=None, target_lang=None):
    """Link a bunch of stuff together to create an executable or
    shared library file.

    'target_desc' is one of SHARED_OBJECT, SHARED_LIBRARY or
    EXECUTABLE.  'objects' is the list of object files to link.
    'output_filename' is a filename; if 'output_dir' is supplied it is
    interpreted relative to it.

    'libraries' is a list of libraries to link against -- library
    names, not filenames, translated per platform ("foo" becomes
    "libfoo.a" on Unix, "foo.lib" on DOS/Windows).  A name with a
    directory component makes the linker look only in that directory.

    'library_dirs' lists directories searched for bare library names,
    on top of the system default and those from 'add_library_dir()' /
    'set_library_dirs()'.  'runtime_library_dirs' lists directories
    embedded into the shared library for run-time resolution of its own
    dependencies (may only be relevant on Unix).

    'export_symbols' lists symbols the shared library will export
    (apparently only relevant on Windows).

    'debug' is as for 'compile()', except it actually matters here on
    most platforms.  'extra_preargs' and 'extra_postargs' are as for
    'compile()', supplying linker command-line arguments.

    'target_lang' is the target language of the objects, allowing
    language-specific linkage treatment.

    Raises LinkError on failure.  Must be implemented by subclasses.
    """
    raise NotImplementedError
|
||||
|
||||
|
||||
# Old 'link_*()' methods, rewritten to use the new 'link()' method.
|
||||
|
||||
def link_shared_lib(self, objects, output_libname, output_dir=None,
                    libraries=None, library_dirs=None,
                    runtime_library_dirs=None, export_symbols=None,
                    debug=False, extra_preargs=None, extra_postargs=None,
                    build_temp=None, target_lang=None):
    """Old-style entry point: link a shared library via link()."""
    # Translate the bare library name into a platform filename first.
    filename = self.library_filename(output_libname, lib_type='shared')
    self.link(CCompiler.SHARED_LIBRARY, objects, filename, output_dir,
              libraries, library_dirs, runtime_library_dirs,
              export_symbols, debug,
              extra_preargs, extra_postargs, build_temp, target_lang)
|
||||
|
||||
def link_shared_object(self, objects, output_filename, output_dir=None,
                       libraries=None, library_dirs=None,
                       runtime_library_dirs=None, export_symbols=None,
                       debug=False, extra_preargs=None, extra_postargs=None,
                       build_temp=None, target_lang=None):
    """Old-style entry point: link a shared object via link()."""
    self.link(CCompiler.SHARED_OBJECT, objects, output_filename,
              output_dir, libraries, library_dirs, runtime_library_dirs,
              export_symbols, debug,
              extra_preargs, extra_postargs, build_temp, target_lang)
|
||||
|
||||
def link_executable(self, objects, output_progname, output_dir=None,
                    libraries=None, library_dirs=None,
                    runtime_library_dirs=None, debug=False,
                    extra_preargs=None, extra_postargs=None,
                    target_lang=None):
    """Old-style entry point: link an executable via link()."""
    # Program names get the platform executable extension appended.
    filename = self.executable_filename(output_progname)
    self.link(CCompiler.EXECUTABLE, objects, filename, output_dir,
              libraries, library_dirs, runtime_library_dirs,
              None,  # executables export no symbols
              debug, extra_preargs, extra_postargs, None, target_lang)
|
||||
|
||||
|
||||
# -- Miscellaneous methods -----------------------------------------
|
||||
# These are all used by the 'gen_lib_options() function; there is
|
||||
# no appropriate default implementation so subclasses should
|
||||
# implement all of these.
|
||||
|
||||
def library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the list of
    directories searched for libraries.  Must be implemented by
    subclasses (used by gen_lib_options()).
    """
    raise NotImplementedError
|
||||
|
||||
def runtime_library_dir_option(self, dir):
    """Return the compiler option that adds 'dir' to the list of
    directories searched for runtime libraries.  Must be implemented by
    subclasses (used by gen_lib_options()).
    """
    raise NotImplementedError
|
||||
|
||||
def library_option(self, lib):
    """Return the compiler option that links library 'lib' into the
    shared library or executable being built.  Must be implemented by
    subclasses (used by gen_lib_options()).
    """
    raise NotImplementedError
|
||||
|
||||
def has_function(self, funcname, includes=None, include_dirs=None,
                 libraries=None, library_dirs=None):
    """Return a boolean indicating whether 'funcname' is supported on
    the current platform.

    A tiny C program calling 'funcname' is compiled and linked; success
    of both steps means the function is available.  The optional
    arguments augment the compilation environment: 'includes' is a list
    of header names to #include, the others are as for 'compile()' and
    'link_executable()'.
    """
    # Local import: tempfile is only needed here, and importing it at
    # module scope would slow every use of this module.  (The original
    # comment claimed "math" was imported -- that was stale.)
    import tempfile
    if includes is None:
        includes = []
    if include_dirs is None:
        include_dirs = []
    if libraries is None:
        libraries = []
    if library_dirs is None:
        library_dirs = []

    fd, fname = tempfile.mkstemp(".c", funcname, text=True)
    try:
        with os.fdopen(fd, "w") as f:
            for incl in includes:
                f.write("""#include "%s"\n""" % incl)
            f.write("""\
main (int argc, char **argv) {
    %s();
}
""" % funcname)

        try:
            objects = self.compile([fname], include_dirs=include_dirs)
        except CompileError:
            return False

        try:
            self.link_executable(objects, "a.out",
                                 libraries=libraries,
                                 library_dirs=library_dirs)
        except (LinkError, TypeError):
            return False
        return True
    finally:
        # Bug fix: the generated C source was previously leaked.  The
        # object files and "a.out" are still left behind; removing them
        # would require knowing the concrete compiler's output layout.
        try:
            os.remove(fname)
        except OSError:
            pass
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=False):
    """Search 'dirs' for a static or shared library 'lib' and return
    the full path to that file, or None if it was not found in any of
    the given directories.  If 'debug' is true, look for a debugging
    version where that makes sense on the current platform.  Must be
    implemented by subclasses.
    """
    raise NotImplementedError
|
||||
|
||||
# -- Filename generation methods -----------------------------------
|
||||
|
||||
# The default implementation of the filename generating methods are
|
||||
# prejudiced towards the Unix/DOS/Windows view of the world:
|
||||
# * object files are named by replacing the source file extension
|
||||
# (eg. .c/.cpp -> .o/.obj)
|
||||
# * library files (shared or static) are named by plugging the
|
||||
# library name and extension into a format string, eg.
|
||||
# "lib%s.%s" % (lib_name, ".a") for Unix static libraries
|
||||
# * executables are named by appending an extension (possibly
|
||||
# empty) to the program name: eg. progname + ".exe" for
|
||||
# Windows
|
||||
#
|
||||
# To reduce redundant code, these methods expect to find
|
||||
# several attributes in the current object (presumably defined
|
||||
# as class attributes):
|
||||
# * src_extensions -
|
||||
# list of C/C++ source file extensions, eg. ['.c', '.cpp']
|
||||
# * obj_extension -
|
||||
# object file extension, eg. '.o' or '.obj'
|
||||
# * static_lib_extension -
|
||||
# extension for static library files, eg. '.a' or '.lib'
|
||||
# * shared_lib_extension -
|
||||
# extension for shared library/object files, eg. '.so', '.dll'
|
||||
# * static_lib_format -
|
||||
# format string for generating static library filenames,
|
||||
# eg. 'lib%s.%s' or '%s.%s'
|
||||
# * shared_lib_format
|
||||
# format string for generating shared library filenames
|
||||
# (probably same as static_lib_format, since the extension
|
||||
# is one of the intended parameters to the format string)
|
||||
# * exe_extension -
|
||||
# extension for executable files, eg. '' or '.exe'
|
||||
|
||||
def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
    """Map each source filename to an object filename under
    'output_dir', preserving the relative path component unless
    'strip_dir' is true.  Raises UnknownFileError for extensions not in
    'self.src_extensions'.
    """
    if output_dir is None:
        output_dir = ''
    result = []
    for source in source_filenames:
        root, ext = os.path.splitext(source)
        root = os.path.splitdrive(root)[1]   # chop off any drive letter
        root = root[os.path.isabs(root):]    # and any leading separator
        if ext not in self.src_extensions:
            raise UnknownFileError("unknown file type '%s' (from '%s')" %
                                   (ext, source))
        if strip_dir:
            root = os.path.basename(root)
        result.append(os.path.join(output_dir, root + self.obj_extension))
    return result
|
||||
|
||||
def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
    """Return the shared-object filename for 'basename' under
    'output_dir' (must not be None).
    """
    assert output_dir is not None
    if strip_dir:
        basename = os.path.basename(basename)
    filename = basename + self.shared_lib_extension
    return os.path.join(output_dir, filename)
|
||||
|
||||
def executable_filename(self, basename, strip_dir=False, output_dir=''):
    """Return the executable filename for 'basename' under
    'output_dir' (must not be None).  'self.exe_extension' may be None
    or '' on platforms without one.
    """
    assert output_dir is not None
    if strip_dir:
        basename = os.path.basename(basename)
    ext = self.exe_extension or ''
    return os.path.join(output_dir, basename + ext)
|
||||
|
||||
def library_filename(self, libname, lib_type='static',  # or 'shared'
                     strip_dir=False, output_dir=''):
    """Return the platform filename for library 'libname' of the given
    'lib_type' ('static', 'shared' or 'dylib'), plugging the name and
    extension into the class's format string for that type.
    """
    assert output_dir is not None
    if lib_type not in ("static", "shared", "dylib"):
        raise ValueError(
            "'lib_type' must be 'static', 'shared' or 'dylib'")
    fmt = getattr(self, lib_type + "_lib_format")
    ext = getattr(self, lib_type + "_lib_extension")

    dirname, base = os.path.split(libname)
    if strip_dir:
        # Drop any directory component the caller supplied.
        dirname = ''
    return os.path.join(output_dir, dirname, fmt % (base, ext))
|
||||
|
||||
|
||||
# -- Utility methods -----------------------------------------------
|
||||
|
||||
def execute(self, func, args, msg=None, level=1):
    """Run 'func(*args)' through the module-level 'execute' helper,
    honouring this compiler's dry-run setting.

    'msg' is an optional log message.  'level' is accepted for
    interface compatibility but unused here.
    """
    # NOTE(review): 'execute' here resolves to a module-level helper
    # (presumably packaging.util.execute), not to this method -- confirm
    # against the imports at the top of the file.
    execute(func, args, msg, self.dry_run)
|
||||
|
||||
def spawn(self, cmd):
    """Run the external command 'cmd' (program plus arguments),
    honouring this compiler's dry-run setting.
    """
    # NOTE(review): delegates to a module-level 'spawn' helper
    # (presumably packaging.util.spawn) -- confirm the import.
    spawn(cmd, dry_run=self.dry_run)
|
||||
|
||||
def move_file(self, src, dst):
    """Move file 'src' to 'dst', logging the action.

    In dry-run mode only the log message is emitted and None is
    returned; otherwise the result of the module-level 'move' helper
    (presumably shutil.move -- confirm the import) is returned.
    """
    logger.info("moving %r to %r", src, dst)
    if self.dry_run:
        return
    return move(src, dst)
|
||||
|
||||
def mkpath(self, name, mode=0o777):
    """Create directory 'name' and any missing ancestors, honouring
    dry-run mode.  Does nothing if the directory already exists or the
    name normalizes to the empty string.
    """
    name = os.path.normpath(name)
    if name == '' or os.path.isdir(name):
        return
    if not self.dry_run:
        os.makedirs(name, mode)
        return
    # Dry run: log each path component as if it had been created.
    prefix = ''
    for component in name.split(os.sep):
        logger.info("created directory %s%s", prefix, component)
        prefix += component + os.sep
|
||||
355
Lib/packaging/compiler/cygwinccompiler.py
Normal file
355
Lib/packaging/compiler/cygwinccompiler.py
Normal file
|
|
@ -0,0 +1,355 @@
|
|||
"""CCompiler implementations for Cygwin and mingw32 versions of GCC.
|
||||
|
||||
This module contains the CygwinCCompiler class, a subclass of
|
||||
UnixCCompiler that handles the Cygwin port of the GNU C compiler to
|
||||
Windows, and the Mingw32CCompiler class which handles the mingw32 port
|
||||
of GCC (same as cygwin in no-cygwin mode).
|
||||
"""
|
||||
|
||||
# problems:
|
||||
#
|
||||
# * if you use a msvc compiled python version (1.5.2)
|
||||
# 1. you have to insert a __GNUC__ section in its config.h
|
||||
# 2. you have to generate a import library for its dll
|
||||
# - create a def-file for python??.dll
|
||||
# - create a import library using
|
||||
# dlltool --dllname python15.dll --def python15.def \
|
||||
# --output-lib libpython15.a
|
||||
#
|
||||
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
|
||||
#
|
||||
# * We put export_symbols in a def-file, and don't use
|
||||
# --export-all-symbols because it doesn't worked reliable in some
|
||||
# tested configurations. And because other windows compilers also
|
||||
# need their symbols specified this no serious problem.
|
||||
#
|
||||
# tested configurations:
|
||||
#
|
||||
# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
|
||||
# (after patching python's config.h and for C++ some other include files)
|
||||
# see also http://starship.python.net/crew/kernr/mingw32/Notes.html
|
||||
# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
|
||||
# (ld doesn't support -shared, so we use dllwrap)
|
||||
# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
|
||||
# - its dllwrap doesn't work, there is a bug in binutils 2.10.90
|
||||
# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
|
||||
# - using gcc -mdll instead dllwrap doesn't work without -static because
|
||||
# it tries to link against dlls instead their import libraries. (If
|
||||
# it finds the dll first.)
|
||||
# By specifying -static we force ld to link against the import libraries,
|
||||
# this is windows standard and there are normally not the necessary symbols
|
||||
# in the dlls.
|
||||
# *** only the version of June 2000 shows these problems
|
||||
# * cygwin gcc 3.2/ld 2.13.90 works
|
||||
# (ld supports -shared)
|
||||
# * mingw gcc 3.2/ld 2.13 works
|
||||
# (ld supports -shared)
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
|
||||
from packaging import logger
|
||||
from packaging.compiler.unixccompiler import UnixCCompiler
|
||||
from packaging.util import write_file
|
||||
from packaging.errors import PackagingExecError, CompileError, UnknownFileError
|
||||
from packaging.util import get_compiler_versions
|
||||
import sysconfig
|
||||
|
||||
|
||||
def get_msvcr():
    """Include the appropriate MSVC runtime library if Python was built
    with MSVC 7.0 or later.

    Returns a list with one library name, or None when Python was not
    built with MSVC.  Raises ValueError for an unrecognized MSC
    version.
    """
    # An MSVC-built Python embeds e.g. "[MSC v.1500 ...]" in sys.version.
    marker = 'MSC v.'
    pos = sys.version.find(marker)
    if pos == -1:
        # Not an MSVC build (e.g. GCC): no extra runtime library needed.
        return None
    msc_ver = sys.version[pos + len(marker):pos + len(marker) + 4]
    runtimes = {'1300': ['msvcr70'],   # MSVC 7.0
                '1310': ['msvcr71'],   # MSVC 7.1
                '1400': ['msvcr80'],   # VS2005 / MSVC 8.0
                '1500': ['msvcr90']}   # VS2008 / MSVC 9.0
    try:
        return runtimes[msc_ver]
    except KeyError:
        raise ValueError("Unknown MS Compiler version %s " % msc_ver)
|
||||
|
||||
|
||||
class CygwinCCompiler(UnixCCompiler):
    """ Handles the Cygwin port of the GNU C compiler to Windows.
    """

    name = 'cygwin'
    description = 'Cygwin port of GNU C Compiler for Win32'
    # Filename conventions for GCC-built artifacts on Windows.
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".dll"
    static_lib_format = "lib%s%s"
    shared_lib_format = "%s%s"
    exe_extension = ".exe"

    def __init__(self, verbose=0, dry_run=False, force=False):
        UnixCCompiler.__init__(self, verbose, dry_run, force)

        # Warn early if pyconfig.h looks unusable with GCC; compilation
        # would otherwise fail later with confusing macro errors.
        status, details = check_config_h()
        logger.debug("Python's GCC status: %s (details: %s)", status, details)
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler. "
                "Reason: %s. "
                "Compiling may fail because of undefined preprocessor macros."
                % details)

        # Tool versions drive the choice of linker driver and options below.
        self.gcc_version, self.ld_version, self.dllwrap_version = \
            get_compiler_versions()
        logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n",
                     self.gcc_version,
                     self.ld_version,
                     self.dllwrap_version)

        # ld_version >= "2.10.90" and < "2.13" should also be able to use
        # gcc -mdll instead of dllwrap
        # Older dllwraps had own version numbers, newer ones use the
        # same as the rest of binutils ( also ld )
        # dllwrap 2.10.90 is buggy
        if self.ld_version >= "2.10.90":
            self.linker_dll = "gcc"
        else:
            self.linker_dll = "dllwrap"

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -mcygwin -O -Wall',
                             compiler_so='gcc -mcygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mcygwin -O -Wall',
                             linker_exe='gcc -mcygwin',
                             linker_so=('%s -mcygwin %s' %
                                        (self.linker_dll, shared_option)))

        # cygwin and mingw32 need different sets of libraries
        if self.gcc_version == "2.91.57":
            # cygwin shouldn't need msvcrt, but without the dlls will crash
            # (gcc version 2.91.57) -- perhaps something about initialization
            self.dll_libraries = ["msvcrt"]
            self.warn(
                "Consider upgrading to a newer version of gcc")
        else:
            # Include the appropriate MSVC runtime library if Python was built
            # with MSVC 7.0 or later.
            self.dll_libraries = get_msvcr()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile the source by spawning GCC and windres if needed."""
        if ext == '.rc' or ext == '.res':
            # gcc needs '.res' and '.rc' compiled to object files !!!
            try:
                self.spawn(["windres", "-i", src, "-o", obj])
            except PackagingExecError as msg:
                raise CompileError(msg)
        else:  # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except PackagingExecError as msg:
                raise CompileError(msg)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link the objects.

        Adds the Windows-specific pieces (runtime DLLs, a .def file for
        exported symbols, stripping) and then delegates to
        UnixCCompiler.link().
        """
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            dll_name, dll_extension = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")
            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")

            # Generate .def file
            contents = [
                "LIBRARY %s" % os.path.basename(output_filename),
                "EXPORTS"]
            for sym in export_symbols:
                contents.append(sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries

            # dllwrap uses different options than gcc/ld
            if self.linker_dll == "dllwrap":
                extra_preargs.extend(("--output-lib", lib_file))
                # for dllwrap we have to use a special option
                extra_preargs.extend(("--def", def_file))
            # we use gcc/ld here and can be sure ld is >= 2.9.10
            else:
                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
                #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file))
                # for gcc/ld the def-file is specified as any object files
                objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self, target_desc, objects, output_filename,
                           output_dir, libraries, library_dirs,
                           runtime_library_dirs,
                           None,  # export_symbols, we do this in our def-file
                           debug, extra_preargs, extra_postargs, build_temp,
                           target_lang)

    # -- Miscellaneous methods -----------------------------------------

    def object_filenames(self, source_filenames, strip_dir=False,
                         output_dir=''):
        """Adds supports for rc and res files."""
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            base, ext = os.path.splitext(os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
            if strip_dir:
                base = os.path.basename(base)
            if ext in ('.res', '.rc'):
                # these need to be compiled to object files
                obj_names.append(os.path.join(output_dir,
                                              base + ext + self.obj_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names
|
||||
|
||||
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(CygwinCCompiler):
    """ Handles the Mingw32 port of the GNU C compiler to Windows.
    """
    name = 'mingw32'
    description = 'MinGW32 compiler'

    def __init__(self, verbose=0, dry_run=False, force=False):
        CygwinCCompiler.__init__(self, verbose, dry_run, force)

        # ld_version >= "2.13" support -shared so use it instead of
        # -mdll -static
        if self.ld_version >= "2.13":
            shared_option = "-shared"
        else:
            shared_option = "-mdll -static"

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
                             compiler_cxx='g++ -mno-cygwin -O -Wall',
                             linker_exe='gcc -mno-cygwin',
                             linker_so='%s -mno-cygwin %s %s'
                                       % (self.linker_dll, shared_option,
                                          entry_point))
        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')

        # Fix: the original code first set self.dll_libraries = [] and
        # then immediately overwrote it below; the dead assignment has
        # been removed.

        # Include the appropriate MSVC runtime library if Python was built
        # with MSVC 7.0 or later.
        self.dll_libraries = get_msvcr()
|
||||
|
||||
# Because these compilers aren't configured in Python's pyconfig.h file by
# default, we should at least warn the user if he is using a unmodified
# version.

# Status codes returned by check_config_h().
CONFIG_H_OK = "ok"
CONFIG_H_NOTOK = "not ok"
CONFIG_H_UNCERTAIN = "uncertain"


def check_config_h():
    """Check if the current Python installation appears amenable to
    building extensions with GCC.

    Returns a tuple (status, details) where 'status' is one of
    CONFIG_H_OK (go ahead and compile), CONFIG_H_NOTOK (doesn't look
    good) or CONFIG_H_UNCERTAIN (pyconfig.h could not be read), and
    'details' is a human-readable explanation.

    There are two ways to conclude "OK": 'sys.version' mentions "GCC"
    (this Python was built with GCC), or the installed pyconfig.h
    contains "__GNUC__".
    """
    # XXX since this function also checks sys.version, it's not strictly
    # a "pyconfig.h" check -- should probably be renamed...
    # A GCC-built Python is certain to ship a GCC-compatible pyconfig.h.
    if "GCC" in sys.version:
        return CONFIG_H_OK, "sys.version mentions 'GCC'"

    # Otherwise grep the installed pyconfig.h for __GNUC__.
    fn = sysconfig.get_config_h_filename()
    try:
        with open(fn) as config_h:
            gcc_friendly = "__GNUC__" in config_h.read()
    except IOError as exc:
        return (CONFIG_H_UNCERTAIN,
                "couldn't read '%s': %s" % (fn, exc.strerror))
    if gcc_friendly:
        return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
    return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
|
||||
121
Lib/packaging/compiler/extension.py
Normal file
121
Lib/packaging/compiler/extension.py
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
"""Class representing C/C++ extension modules."""
|
||||
|
||||
from packaging import logger
|
||||
|
||||
# This class is really only used by the "build_ext" command, so it might
|
||||
# make sense to put it in distutils.command.build_ext. However, that
|
||||
# module is already big enough, and I want to make this class a bit more
|
||||
# complex to simplify some common cases ("foo" module in "foo.c") and do
|
||||
# better error-checking ("foo.c" actually exists).
|
||||
#
|
||||
# Also, putting this in build_ext.py means every setup script would have to
|
||||
# import that large-ish module (indirectly, through distutils.core) in
|
||||
# order to do anything.
|
||||
|
||||
|
||||
class Extension:
    """Just a collection of attributes that describes an extension
    module and everything needed to build it (hopefully in a portable
    way, but there are hooks that let you be as unportable as you need).

    Instance attributes:
      name : string
        full dotted name of the extension (*not* a filename or pathname)
      sources : [string]
        source filenames relative to the distribution root, in Unix
        (slash-separated) form; may be C, C++, SWIG (.i), resource
        files, or anything else the "build_ext" command recognizes
      include_dirs : [string]
        directories to search for C/C++ header files (Unix form)
      define_macros : [(name : string, value : string|None)]
        macros to define; value None means "#define FOO" with no value
      undef_macros : [string]
        macros to undefine explicitly
      library_dirs : [string]
        directories to search for C/C++ libraries at link time
      libraries : [string]
        library names (not filenames or paths) to link against
      runtime_library_dirs : [string]
        directories to search for libraries at run time (for shared
        extensions, this is when the extension is loaded)
      extra_objects : [string]
        extra files to link with (object files, static libraries,
        binary resource files, ...)
      extra_compile_args : [string]
        extra platform/compiler-specific settings used when compiling;
        typically command-line arguments
      extra_link_args : [string]
        extra platform/compiler-specific settings used when linking
      export_symbols : [string]
        symbols to export from a shared extension; not needed on all
        platforms, and Python extensions typically export just one
      swig_opts : [string]
        extra options passed to SWIG for .i sources
      depends : [string]
        files that the extension depends on
      language : string
        extension language ("c", "c++", "objc"); detected from the
        source extensions if not provided
      optional : boolean
        if true, a build failure in the extension does not abort the
        build process, it merely skips installing the extension
    """

    # **kwargs are allowed so that a warning is emitted instead of an
    # exception
    def __init__(self, name, sources, include_dirs=None, define_macros=None,
                 undef_macros=None, library_dirs=None, libraries=None,
                 runtime_library_dirs=None, extra_objects=None,
                 extra_compile_args=None, extra_link_args=None,
                 export_symbols=None, swig_opts=None, depends=None,
                 language=None, optional=None, **kwargs):
        if not isinstance(name, str):
            raise AssertionError("'name' must be a string")
        if not isinstance(sources, list) or any(
                not isinstance(source, str) for source in sources):
            raise AssertionError("'sources' must be a list of strings")

        self.name = name
        self.sources = sources
        # all list-valued attributes default to a fresh empty list
        for attr, value in (
                ('include_dirs', include_dirs),
                ('define_macros', define_macros),
                ('undef_macros', undef_macros),
                ('library_dirs', library_dirs),
                ('libraries', libraries),
                ('runtime_library_dirs', runtime_library_dirs),
                ('extra_objects', extra_objects),
                ('extra_compile_args', extra_compile_args),
                ('extra_link_args', extra_link_args),
                ('export_symbols', export_symbols),
                ('swig_opts', swig_opts),
                ('depends', depends)):
            setattr(self, attr, value or [])
        self.language = language
        self.optional = optional

        # If there are unknown keyword options, warn about them
        if kwargs:
            options = ', '.join(sorted(repr(arg) for arg in kwargs))
            logger.warning(
                'unknown arguments given to Extension: %s', options)
|
||||
720
Lib/packaging/compiler/msvc9compiler.py
Normal file
720
Lib/packaging/compiler/msvc9compiler.py
Normal file
|
|
@ -0,0 +1,720 @@
|
|||
"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler.
|
||||
|
||||
The MSVCCompiler class is compatible with VS 2005 and VS 2008. Legacy
|
||||
support for older versions of VS are in the msvccompiler module.
|
||||
"""
|
||||
|
||||
# Written by Perry Stoll
|
||||
# hacked by Robin Becker and Thomas Heller to do a better job of
|
||||
# finding DevStudio (through the registry)
|
||||
# ported to VS2005 and VS 2008 by Christian Heimes
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import re
|
||||
|
||||
from packaging.errors import (PackagingExecError, PackagingPlatformError,
|
||||
CompileError, LibError, LinkError)
|
||||
from packaging.compiler.ccompiler import CCompiler
|
||||
from packaging.compiler import gen_lib_options
|
||||
from packaging import logger
|
||||
from packaging.util import get_platform
|
||||
|
||||
import winreg
|
||||
|
||||
RegOpenKeyEx = winreg.OpenKeyEx
|
||||
RegEnumKey = winreg.EnumKey
|
||||
RegEnumValue = winreg.EnumValue
|
||||
RegError = winreg.error
|
||||
|
||||
HKEYS = (winreg.HKEY_USERS,
|
||||
winreg.HKEY_CURRENT_USER,
|
||||
winreg.HKEY_LOCAL_MACHINE,
|
||||
winreg.HKEY_CLASSES_ROOT)
|
||||
|
||||
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
|
||||
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
|
||||
NET_BASE = r"Software\Microsoft\.NETFramework"
|
||||
|
||||
# A map keyed by get_platform() return values to values accepted by
|
||||
# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is
|
||||
# the param to cross-compile on x86 targetting amd64.)
|
||||
PLAT_TO_VCVARS = {
|
||||
'win32' : 'x86',
|
||||
'win-amd64' : 'amd64',
|
||||
'win-ia64' : 'ia64',
|
||||
}
|
||||
|
||||
|
||||
class Reg:
    """Helper class to read values from the Windows registry.

    All lookups walk the hives listed in the module-level HKEYS tuple.
    Uses the modern decorator syntax instead of the legacy
    ``name = classmethod(name)`` post-assignments of the original, and
    avoids shadowing the ``type`` builtin.
    """

    @classmethod
    def get_value(cls, path, key):
        """Return the value named *key* under *path* in the first hive
        that has it; raise KeyError when no hive does."""
        for base in HKEYS:
            d = cls.read_values(base, path)
            if d and key in d:
                return d[key]
        raise KeyError(key)

    @classmethod
    def read_keys(cls, base, key):
        """Return list of registry keys, or None if *key* can't be opened."""
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        L = []
        i = 0
        # RegEnumKey raises when the index runs past the last subkey.
        while True:
            try:
                k = RegEnumKey(handle, i)
            except RegError:
                break
            L.append(k)
            i += 1
        return L

    @classmethod
    def read_values(cls, base, key):
        """Return dict of registry keys and values, or None if *key*
        can't be opened.

        All names are converted to lowercase.
        """
        try:
            handle = RegOpenKeyEx(base, key)
        except RegError:
            return None
        d = {}
        i = 0
        while True:
            try:
                # third item is the registry value type; we don't need it
                name, value, value_type = RegEnumValue(handle, i)
            except RegError:
                break
            name = name.lower()
            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
            i += 1
        return d

    @staticmethod
    def convert_mbcs(s):
        """Decode *s* from 'mbcs' when it is bytes; return str unchanged."""
        dec = getattr(s, "decode", None)
        if dec is not None:
            try:
                s = dec("mbcs")
            except UnicodeError:
                pass
        return s
|
||||
|
||||
class MacroExpander:
    """Expand "$(Name)" macros using values read from the VS registry keys.

    Builds a mapping of "$(MacroName)" -> value for the given Visual
    Studio *version* at construction time; sub() then performs plain
    string substitution with that mapping.
    """

    def __init__(self, version):
        # macro name (in "$(Name)" form) -> registry value
        self.macros = {}
        self.vsbase = VS_BASE % version
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Reg.get_value raises KeyError when the value is missing in
        # every hive; load_macros relies on that for its fallbacks.
        self.macros["$(%s)" % macro] = Reg.get_value(path, key)

    def load_macros(self, version):
        # Installation directories of VC and VS themselves.
        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
        self.set_macro("FrameworkDir", NET_BASE, "installroot")
        try:
            if version >= 8.0:
                self.set_macro("FrameworkSDKDir", NET_BASE,
                               "sdkinstallrootv2.0")
            else:
                # Versions before 8.0 are unsupported here; raising the
                # same KeyError funnels them into the error below.
                raise KeyError("sdkinstallrootv2.0")
        except KeyError:
            raise PackagingPlatformError(
            """Python was built with Visual Studio 2008;
extensions must be built with a compiler than can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")

        if version >= 9.0:
            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
        else:
            # Pre-9.0 fallback: read the framework version from the first
            # openable "NET Framework Setup\Product" subkey.
            p = r"Software\Microsoft\NET Framework Setup\Product"
            for base in HKEYS:
                try:
                    h = RegOpenKeyEx(base, p)
                except RegError:
                    continue
                key = RegEnumKey(h, 0)
                # NOTE(review): Reg.get_value's signature is (path, key),
                # but this call passes (base, path) -- presumably a latent
                # bug inherited from distutils; confirm before relying on
                # this pre-9.0 branch.
                d = Reg.get_value(base, r"%s\%s" % (p, key))
                self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        # Plain textual substitution of every known "$(Name)" macro.
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
|
||||
|
||||
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version ("MSC v.NNNN").  For earlier versions, assume the
    compiler is MSVC 6.  Returns None when the marker is present but
    the version cannot be mapped.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No marker at all: must be the ancient MSVC 6.
        return 6
    # The digits right after the marker, e.g. "1500" for VS 2008.
    digits = sys.version[pos + len(marker):].split(" ", 1)[0]
    major = int(digits[:-2]) - 6
    minor = int(digits[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # else we don't know what version of the compiler this is
    return None
|
||||
|
||||
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
    reduced_paths = []
    # Track already-seen paths in a set for O(1) membership tests instead
    # of the original O(n**2) list scan (flagged by its own XXX comment).
    seen = set()
    for p in paths:
        np = os.path.normpath(p)
        if np not in seen:
            seen.add(np)
            reduced_paths.append(np)
    return reduced_paths
|
||||
|
||||
def removeDuplicates(variable):
    """Remove duplicate values of an environment variable.

    *variable* is an os.pathsep-joined string; the first occurrence of
    each entry is kept, in order.
    """
    unique_entries = []
    for entry in variable.split(os.pathsep):
        if entry not in unique_entries:
            unique_entries.append(entry)
    return os.pathsep.join(unique_entries)
|
||||
|
||||
def find_vcvarsall(version):
    """Find the vcvarsall.bat file

    At first it tries to find the productdir of VS 2008 in the registry. If
    that fails it falls back to the VS90COMNTOOLS env var.

    Returns the absolute path of vcvarsall.bat, or None when it cannot
    be located.
    """
    vsbase = VS_BASE % version
    # First source: the VC product directory recorded in the registry.
    try:
        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
                                   "productdir")
    except KeyError:
        logger.debug("Unable to find productdir in registry")
        productdir = None

    if not productdir or not os.path.isdir(productdir):
        # Second source: the VSnn0COMNTOOLS environment variable
        # ("%0.f" formats 9.0 as "9", giving e.g. "VS90COMNTOOLS").
        toolskey = "VS%0.f0COMNTOOLS" % version
        toolsdir = os.environ.get(toolskey, None)

        if toolsdir and os.path.isdir(toolsdir):
            # The tools dir is <VS root>\Common7\Tools; VC lives two
            # levels up from there.
            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
            productdir = os.path.abspath(productdir)
            if not os.path.isdir(productdir):
                logger.debug("%s is not a valid directory", productdir)
                return None
        else:
            logger.debug("env var %s is not set or invalid", toolskey)
    if not productdir:
        logger.debug("no productdir found")
        return None
    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
    if os.path.isfile(vcvarsall):
        return vcvarsall
    logger.debug("unable to find vcvarsall.bat")
    return None
|
||||
|
||||
def query_vcvarsall(version, arch="x86"):
    """Launch vcvarsall.bat and read the settings from its environment

    Returns a dict with the (lowercased) keys 'include', 'lib',
    'libpath' and 'path'; raises PackagingPlatformError when
    vcvarsall.bat cannot be found or fails, and ValueError when not
    all four variables were produced.
    """
    vcvarsall = find_vcvarsall(version)
    # environment variables we want to capture from the batch file
    interesting = set(("include", "lib", "libpath", "path"))
    result = {}

    if vcvarsall is None:
        raise PackagingPlatformError("Unable to find vcvarsall.bat")
    logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version)
    # NOTE(review): the command is a single string chaining "& set" --
    # presumably this relies on Windows CreateProcess/cmd semantics to
    # run vcvarsall and then dump the resulting environment; confirm,
    # as no shell=True is passed.
    popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)

    stdout, stderr = popen.communicate()
    if popen.wait() != 0:
        raise PackagingPlatformError(stderr.decode("mbcs"))

    # Parse the "name=value" lines printed by "set".
    stdout = stdout.decode("mbcs")
    for line in stdout.split("\n"):
        line = Reg.convert_mbcs(line)
        if '=' not in line:
            continue
        line = line.strip()
        key, value = line.split('=', 1)
        key = key.lower()
        if key in interesting:
            # drop a trailing path separator before de-duplicating
            if value.endswith(os.pathsep):
                value = value[:-1]
            result[key] = removeDuplicates(value)

    # All four variables must have been present in the output.
    if len(result) != len(interesting):
        raise ValueError(str(list(result)))

    return result
|
||||
|
||||
# More globals
VERSION = get_build_version()
# get_build_version() returns None when the compiler version cannot be
# determined; the original bare "VERSION < 8.0" comparison (and the
# "%0.1f" format) raised TypeError in that case under Python 3, so an
# unknown version is treated as unsupported as well.
if VERSION is None or VERSION < 8.0:
    raise PackagingPlatformError("VC %s is not supported by this module" % VERSION)
# MACROS = MacroExpander(VERSION)
|
||||
|
||||
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class.

    Fixes over the original implementation:

    - initialize() no longer encodes the vcvarsall environment to bytes
      with 'mbcs'; under Python 3 that made ``bytes.split(str)`` and the
      ``os.environ`` assignments raise TypeError.
    - the "compiler not installed" error message no longer references
      ``self.__product``, an attribute that was never defined (it raised
      AttributeError instead of the intended error).
    - the regexes in _remove_visual_c_ref use raw strings consistently.
    """

    name = 'msvc'
    description = 'Microsoft Visual C++'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=False, force=False):
        CCompiler.__init__(self, verbose, dry_run, force)
        self.__version = VERSION
        self.__root = r"Software\Microsoft\VisualStudio"
        # self.__macros = MACROS
        self.__paths = []
        # target platform (.plat_name is consistent with 'bdist')
        self.plat_name = None
        self.__arch = None  # deprecated name
        self.initialized = False

    def initialize(self, plat_name=None):
        """Locate the compiler executables and set up compile/link options.

        *plat_name* must be one of 'win32', 'win-amd64' or 'win-ia64';
        it defaults to the current platform.  Must be called exactly once
        before any compile/link work.
        """
        # multi-init means we would need to check platform same each time...
        assert not self.initialized, "don't init multiple times"
        if plat_name is None:
            plat_name = get_platform()
        # sanity check for platforms to prevent obscure errors later.
        ok_plats = 'win32', 'win-amd64', 'win-ia64'
        if plat_name not in ok_plats:
            raise PackagingPlatformError("--plat-name must be one of %s" %
                                         (ok_plats,))

        if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ
                and self.find_exe("cl.exe")):
            # Assume that the SDK set up everything alright; don't try to be
            # smarter
            self.cc = "cl.exe"
            self.linker = "link.exe"
            self.lib = "lib.exe"
            self.rc = "rc.exe"
            self.mc = "mc.exe"
        else:
            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
            # to cross compile, you use 'x86_amd64'.
            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
            # compile use 'x86' (ie, it runs the x86 compiler directly)
            # No idea how itanium handles this, if at all.
            if plat_name == get_platform() or plat_name == 'win32':
                # native build or cross-compile to win32
                plat_spec = PLAT_TO_VCVARS[plat_name]
            else:
                # cross compile from win32 -> some 64bit
                plat_spec = (PLAT_TO_VCVARS[get_platform()] + '_' +
                             PLAT_TO_VCVARS[plat_name])

            vc_env = query_vcvarsall(VERSION, plat_spec)

            # query_vcvarsall already returns str values; encoding them
            # to 'mbcs' bytes here (as the original code did) broke the
            # str split below and the os.environ assignments on Python 3.
            self.__paths = vc_env['path'].split(os.pathsep)
            os.environ['lib'] = vc_env['lib']
            os.environ['include'] = vc_env['include']

            if len(self.__paths) == 0:
                raise PackagingPlatformError(
                    "Python was built with VC %s, and extensions need to "
                    "be built with the same version of the compiler, but "
                    "it isn't installed." % self.__version)

            self.cc = self.find_exe("cl.exe")
            self.linker = self.find_exe("link.exe")
            self.lib = self.find_exe("lib.exe")
            self.rc = self.find_exe("rc.exe")   # resource compiler
            self.mc = self.find_exe("mc.exe")   # message compiler
            #self.set_path_env_var('lib')
            #self.set_path_env_var('include')

            # extend the MSVC path with the current path
            try:
                for p in os.environ['path'].split(';'):
                    self.__paths.append(p)
            except KeyError:
                pass
            self.__paths = normalize_and_reduce_paths(self.__paths)
            os.environ['path'] = ";".join(self.__paths)

        self.preprocess_options = None
        if self.__arch == "x86":
            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Z7', '/D_DEBUG']
        else:
            # Win64
            self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
                                    '/DNDEBUG']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/GS-', '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None']
        self.ldflags_static = ['/nologo']

        self.initialized = True

    # -- Worker methods ------------------------------------------------

    def object_filenames(self,
                         source_filenames,
                         strip_dir=False,
                         output_dir=''):
        """Map source filenames to the object filenames they produce."""
        # Copied from ccompiler.py, extended to return .res as 'object'-file
        # for .rc input file
        if output_dir is None:
            output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            base, ext = os.path.splitext(src_name)
            base = os.path.splitdrive(base)[1]  # Chop off the drive
            base = base[os.path.isabs(base):]   # If abs, chop off leading /
            if ext not in self.src_extensions:
                # Better to raise an exception instead of silently continuing
                # and later complain about sources and targets having
                # different lengths
                raise CompileError("Don't know how to compile %s" % src_name)
            if strip_dir:
                base = os.path.basename(base)
            if ext in self._rc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            elif ext in self._mc_extensions:
                obj_names.append(os.path.join(output_dir,
                                              base + self.res_extension))
            else:
                obj_names.append(os.path.join(output_dir,
                                              base + self.obj_extension))
        return obj_names

    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=False,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile *sources*; return the list of object files produced.

        Raises CompileError when any compiler invocation fails or a
        source has an unrecognized extension.
        """
        if not self.initialized:
            self.initialize()
        compile_info = self._setup_compile(output_dir, macros, include_dirs,
                                           sources, depends, extra_postargs)
        macros, objects, extra_postargs, pp_opts, build = compile_info

        compile_opts = extra_preargs or []
        compile_opts.append('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn([self.rc] + pp_opts +
                               [output_opt] + [input_opt])
                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue
            elif ext in self._mc_extensions:
                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources.  This works at least for win32all.
                h_dir = os.path.dirname(src)
                rc_dir = os.path.dirname(obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn([self.mc] +
                               ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext(os.path.basename(src))
                    rc_file = os.path.join(rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn([self.rc] +
                               ["/fo" + obj] + [rc_file])
                except PackagingExecError as msg:
                    raise CompileError(msg)
                continue
            else:
                # how to handle this file?
                raise CompileError("Don't know how to compile %s to %s"
                                   % (src, obj))

            output_opt = "/Fo" + obj
            try:
                self.spawn([self.cc] + compile_opts + pp_opts +
                           [input_opt, output_opt] +
                           extra_postargs)
            except PackagingExecError as msg:
                raise CompileError(msg)

        return objects

    def create_static_lib(self,
                          objects,
                          output_libname,
                          output_dir=None,
                          debug=False,
                          target_lang=None):
        """Link *objects* into the static library *output_libname*."""
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = self.library_filename(output_libname,
                                                output_dir=output_dir)

        if self._need_link(objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass  # XXX what goes here?
            try:
                self.spawn([self.lib] + lib_args)
            except PackagingExecError as msg:
                raise LibError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects, output_filename, output_dir=None,
             libraries=None, library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=False, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link *objects* into *output_filename* (DLL or EXE, per
        *target_desc*), then embed the generated manifest."""
        if not self.initialized:
            self.initialize()
        objects, output_dir = self._fix_object_args(objects, output_dir)
        fixed_args = self._fix_lib_args(libraries, library_dirs,
                                        runtime_library_dirs)
        libraries, library_dirs, runtime_library_dirs = fixed_args

        if runtime_library_dirs:
            self.warn("don't know what to do with 'runtime_library_dirs': "
                      + str(runtime_library_dirs))

        lib_opts = gen_lib_options(self,
                                   library_dirs, runtime_library_dirs,
                                   libraries)
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            if target_desc == CCompiler.EXECUTABLE:
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            build_temp = os.path.dirname(objects[0])
            if export_symbols is not None:
                dll_name, dll_ext = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    build_temp,
                    self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)

            # Embedded manifests are recommended - see MSDN article titled
            # "How to: Embed a Manifest Inside a C/C++ Application"
            # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
            # Ask the linker to generate the manifest in the temp dir, so
            # we can embed it later.
            temp_manifest = os.path.join(
                build_temp,
                os.path.basename(output_filename) + ".manifest")
            ld_args.append('/MANIFESTFILE:' + temp_manifest)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath(os.path.dirname(output_filename))
            try:
                self.spawn([self.linker] + ld_args)
            except PackagingExecError as msg:
                raise LinkError(msg)

            # embed the manifest
            # XXX - this is somewhat fragile - if mt.exe fails, distutils
            # will still consider the DLL up-to-date, but it will not have a
            # manifest. Maybe we should link to a temp file? OTOH, that
            # implies a build environment error that shouldn't go undetected.
            if target_desc == CCompiler.EXECUTABLE:
                mfid = 1
            else:
                mfid = 2
                self._remove_visual_c_ref(temp_manifest)
            out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
            try:
                self.spawn(['mt.exe', '-nologo', '-manifest',
                            temp_manifest, out_arg])
            except PackagingExecError as msg:
                raise LinkError(msg)
        else:
            logger.debug("skipping %s (up-to-date)", output_filename)

    def _remove_visual_c_ref(self, manifest_file):
        """Strip the Visual C runtime references from *manifest_file*.

        Removing them lets the .pyd fall through to python.exe's own
        runtime dependency, so the runtime DLLs don't need to sit next
        to every extension.  Missing manifests are silently ignored.
        """
        try:
            with open(manifest_file) as manifest_f:
                manifest_buf = manifest_f.read()
            # use raw strings: "\s" and "\d" are not valid str escapes
            pattern = re.compile(
                r"""<assemblyIdentity.*?name=("|')Microsoft\."""
                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
                re.DOTALL)
            manifest_buf = re.sub(pattern, "", manifest_buf)
            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
            manifest_buf = re.sub(pattern, "", manifest_buf)
            with open(manifest_file, 'w') as manifest_f:
                manifest_f.write(manifest_buf)
        except IOError:
            pass

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "/LIBPATH:" + dir

    def runtime_library_dir_option(self, dir):
        raise PackagingPlatformError(
            "don't know how to set runtime library search path for MSVC++")

    def library_option(self, lib):
        return self.library_filename(lib)

    def find_library_file(self, dirs, lib, debug=False):
        """Return the path of *lib* in *dirs*, or None when not found.

        When *debug* is true, a "<lib>_d" debugging library is preferred
        but the plain name is accepted as fallback.
        """
        if debug:
            try_names = [lib + "_d", lib]
        else:
            try_names = [lib]
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None

    # Helper methods for using the MSVC registry settings

    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        for p in os.environ['Path'].split(';'):
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        return exe
|
||||
636
Lib/packaging/compiler/msvccompiler.py
Normal file
636
Lib/packaging/compiler/msvccompiler.py
Normal file
|
|
@ -0,0 +1,636 @@
|
|||
"""CCompiler implementation for old Microsoft Visual Studio compilers.
|
||||
|
||||
For a compiler compatible with VS 2005 and 2008, use msvc9compiler.
|
||||
"""
|
||||
|
||||
# Written by Perry Stoll
|
||||
# hacked by Robin Becker and Thomas Heller to do a better job of
|
||||
# finding DevStudio (through the registry)
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
from packaging.errors import (PackagingExecError, PackagingPlatformError,
|
||||
CompileError, LibError, LinkError)
|
||||
from packaging.compiler.ccompiler import CCompiler
|
||||
from packaging.compiler import gen_lib_options
|
||||
from packaging import logger
|
||||
|
||||
_can_read_reg = False
|
||||
try:
|
||||
import winreg
|
||||
|
||||
_can_read_reg = True
|
||||
hkey_mod = winreg
|
||||
|
||||
RegOpenKeyEx = winreg.OpenKeyEx
|
||||
RegEnumKey = winreg.EnumKey
|
||||
RegEnumValue = winreg.EnumValue
|
||||
RegError = winreg.error
|
||||
|
||||
except ImportError:
|
||||
try:
|
||||
import win32api
|
||||
import win32con
|
||||
_can_read_reg = True
|
||||
hkey_mod = win32con
|
||||
|
||||
RegOpenKeyEx = win32api.RegOpenKeyEx
|
||||
RegEnumKey = win32api.RegEnumKey
|
||||
RegEnumValue = win32api.RegEnumValue
|
||||
RegError = win32api.error
|
||||
|
||||
except ImportError:
|
||||
logger.warning(
|
||||
"can't read registry to find the necessary compiler setting;\n"
|
||||
"make sure that Python modules _winreg, win32api or win32con "
|
||||
"are installed.")
|
||||
|
||||
if _can_read_reg:
|
||||
HKEYS = (hkey_mod.HKEY_USERS,
|
||||
hkey_mod.HKEY_CURRENT_USER,
|
||||
hkey_mod.HKEY_LOCAL_MACHINE,
|
||||
hkey_mod.HKEY_CLASSES_ROOT)
|
||||
|
||||
|
||||
def read_keys(base, key):
    """Return the list of subkey names of *key* under *base*.

    Returns None when the key itself cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    names = []
    index = 0
    while True:
        try:
            names.append(RegEnumKey(handle, index))
        except RegError:
            # Enumeration past the last subkey raises; we are done.
            return names
        index += 1
|
||||
|
||||
|
||||
def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.  Returns None when the key
    cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    while True:
        try:
            name, value, type = RegEnumValue(handle, index)
        except RegError:
            # Enumeration past the last value raises; we are done.
            return values
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index += 1
|
||||
|
||||
|
||||
def convert_mbcs(s):
    """Encode *s* with the 'mbcs' codec when it supports .encode().

    Objects without an encode method, or whose encoding fails with a
    UnicodeError, are returned unchanged.
    """
    encode = getattr(s, "encode", None)
    if encode is None:
        return s
    try:
        return encode("mbcs")
    except UnicodeError:
        return s
|
||||
|
||||
|
||||
class MacroExpander:
    """Expand $(Macro) placeholders using values read from the registry.

    Resolves Visual Studio path macros such as $(VCInstallDir) that
    appear in registry-stored directory lists.
    """

    def __init__(self, version):
        # Maps "$(Name)" -> expanded string value.
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Probe each registry root in turn; first hit wins.
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        """Populate the macro table for Visual Studio *version*."""
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK install-root value name changed after VS 7.0.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError:
            raise PackagingPlatformError(
"""Python was built with Visual Studio 2003; extensions must be built with
a compiler than can generate compatible binaries. Visual Studio 2003 was
not found on this system. If you have Cygwin installed, you can try
compiling with MingW32, by passing "-c mingw32" to setup.py.""")
            # XXX update this comment for setup.cfg

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return *s* with every known macro replaced by its value."""
        for k, v in self.macros.items():
            s = s.replace(k, v)
        return s
|
||||
|
||||
|
||||
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        return 6
    pos += len(marker)
    token, _rest = sys.version[pos:].split(" ", 1)
    major = int(token[:-2]) - 6
    minor = int(token[2:3]) / 10.0
    if major == 6:
        # Paths are not affected by the minor version for MSVC 6.
        minor = 0
    if major >= 6:
        return major + minor
    # Unrecognizably old compiler version.
    return None
|
||||
|
||||
|
||||
def get_build_architecture():
    """Return the processor architecture.

    Possible results are "Intel", "Itanium", or "AMD64".
    """
    marker = " bit ("
    start = sys.version.find(marker)
    if start == -1:
        # No architecture note in sys.version: assume plain x86.
        return "Intel"
    end = sys.version.find(")", start)
    return sys.version[start + len(marker):end]
|
||||
|
||||
|
||||
def normalize_and_reduce_paths(paths):
    """Return a list of normalized paths with duplicates removed.

    The current order of paths is maintained.
    """
    # Normalizing first means e.g. "/a" and "/a/" collapse to one entry.
    unique = []
    for path in paths:
        normalized = os.path.normpath(path)
        if normalized not in unique:
            unique.append(normalized)
    return unique
|
||||
|
||||
|
||||
class MSVCCompiler(CCompiler):
    """Concrete class that implements an interface to Microsoft Visual C++,
    as defined by the CCompiler abstract class."""

    name = 'msvc'
    description = "Microsoft Visual C++"

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']
    _mc_extensions = ['.mc']

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'

    def __init__(self, verbose=0, dry_run=False, force=False):
        CCompiler.__init__(self, verbose, dry_run, force)
        # Extensions must be built with the same MSVC release as Python.
        self.__version = get_build_version()
        self.__arch = get_build_architecture()
        if self.__arch == "Intel":
            # x86
            if self.__version >= 7:
                self.__root = r"Software\Microsoft\VisualStudio"
                self.__macros = MacroExpander(self.__version)
            else:
                self.__root = r"Software\Microsoft\Devstudio"
            self.__product = "Visual Studio version %s" % self.__version
        else:
            # Win64. Assume this was built with the platform SDK
            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)

        # Expensive setup (registry probing, tool lookup) is deferred to
        # initialize(), run lazily on first compile/link.
        self.initialized = False
|
||||
|
||||
def initialize(self):
    """Locate the MSVC toolchain and compute compile/link flag sets.

    Deferred from __init__ because it probes the registry and mutates
    os.environ; raises PackagingPlatformError when the compiler that
    built Python cannot be found.
    """
    self.__paths = []
    if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and
            self.find_exe("cl.exe")):
        # Assume that the SDK set up everything alright; don't try to be
        # smarter
        self.cc = "cl.exe"
        self.linker = "link.exe"
        self.lib = "lib.exe"
        self.rc = "rc.exe"
        self.mc = "mc.exe"
    else:
        self.__paths = self.get_msvc_paths("path")

        if len(self.__paths) == 0:
            raise PackagingPlatformError("Python was built with %s "
                "and extensions need to be built with the same "
                "version of the compiler, but it isn't installed." %
                self.__product)

        self.cc = self.find_exe("cl.exe")
        self.linker = self.find_exe("link.exe")
        self.lib = self.find_exe("lib.exe")
        self.rc = self.find_exe("rc.exe")   # resource compiler
        self.mc = self.find_exe("mc.exe")   # message compiler
        self.set_path_env_var('lib')
        self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in os.environ['path'].split(';'):
                self.__paths.append(p)
        except KeyError:
            pass
        self.__paths = normalize_and_reduce_paths(self.__paths)
        os.environ['path'] = ';'.join(self.__paths)

    self.preprocess_options = None
    if self.__arch == "Intel":
        self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX',
                                '/DNDEBUG']
        self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                      '/Z7', '/D_DEBUG']
    else:
        # Win64
        self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-',
                                '/DNDEBUG']
        self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
                                      '/Z7', '/D_DEBUG']

    self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
    if self.__version >= 7:
        self.ldflags_shared_debug = [
            '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
        ]
    else:
        # '/pdb:None' is only passed to pre-7 linkers.
        self.ldflags_shared_debug = [
            '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
        ]
    self.ldflags_static = [ '/nologo']

    self.initialized = True
|
||||
|
||||
# -- Worker methods ------------------------------------------------
|
||||
|
||||
def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
    """Map each source filename to the object file it will produce.

    Copied from ccompiler.py, extended to return .res as 'object'-file
    for .rc input file.  Raises CompileError for unknown extensions.
    """
    if output_dir is None:
        output_dir = ''
    obj_names = []
    for src_name in source_filenames:
        base, ext = os.path.splitext(src_name)
        base = os.path.splitdrive(base)[1]  # Chop off the drive
        base = base[os.path.isabs(base):]  # If abs, chop off leading /
        if ext not in self.src_extensions:
            # Better to raise an exception instead of silently continuing
            # and later complain about sources and targets having
            # different lengths
            raise CompileError("Don't know how to compile %s" % src_name)
        if strip_dir:
            base = os.path.basename(base)
        if ext in self._rc_extensions:
            # .rc compiles to a binary .res resource file.
            obj_names.append(os.path.join(output_dir,
                                          base + self.res_extension))
        elif ext in self._mc_extensions:
            # .mc also ends up as a .res (via an intermediate .rc).
            obj_names.append(os.path.join(output_dir,
                                          base + self.res_extension))
        else:
            obj_names.append(os.path.join(output_dir,
                                          base + self.obj_extension))
    return obj_names
|
||||
|
||||
def compile(self, sources,
            output_dir=None, macros=None, include_dirs=None, debug=False,
            extra_preargs=None, extra_postargs=None, depends=None):
    """Compile *sources*, dispatching on extension (.c/.cpp/.rc/.mc).

    Returns the list of object filenames; raises CompileError when any
    spawned tool fails or a source has an unknown extension.
    """
    if not self.initialized:
        self.initialize()
    macros, objects, extra_postargs, pp_opts, build = \
        self._setup_compile(output_dir, macros, include_dirs, sources,
                            depends, extra_postargs)

    compile_opts = extra_preargs or []
    compile_opts.append('/c')
    if debug:
        compile_opts.extend(self.compile_options_debug)
    else:
        compile_opts.extend(self.compile_options)

    for obj in objects:
        try:
            src, ext = build[obj]
        except KeyError:
            # Object not scheduled for (re)build by _setup_compile.
            continue
        if debug:
            # pass the full pathname to MSVC in debug mode,
            # this allows the debugger to find the source file
            # without asking the user to browse for it
            src = os.path.abspath(src)

        if ext in self._c_extensions:
            input_opt = "/Tc" + src
        elif ext in self._cpp_extensions:
            input_opt = "/Tp" + src
        elif ext in self._rc_extensions:
            # compile .RC to .RES file
            input_opt = src
            output_opt = "/fo" + obj
            try:
                self.spawn([self.rc] + pp_opts +
                           [output_opt] + [input_opt])
            except PackagingExecError as msg:
                raise CompileError(msg)
            continue
        elif ext in self._mc_extensions:

            # Compile .MC to .RC file to .RES file.
            #   * '-h dir' specifies the directory for the
            #     generated include file
            #   * '-r dir' specifies the target directory of the
            #     generated RC file and the binary message resource
            #     it includes
            #
            # For now (since there are no options to change this),
            # we use the source-directory for the include file and
            # the build directory for the RC file and message
            # resources. This works at least for win32all.

            h_dir = os.path.dirname(src)
            rc_dir = os.path.dirname(obj)
            try:
                # first compile .MC to .RC and .H file
                self.spawn([self.mc] +
                           ['-h', h_dir, '-r', rc_dir] + [src])
                base, _ = os.path.splitext(os.path.basename(src))
                rc_file = os.path.join(rc_dir, base + '.rc')
                # then compile .RC to .RES file
                self.spawn([self.rc] +
                           ["/fo" + obj] + [rc_file])

            except PackagingExecError as msg:
                raise CompileError(msg)
            continue
        else:
            # how to handle this file?
            raise CompileError(
                "Don't know how to compile %s to %s" %
                (src, obj))

        output_opt = "/Fo" + obj
        try:
            self.spawn([self.cc] + compile_opts + pp_opts +
                       [input_opt, output_opt] +
                       extra_postargs)
        except PackagingExecError as msg:
            raise CompileError(msg)

    return objects
|
||||
|
||||
def create_static_lib(self, objects, output_libname, output_dir=None,
                      debug=False, target_lang=None):
    """Bundle *objects* into a static .lib with lib.exe.

    Skips the call when the target is already up to date; raises
    LibError when lib.exe fails.
    """
    if not self.initialized:
        self.initialize()
    objects, output_dir = self._fix_object_args(objects, output_dir)
    output_filename = \
        self.library_filename(output_libname, output_dir=output_dir)

    if self._need_link(objects, output_filename):
        lib_args = objects + ['/OUT:' + output_filename]
        if debug:
            pass  # XXX what goes here?
        try:
            self.spawn([self.lib] + lib_args)
        except PackagingExecError as msg:
            raise LibError(msg)

    else:
        logger.debug("skipping %s (up-to-date)", output_filename)
|
||||
|
||||
def link(self, target_desc, objects, output_filename, output_dir=None,
         libraries=None, library_dirs=None, runtime_library_dirs=None,
         export_symbols=None, debug=False, extra_preargs=None,
         extra_postargs=None, build_temp=None, target_lang=None):
    """Link *objects* into an executable or DLL with link.exe.

    Skips the link when the target is up to date; raises LinkError when
    the linker fails.  runtime_library_dirs are not supported and only
    produce a warning.
    """
    if not self.initialized:
        self.initialize()
    objects, output_dir = self._fix_object_args(objects, output_dir)
    libraries, library_dirs, runtime_library_dirs = \
        self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

    if runtime_library_dirs:
        self.warn("don't know what to do with 'runtime_library_dirs': %s"
                  % (runtime_library_dirs,))

    lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                               libraries)
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)

    if self._need_link(objects, output_filename):

        if target_desc == CCompiler.EXECUTABLE:
            # Executables drop the leading '/DLL' flag from the lists.
            if debug:
                ldflags = self.ldflags_shared_debug[1:]
            else:
                ldflags = self.ldflags_shared[1:]
        else:
            if debug:
                ldflags = self.ldflags_shared_debug
            else:
                ldflags = self.ldflags_shared

        export_opts = []
        for sym in (export_symbols or []):
            export_opts.append("/EXPORT:" + sym)

        ld_args = (ldflags + lib_opts + export_opts +
                   objects + ['/OUT:' + output_filename])

        # The MSVC linker generates .lib and .exp files, which cannot be
        # suppressed by any linker switches. The .lib files may even be
        # needed! Make sure they are generated in the temporary build
        # directory. Since they have different names for debug and release
        # builds, they can go into the same directory.
        if export_symbols is not None:
            dll_name, dll_ext = os.path.splitext(
                os.path.basename(output_filename))
            implib_file = os.path.join(
                os.path.dirname(objects[0]),
                self.library_filename(dll_name))
            ld_args.append('/IMPLIB:' + implib_file)

        if extra_preargs:
            ld_args[:0] = extra_preargs
        if extra_postargs:
            ld_args.extend(extra_postargs)

        self.mkpath(os.path.dirname(output_filename))
        try:
            self.spawn([self.linker] + ld_args)
        except PackagingExecError as msg:
            raise LinkError(msg)

    else:
        logger.debug("skipping %s (up-to-date)", output_filename)
|
||||
|
||||
# -- Miscellaneous methods -----------------------------------------
|
||||
# These are all used by the 'gen_lib_options() function, in
|
||||
# ccompiler.py.
|
||||
|
||||
def library_dir_option(self, dir):
    """Return the linker option that adds *dir* to the library search path."""
    return "/LIBPATH:" + dir
|
||||
|
||||
def runtime_library_dir_option(self, dir):
    """MSVC has no rpath equivalent; always raises PackagingPlatformError."""
    raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++")
|
||||
|
||||
def library_option(self, lib):
    """Return the filename form of library *lib* as passed to the linker."""
    return self.library_filename(lib)
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=False):
    """Return the full path of *lib* found in *dirs*, or None.

    With debug=True the '<lib>_d' variant is preferred when present.
    """
    # Prefer a debugging library if found (and requested), but deal
    # with it if we don't have one.
    if debug:
        try_names = [lib + "_d", lib]
    else:
        try_names = [lib]
    for dir in dirs:
        for name in try_names:
            libfile = os.path.join(dir, self.library_filename(name))
            if os.path.exists(libfile):
                return libfile
    else:
        # Oops, didn't find it in *any* of 'dirs'
        return None
|
||||
|
||||
# Helper methods for using the MSVC registry settings
|
||||
|
||||
def find_exe(self, exe):
    """Return path to an MSVC executable program.

    Tries to find the program in several places: first, one of the
    MSVC program search paths from the registry; next, the directories
    in the PATH environment variable.  If any of those work, return an
    absolute path that is known to exist.  If none of them work, just
    return the original program name, 'exe'.
    """
    # Registry-derived tool directories take precedence over PATH.
    for p in self.__paths:
        fn = os.path.join(os.path.abspath(p), exe)
        if os.path.isfile(fn):
            return fn

    # didn't find it; try existing path
    # NOTE(review): os.environ is case-insensitive on Windows, so
    # 'Path' matches PATH there.
    for p in os.environ['Path'].split(';'):
        fn = os.path.join(os.path.abspath(p), exe)
        if os.path.isfile(fn):
            return fn

    # Fall back to the bare name and let the OS resolve it at spawn time.
    return exe
|
||||
|
||||
def get_msvc_paths(self, path, platform='x86'):
    """Get a list of devstudio directories (include, lib or path).

    Return a list of strings.  The list will be empty if unable to
    access the registry or appropriate registry keys not found.
    """
    if not _can_read_reg:
        return []

    # Registry value names are of the form "<path> dirs".
    path = path + " dirs"
    if self.__version >= 7:
        key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
               % (self.__root, self.__version))
    else:
        key = (r"%s\6.0\Build System\Components\Platforms"
               r"\Win32 (%s)\Directories" % (self.__root, platform))

    for base in HKEYS:
        d = read_values(base, key)
        if d:
            if self.__version >= 7:
                # VS7+ values may contain $(...) macros; expand them.
                return self.__macros.sub(d[path]).split(";")
            else:
                return d[path].split(";")
    # MSVC 6 seems to create the registry entries we need only when
    # the GUI is run.
    if self.__version == 6:
        for base in HKEYS:
            if read_values(base, r"%s\6.0" % self.__root) is not None:
                self.warn("It seems you have Visual Studio 6 installed, "
                    "but the expected registry settings are not present.\n"
                    "You must at least run the Visual Studio GUI once "
                    "so that these entries are created.")
                break
    return []
|
||||
|
||||
def set_path_env_var(self, name):
    """Set environment variable 'name' to an MSVC path type value.

    This is equivalent to a SET command prior to execution of spawned
    commands.
    """
    if name == "lib":
        # The registry stores the lib directories under "library".
        p = self.get_msvc_paths("library")
    else:
        p = self.get_msvc_paths(name)
    if p:
        os.environ[name] = ';'.join(p)
|
||||
|
||||
|
||||
# If Python itself was built with MSVC 8.0 (VS 2005) or newer, this legacy
# class cannot drive the toolchain; swap in the msvc9compiler implementation
# and keep the old class reachable under another name.
if get_build_version() >= 8.0:
    logger.debug("importing new compiler from distutils.msvc9compiler")
    OldMSVCCompiler = MSVCCompiler
    from packaging.compiler.msvc9compiler import MSVCCompiler
    # get_build_architecture not really relevant now we support cross-compile
    from packaging.compiler.msvc9compiler import MacroExpander
|
||||
339
Lib/packaging/compiler/unixccompiler.py
Normal file
339
Lib/packaging/compiler/unixccompiler.py
Normal file
|
|
@ -0,0 +1,339 @@
|
|||
"""CCompiler implementation for Unix compilers.
|
||||
|
||||
This module contains the UnixCCompiler class, a subclass of CCompiler
|
||||
that handles the "typical" Unix-style command-line C compiler:
|
||||
* macros defined with -Dname[=value]
|
||||
* macros undefined with -Uname
|
||||
* include search directories specified with -Idir
|
||||
* libraries specified with -lllib
|
||||
* library search directories specified with -Ldir
|
||||
* compile handled by 'cc' (or similar) executable with -c option:
|
||||
compiles .c to .o
|
||||
* link static library handled by 'ar' command (possibly with 'ranlib')
|
||||
* link shared library handled by 'cc -shared'
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
|
||||
from packaging.util import newer
|
||||
from packaging.compiler.ccompiler import CCompiler
|
||||
from packaging.compiler import gen_preprocess_options, gen_lib_options
|
||||
from packaging.errors import (PackagingExecError, CompileError,
|
||||
LibError, LinkError)
|
||||
from packaging import logger
|
||||
import sysconfig
|
||||
|
||||
|
||||
# XXX Things not currently handled:
|
||||
# * optimization/debug/warning flags; we just use whatever's in Python's
|
||||
# Makefile and live with it. Is this adequate? If not, we might
|
||||
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
|
||||
# SunCCompiler, and I suspect down that road lies madness.
|
||||
# * even if we don't know a warning flag from an optimization flag,
|
||||
# we need some way for outsiders to feed preprocessor/compiler/linker
|
||||
# flags in to us -- eg. a sysadmin might want to mandate certain flags
|
||||
# via a site config file, or a user might want to set something for
|
||||
# compiling this module distribution only via the setup.py command
|
||||
# line, whatever. As long as these options come from something on the
|
||||
# current system, they can be as system-dependent as they like, and we
|
||||
# should just happily stuff them into the preprocessor/compiler/linker
|
||||
# options and carry on.
|
||||
|
||||
def _darwin_compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    """
    stripArch = stripSysroot = False

    # Work on a copy so the caller's executable list is not mutated.
    compiler_so = list(compiler_so)
    kernel_version = os.uname()[2]  # 8.4.3
    major_version = int(kernel_version.split('.')[0])

    if major_version < 8:
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args

    if stripArch or 'ARCHFLAGS' in os.environ:
        while True:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also the sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        try:
            index = compiler_so.index('-isysroot')
            # Strip this argument and the next one:
            del compiler_so[index:index+2]
        except ValueError:
            pass

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx+1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx+1]

    if sysroot and not os.path.isdir(sysroot):
        logger.warning(
            "compiling with an SDK that doesn't seem to exist: %r;\n"
            "please check your Xcode installation", sysroot)

    return compiler_so
|
||||
|
||||
class UnixCCompiler(CCompiler):
    """CCompiler subclass for typical Unix-style command-line C compilers."""

    name = 'unix'
    description = 'Standard UNIX-style compiler'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler'     : ["cc"],
                   'compiler_so'  : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so'    : ["cc", "-shared"],
                   'linker_exe'   : ["cc"],
                   'archiver'     : ["ar", "-cr"],
                   'ranlib'       : None,
                  }

    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"
|
||||
|
||||
def preprocess(self, source,
               output_file=None, macros=None, include_dirs=None,
               extra_preargs=None, extra_postargs=None):
    """Run the preprocessor over *source*, optionally into *output_file*.

    Raises CompileError when the spawned preprocessor fails.
    """
    ignore, macros, include_dirs = \
        self._fix_compile_args(None, macros, include_dirs)
    pp_opts = gen_preprocess_options(macros, include_dirs)
    pp_args = self.preprocessor + pp_opts
    if output_file:
        pp_args.extend(('-o', output_file))
    if extra_preargs:
        pp_args[:0] = extra_preargs
    if extra_postargs:
        pp_args.extend(extra_postargs)
    pp_args.append(source)

    # We need to preprocess: either we're being forced to, or we're
    # generating output to stdout, or there's a target output file and
    # the source file is newer than the target (or the target doesn't
    # exist).
    if self.force or output_file is None or newer(source, output_file):
        if output_file:
            self.mkpath(os.path.dirname(output_file))
        try:
            self.spawn(pp_args)
        except PackagingExecError as msg:
            raise CompileError(msg)
|
||||
|
||||
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
    """Compile one source file *src* into the object file *obj*."""
    compiler_so = self.compiler_so
    if sys.platform == 'darwin':
        # Strip conflicting -arch/-isysroot flags on OS X.
        compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs)
    try:
        self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                   extra_postargs)
    except PackagingExecError as msg:
        raise CompileError(msg)
|
||||
|
||||
def create_static_lib(self, objects, output_libname,
                      output_dir=None, debug=False, target_lang=None):
    """Archive *objects* into a static library with 'ar' (+ optional ranlib).

    Skips the call when the target is up to date; raises LibError when
    ranlib fails.
    """
    objects, output_dir = self._fix_object_args(objects, output_dir)

    output_filename = \
        self.library_filename(output_libname, output_dir=output_dir)

    if self._need_link(objects, output_filename):
        self.mkpath(os.path.dirname(output_filename))
        self.spawn(self.archiver +
                   [output_filename] +
                   objects + self.objects)

        # Not many Unices required ranlib anymore -- SunOS 4.x is, I
        # think the only major Unix that does.  Maybe we need some
        # platform intelligence here to skip ranlib if it's not
        # needed -- or maybe Python's configure script took care of
        # it for us, hence the check for leading colon.
        if self.ranlib:
            try:
                self.spawn(self.ranlib + [output_filename])
            except PackagingExecError as msg:
                raise LibError(msg)
    else:
        logger.debug("skipping %s (up-to-date)", output_filename)
|
||||
|
||||
def link(self, target_desc, objects,
         output_filename, output_dir=None, libraries=None,
         library_dirs=None, runtime_library_dirs=None,
         export_symbols=None, debug=False, extra_preargs=None,
         extra_postargs=None, build_temp=None, target_lang=None):
    """Link *objects* into an executable or shared object.

    *target_desc* selects the linker front end (CCompiler.EXECUTABLE
    uses linker_exe, anything else linker_so).  Raises LinkError when
    the spawned linker fails; skips the link when the target is newer
    than all objects.
    """
    objects, output_dir = self._fix_object_args(objects, output_dir)
    libraries, library_dirs, runtime_library_dirs = \
        self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

    # -L/-R/-l flags derived from the (fixed-up) library arguments
    lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                               libraries)
    # NOTE(review): isinstance(output_dir, str) would be the idiomatic
    # check here; kept as-is to preserve exact behavior.
    if type(output_dir) not in (str, type(None)):
        raise TypeError("'output_dir' must be a string or None")
    if output_dir is not None:
        output_filename = os.path.join(output_dir, output_filename)

    if self._need_link(objects, output_filename):
        ld_args = (objects + self.objects +
                   lib_opts + ['-o', output_filename])
        if debug:
            # prepend so -g precedes the object files
            ld_args[:0] = ['-g']
        if extra_preargs:
            ld_args[:0] = extra_preargs
        if extra_postargs:
            ld_args.extend(extra_postargs)
        self.mkpath(os.path.dirname(output_filename))
        try:
            if target_desc == CCompiler.EXECUTABLE:
                linker = self.linker_exe[:]
            else:
                linker = self.linker_so[:]
            if target_lang == "c++" and self.compiler_cxx:
                # skip over environment variable settings if /usr/bin/env
                # is used to set up the linker's environment.
                # This is needed on OSX. Note: this assumes that the
                # normal and C++ compiler have the same environment
                # settings.
                i = 0
                if os.path.basename(linker[0]) == "env":
                    i = 1
                    while '=' in linker[i]:
                        i = i + 1

                # NOTE(review): indexing compiler_cxx with i (not 0) can
                # raise IndexError when env-var settings were skipped and
                # compiler_cxx is shorter than linker -- confirm intent.
                linker[i] = self.compiler_cxx[i]

            if sys.platform == 'darwin':
                linker = _darwin_compiler_fixup(linker, ld_args)

            self.spawn(linker + ld_args)
        except PackagingExecError as msg:
            raise LinkError(msg)
    else:
        logger.debug("skipping %s (up-to-date)", output_filename)
|
||||
|
||||
# -- Miscellaneous methods -----------------------------------------
|
||||
# These are all used by the 'gen_lib_options() function, in
|
||||
# ccompiler.py.
|
||||
|
||||
def library_dir_option(self, dir):
    """Return the compiler flag adding *dir* to the library search path."""
    return "-L%s" % dir
|
||||
|
||||
def _is_gcc(self, compiler_name):
|
||||
return "gcc" in compiler_name or "g++" in compiler_name
|
||||
|
||||
def runtime_library_dir_option(self, dir):
    """Return the flag(s) adding *dir* to the runtime (rpath) search path.

    Returns a string or a list of strings depending on the platform.
    """
    # XXX Hackish, at the very least.  See Python bug #445902:
    # http://sourceforge.net/tracker/index.php
    #   ?func=detail&aid=445902&group_id=5470&atid=105470
    # Linkers on different platforms need different options to
    # specify that directories need to be added to the list of
    # directories searched for dependencies when a dynamic library
    # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to
    # be told to pass the -R option through to the linker, whereas
    # other compilers and gcc on other systems just know this.
    # Other compilers may need something slightly different.  At
    # this time, there's no way to determine this information from
    # the configuration data stored in the Python installation, so
    # we use this hack.

    # decide based on the compiler Python itself was built with
    compiler = os.path.basename(sysconfig.get_config_var("CC"))
    if sys.platform[:6] == "darwin":
        # MacOSX's linker doesn't understand the -R flag at all
        return "-L" + dir
    elif sys.platform[:5] == "hp-ux":
        if self._is_gcc(compiler):
            return ["-Wl,+s", "-L" + dir]
        return ["+s", "-L" + dir]
    elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
        return ["-rpath", dir]
    elif self._is_gcc(compiler):
        # gcc on non-GNU systems does not need -Wl, but can
        # use it anyway.  Since distutils has always passed in
        # -Wl whenever gcc was used in the past it is probably
        # safest to keep doing so.
        if sysconfig.get_config_var("GNULD") == "yes":
            # GNU ld needs an extra option to get a RUNPATH
            # instead of just an RPATH.
            return "-Wl,--enable-new-dtags,-R" + dir
        else:
            return "-Wl,-R" + dir
    elif sys.platform[:3] == "aix":
        return "-blibpath:" + dir
    else:
        # No idea how --enable-new-dtags would be passed on to
        # ld if this system was using GNU ld.  Don't know if a
        # system like this even exists.
        return "-R" + dir
|
||||
|
||||
def library_option(self, lib):
    """Return the linker flag that links against library *lib*."""
    return "-l%s" % lib
|
||||
|
||||
def find_library_file(self, dirs, lib, debug=False):
    """Search *dirs* for library *lib* and return the first match.

    Checks the dylib, shared and static variants of the library name,
    in that order, in each directory; returns None when no file exists.
    """
    # We're second-guessing the linker here, with not much hard
    # data to go on: GCC seems to prefer the shared library, so I'm
    # assuming that *all* Unix C compilers do.  And of course I'm
    # ignoring even GCC's "-static" option.  So sue me.
    candidates = [self.library_filename(lib, lib_type=kind)
                  for kind in ('dylib', 'shared', 'static')]
    for dir in dirs:
        for fname in candidates:
            full = os.path.join(dir, fname)
            if os.path.exists(full):
                return full
    # Oops, didn't find it in *any* of 'dirs'
    return None
|
||||
357
Lib/packaging/config.py
Normal file
357
Lib/packaging/config.py
Normal file
|
|
@ -0,0 +1,357 @@
|
|||
"""Utilities to find and read config files used by packaging."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from shlex import split
|
||||
from configparser import RawConfigParser
|
||||
from packaging import logger
|
||||
from packaging.errors import PackagingOptionError
|
||||
from packaging.compiler.extension import Extension
|
||||
from packaging.util import check_environ, iglob, resolve_name, strtobool
|
||||
from packaging.compiler import set_compiler
|
||||
from packaging.command import set_command
|
||||
from packaging.markers import interpret
|
||||
|
||||
|
||||
def _pop_values(values_dct, key):
|
||||
"""Remove values from the dictionary and convert them as a list"""
|
||||
vals_str = values_dct.pop(key, '')
|
||||
if not vals_str:
|
||||
return
|
||||
fields = []
|
||||
for field in vals_str.split(os.linesep):
|
||||
tmp_vals = field.split('--')
|
||||
if len(tmp_vals) == 2 and not interpret(tmp_vals[1]):
|
||||
continue
|
||||
fields.append(tmp_vals[0])
|
||||
# Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options?
|
||||
vals = split(' '.join(fields))
|
||||
if vals:
|
||||
return vals
|
||||
|
||||
|
||||
def _rel_path(base, path):
|
||||
assert path.startswith(base)
|
||||
return path[len(base):].lstrip('/')
|
||||
|
||||
|
||||
def get_resources_dests(resources_root, rules):
    """Map resource files under *resources_root* to install destinations.

    *rules* is an iterable of (base, suffix, dest) glob triples; a rule
    with dest None removes previously matched entries.
    """
    destinations = {}
    for base, suffix, dest in rules:
        base_pattern = os.path.join(resources_root, base)
        for abs_base in iglob(base_pattern):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = _rel_path(resources_root, abs_path)
                if dest is None:
                    # exclusion rule: drop the entry if it was added earlier
                    destinations.pop(resource_file, None)
                else:
                    destinations[resource_file] = os.path.join(
                        dest, _rel_path(abs_base, abs_path))
    return destinations
|
||||
|
||||
|
||||
class Config:
    """Read configuration files and update a Distribution instance.

    Handles the system, per-user and per-project (setup.cfg) config
    files, including the [metadata], [files], [global] and
    [extension=...] sections.
    """

    def __init__(self, dist):
        # dist: the packaging Distribution being configured
        self.dist = dist
        # optional callable resolved from [global] setup_hook
        self.setup_hook = None

    def run_hook(self, config):
        """Call the resolved setup hook with the parsed config, if any."""
        if self.setup_hook is None:
            return
        # the hook gets only the config
        self.setup_hook(config)

    def find_config_files(self):
        """Find as many configuration files as should be processed for this
        platform, and return a list of filenames in the order in which they
        should be parsed.  The filenames returned are guaranteed to exist
        (modulo nasty race conditions).

        There are three possible config files: packaging.cfg in the
        Packaging installation directory (ie. where the top-level
        Packaging __inst__.py file lives), a file in the user's home
        directory named .pydistutils.cfg on Unix and pydistutils.cfg
        on Windows/Mac; and setup.cfg in the current directory.

        The file in the user's home directory can be disabled with the
        --no-user-cfg option.
        """
        files = []
        check_environ()

        # Where to look for the system-wide Packaging config file
        sys_dir = os.path.dirname(sys.modules['packaging'].__file__)

        # Look for the system config file
        sys_file = os.path.join(sys_dir, "packaging.cfg")
        if os.path.isfile(sys_file):
            files.append(sys_file)

        # What to call the per-user config file
        if os.name == 'posix':
            user_filename = ".pydistutils.cfg"
        else:
            user_filename = "pydistutils.cfg"

        # And look for the user config file
        if self.dist.want_user_cfg:
            user_file = os.path.join(os.path.expanduser('~'), user_filename)
            if os.path.isfile(user_file):
                files.append(user_file)

        # All platforms support local setup.cfg
        local_file = "setup.cfg"
        if os.path.isfile(local_file):
            files.append(local_file)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("using config files: %s", ', '.join(files))
        return files

    def _convert_metadata(self, name, value):
        """Convert a raw setup.cfg value to a metadata value (identity for now)."""
        # converts a value found in setup.cfg into a valid metadata
        # XXX
        return value

    def _multiline(self, value):
        """Split a multi-line option value into a list of stripped, non-empty lines."""
        value = [v for v in
                 [v.strip() for v in value.split('\n')]
                 if v != '']
        return value

    def _read_setup_cfg(self, parser, cfg_filename):
        """Populate self.dist from the already-parsed setup.cfg *parser*."""
        cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
        # snapshot the parser as plain nested dicts
        content = {}
        for section in parser.sections():
            content[section] = dict(parser.items(section))

        # global:setup_hook is called *first*
        if 'global' in content:
            if 'setup_hook' in content['global']:
                setup_hook = content['global']['setup_hook']
                try:
                    self.setup_hook = resolve_name(setup_hook)
                except ImportError as e:
                    logger.warning('could not import setup_hook: %s',
                                   e.args[0])
                else:
                    # the hook may mutate `content` before it is consumed
                    self.run_hook(content)

        metadata = self.dist.metadata

        # setting the metadata values
        if 'metadata' in content:
            for key, value in content['metadata'].items():
                # config keys use underscores, metadata fields use dashes
                key = key.replace('_', '-')
                if metadata.is_multi_field(key):
                    value = self._multiline(value)

                if key == 'project-url':
                    # each line is "label, url"
                    value = [(label.strip(), url.strip())
                             for label, url in
                             [v.split(',') for v in value]]

                if key == 'description-file':
                    if 'description' in content['metadata']:
                        # NOTE(review): stray quote after "description-file"
                        # in this message -- looks like a typo.
                        msg = ("description and description-file' are "
                               "mutually exclusive")
                        raise PackagingOptionError(msg)

                    if isinstance(value, list):
                        filenames = value
                    else:
                        filenames = value.split()

                    # concatenate each files
                    value = ''
                    for filename in filenames:
                        # will raise if file not found
                        with open(filename) as description_file:
                            value += description_file.read().strip() + '\n'
                        # add filename as a required file
                        if filename not in metadata.requires_files:
                            metadata.requires_files.append(filename)
                    value = value.strip()
                    key = 'description'

                if metadata.is_metadata_field(key):
                    metadata[key] = self._convert_metadata(key, value)

        if 'files' in content:
            files = content['files']
            # NOTE(review): packages_root is stored as package_dir, but the
            # 'pkg:dir' handling below indexes it like a dict -- confirm
            # the expected type with the Distribution class.
            self.dist.package_dir = files.pop('packages_root', None)

            files = dict((key, self._multiline(value)) for key, value in
                         files.items())

            self.dist.packages = []

            packages = files.get('packages', [])
            if isinstance(packages, str):
                packages = [packages]

            for package in packages:
                if ':' in package:
                    # "package:directory" maps a package to a source dir
                    dir_, package = package.split(':')
                    self.dist.package_dir[package] = dir_
                self.dist.packages.append(package)

            self.dist.py_modules = files.get('modules', [])
            if isinstance(self.dist.py_modules, str):
                self.dist.py_modules = [self.dist.py_modules]
            self.dist.scripts = files.get('scripts', [])
            if isinstance(self.dist.scripts, str):
                self.dist.scripts = [self.dist.scripts]

            # package_data entries are "key = value" lines
            self.dist.package_data = {}
            for data in files.get('package_data', []):
                data = data.split('=')
                if len(data) != 2:
                    continue  # XXX error should never pass silently
                key, value = data
                self.dist.package_data[key.strip()] = value.strip()

            # data_files entries are "dest = src1, src2, ..." lines
            self.dist.data_files = []
            for data in files.get('data_files', []):
                data = data.split('=')
                if len(data) != 2:
                    continue
                key, value = data
                values = [v.strip() for v in value.split(',')]
                self.dist.data_files.append((key, values))

            # manifest template
            self.dist.extra_files = files.get('extra_files', [])

            # resources rules: "glob = destination" (glob may be
            # "prefix suffix"; destination '<exclude>' removes matches)
            resources = []
            for rule in files.get('resources', []):
                glob, destination = rule.split('=', 1)
                rich_glob = glob.strip().split(' ', 1)
                if len(rich_glob) == 2:
                    prefix, suffix = rich_glob
                else:
                    assert len(rich_glob) == 1
                    prefix = ''
                    suffix = glob
                if destination == '<exclude>':
                    destination = None
                # NOTE(review): when destination was set to None above,
                # destination.strip() raises AttributeError -- confirm
                # against the test suite / later fixes.
                resources.append(
                    (prefix.strip(), suffix.strip(), destination.strip()))
            # NOTE(review): this overwrites the data_files list built
            # above whenever a [files] section is present.
            self.dist.data_files = get_resources_dests(
                cfg_directory, resources)

        # [extension=label] sections describe C extension modules
        ext_modules = self.dist.ext_modules
        for section_key in content:
            labels = section_key.split('=')
            if len(labels) == 2 and labels[0] == 'extension':
                # labels[1] not used from now but should be implemented
                # for extension build dependency
                values_dct = content[section_key]
                ext_modules.append(Extension(
                    values_dct.pop('name'),
                    _pop_values(values_dct, 'sources'),
                    _pop_values(values_dct, 'include_dirs'),
                    _pop_values(values_dct, 'define_macros'),
                    _pop_values(values_dct, 'undef_macros'),
                    _pop_values(values_dct, 'library_dirs'),
                    _pop_values(values_dct, 'libraries'),
                    _pop_values(values_dct, 'runtime_library_dirs'),
                    _pop_values(values_dct, 'extra_objects'),
                    _pop_values(values_dct, 'extra_compile_args'),
                    _pop_values(values_dct, 'extra_link_args'),
                    _pop_values(values_dct, 'export_symbols'),
                    _pop_values(values_dct, 'swig_opts'),
                    _pop_values(values_dct, 'depends'),
                    values_dct.pop('language', None),
                    values_dct.pop('optional', None),
                    **values_dct))

    def parse_config_files(self, filenames=None):
        """Parse every config file (found automatically when *filenames* is
        None) and record the options on the Distribution."""
        if filenames is None:
            filenames = self.find_config_files()

        logger.debug("Distribution.parse_config_files():")

        parser = RawConfigParser()

        for filename in filenames:
            logger.debug("  reading %s", filename)
            parser.read(filename)

            # only the project file gets the full setup.cfg treatment
            if os.path.split(filename)[-1] == 'setup.cfg':
                self._read_setup_cfg(parser, filename)

            for section in parser.sections():
                if section == 'global':
                    if parser.has_option('global', 'compilers'):
                        self._load_compilers(parser.get('global', 'compilers'))

                    if parser.has_option('global', 'commands'):
                        self._load_commands(parser.get('global', 'commands'))

                options = parser.options(section)
                opt_dict = self.dist.get_option_dict(section)

                for opt in options:
                    if opt == '__name__':
                        continue
                    val = parser.get(section, opt)
                    opt = opt.replace('-', '_')

                    if opt == 'sub_commands':
                        val = self._multiline(val)
                        if isinstance(val, str):
                            val = [val]

                    # Hooks use a suffix system to prevent being overriden
                    # by a config file processed later (i.e. a hook set in
                    # the user config file cannot be replaced by a hook
                    # set in a project config file, unless they have the
                    # same suffix).
                    if (opt.startswith("pre_hook.") or
                        opt.startswith("post_hook.")):
                        hook_type, alias = opt.split(".")
                        hook_dict = opt_dict.setdefault(
                            hook_type, (filename, {}))[1]
                        hook_dict[alias] = val
                    else:
                        # remember which file each option came from
                        opt_dict[opt] = filename, val

            # Make the RawConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.
        if 'global' in self.dist.command_options:
            for opt, (src, val) in self.dist.command_options['global'].items():
                alias = self.dist.negative_opt.get(opt)
                try:
                    if alias:
                        # negative options invert the boolean value
                        setattr(self.dist, alias, not strtobool(val))
                    elif opt == 'dry_run':  # FIXME ugh!
                        setattr(self.dist, opt, strtobool(val))
                    else:
                        setattr(self.dist, opt, val)
                except ValueError as msg:
                    raise PackagingOptionError(msg)

    def _load_compilers(self, compilers):
        """Register each compiler named in the multi-line *compilers* value."""
        compilers = self._multiline(compilers)
        if isinstance(compilers, str):
            compilers = [compilers]
        for compiler in compilers:
            set_compiler(compiler.strip())

    def _load_commands(self, commands):
        """Register each command named in the multi-line *commands* value."""
        commands = self._multiline(commands)
        if isinstance(commands, str):
            commands = [commands]
        for command in commands:
            set_command(command.strip())
|
||||
693
Lib/packaging/create.py
Normal file
693
Lib/packaging/create.py
Normal file
|
|
@ -0,0 +1,693 @@
|
|||
#!/usr/bin/env python
|
||||
"""Interactive helper used to create a setup.cfg file.
|
||||
|
||||
This script will generate a packaging configuration file by looking at
|
||||
the current directory and asking the user questions. It is intended to
|
||||
be called as
|
||||
|
||||
pysetup create
|
||||
|
||||
or
|
||||
|
||||
python3.3 -m packaging.create
|
||||
"""
|
||||
|
||||
# Original code by Sean Reifschneider <jafo@tummy.com>
|
||||
|
||||
# Original TODO list:
|
||||
# Look for a license file and automatically add the category.
|
||||
# When a .c file is found during the walk, can we add it as an extension?
|
||||
# Ask if there is a maintainer different that the author
|
||||
# Ask for the platform (can we detect this via "import win32" or something?)
|
||||
# Ask for the dependencies.
|
||||
# Ask for the Requires-Dist
|
||||
# Ask for the Provides-Dist
|
||||
# Ask for a description
|
||||
# Detect scripts (not sure how. #! outside of package?)
|
||||
|
||||
import os
|
||||
import imp
|
||||
import sys
|
||||
import glob
|
||||
import re
|
||||
import shutil
|
||||
import sysconfig
|
||||
from configparser import RawConfigParser
|
||||
from textwrap import dedent
|
||||
from hashlib import md5
|
||||
from functools import cmp_to_key
|
||||
# importing this with an underscore as it should be replaced by the
|
||||
# dict form or another structures for all purposes
|
||||
from packaging._trove import all_classifiers as _CLASSIFIERS_LIST
|
||||
from packaging.version import is_valid_version
|
||||
|
||||
_FILENAME = 'setup.cfg'
|
||||
_DEFAULT_CFG = '.pypkgcreate'
|
||||
|
||||
_helptext = {
|
||||
'name': '''
|
||||
The name of the program to be packaged, usually a single word composed
|
||||
of lower-case characters such as "python", "sqlalchemy", or "CherryPy".
|
||||
''',
|
||||
'version': '''
|
||||
Version number of the software, typically 2 or 3 numbers separated by dots
|
||||
such as "1.00", "0.6", or "3.02.01". "0.1.0" is recommended for initial
|
||||
development.
|
||||
''',
|
||||
'summary': '''
|
||||
A one-line summary of what this project is or does, typically a sentence 80
|
||||
characters or less in length.
|
||||
''',
|
||||
'author': '''
|
||||
The full name of the author (typically you).
|
||||
''',
|
||||
'author_email': '''
|
||||
E-mail address of the project author (typically you).
|
||||
''',
|
||||
'do_classifier': '''
|
||||
Trove classifiers are optional identifiers that allow you to specify the
|
||||
intended audience by saying things like "Beta software with a text UI
|
||||
for Linux under the PSF license. However, this can be a somewhat involved
|
||||
process.
|
||||
''',
|
||||
'packages': '''
|
||||
You can provide a package name contained in your project.
|
||||
''',
|
||||
'modules': '''
|
||||
You can provide a python module contained in your project.
|
||||
''',
|
||||
'extra_files': '''
|
||||
You can provide extra files/dirs contained in your project.
|
||||
It has to follow the template syntax. XXX add help here.
|
||||
''',
|
||||
|
||||
'home_page': '''
|
||||
The home page for the project, typically starting with "http://".
|
||||
''',
|
||||
'trove_license': '''
|
||||
Optionally you can specify a license. Type a string that identifies a common
|
||||
license, and then you can select a list of license specifiers.
|
||||
''',
|
||||
'trove_generic': '''
|
||||
Optionally, you can set other trove identifiers for things such as the
|
||||
human language, programming language, user interface, etc...
|
||||
''',
|
||||
'setup.py found': '''
|
||||
The setup.py script will be executed to retrieve the metadata.
|
||||
A wizard will be run if you answer "n",
|
||||
''',
|
||||
}
|
||||
|
||||
PROJECT_MATURITY = ['Development Status :: 1 - Planning',
|
||||
'Development Status :: 2 - Pre-Alpha',
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Development Status :: 4 - Beta',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Development Status :: 6 - Mature',
|
||||
'Development Status :: 7 - Inactive']
|
||||
|
||||
# XXX everything needs docstrings and tests (both low-level tests of various
|
||||
# methods and functional tests of running the script)
|
||||
|
||||
|
||||
def load_setup():
    """Run the setup script (i.e. the setup.py file).

    This function loads the setup file in all cases (even if it has
    already been loaded before), because we are monkey-patching its
    setup function with a particular one.
    """
    # imp.load_module re-executes setup.py on every call; the caller is
    # expected to have installed a mock distutils.core.setup first.
    with open("setup.py") as f:
        imp.load_module("setup", f, "setup.py", (".py", "r", imp.PY_SOURCE))
|
||||
|
||||
|
||||
def ask_yn(question, default=None, helptext=None):
    """Ask a yes/no question and return 'y' or 'n'.

    Re-prompts until the (required) answer starts with y/Y or n/N.
    """
    question += ' (y/n)'
    while True:
        reply = ask(question, default, helptext, required=True)
        if reply:
            first = reply[0].lower()
            if first in 'yn':
                return first

        print('\nERROR: You must select "Y" or "N".\n')
|
||||
|
||||
|
||||
def ask(question, default=None, helptext=None, required=True,
        lengthy=False, multiline=False):
    """Prompt on stdout, read one line from stdin, and return the answer.

    Returns *default* on an empty answer when a default is set; '?' prints
    *helptext* and re-prompts; an empty answer re-prompts when *required*.
    """
    prompt = '%s: ' % (question,)
    if default:
        prompt = '%s [%s]: ' % (question, default)
    # long question+default pairs put the default on its own line
    if default and len(question) + len(default) > 70:
        prompt = '%s\n [%s]: ' % (question, default)
    if lengthy or multiline:
        prompt += '\n > '

    if not helptext:
        helptext = 'No additional help available.'

    helptext = helptext.strip("\n")

    while True:
        sys.stdout.write(prompt)
        sys.stdout.flush()

        line = sys.stdin.readline().strip()
        if line == '?':
            # show the help text and ask again
            print('=' * 70)
            print(helptext)
            print('=' * 70)
            continue
        if default and not line:
            return default
        if not line and required:
            print('*' * 70)
            print('This value cannot be empty.')
            print('===========================')
            if helptext:
                print(helptext)
            print('*' * 70)
            continue
        return line
|
||||
|
||||
|
||||
def convert_yn_to_bool(yn, yes=True, no=False):
    """Convert a y/yes or n/no answer to *yes* or *no* (booleans by default)."""
    return yes if yn.lower().startswith('y') else no
|
||||
|
||||
|
||||
def _build_classifiers_dict(classifiers):
|
||||
d = {}
|
||||
for key in classifiers:
|
||||
subDict = d
|
||||
for subkey in key.split(' :: '):
|
||||
if not subkey in subDict:
|
||||
subDict[subkey] = {}
|
||||
subDict = subDict[subkey]
|
||||
return d
|
||||
|
||||
CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST)
|
||||
|
||||
|
||||
def _build_licences(classifiers):
|
||||
res = []
|
||||
for index, item in enumerate(classifiers):
|
||||
if not item.startswith('License :: '):
|
||||
continue
|
||||
res.append((index, item.split(' :: ')[-1].lower()))
|
||||
return res
|
||||
|
||||
LICENCES = _build_licences(_CLASSIFIERS_LIST)
|
||||
|
||||
|
||||
class MainProgram:
|
||||
"""Make a project setup configuration file (setup.cfg)."""
|
||||
|
||||
def __init__(self):
|
||||
self.configparser = None
|
||||
self.classifiers = set()
|
||||
self.data = {'name': '',
|
||||
'version': '1.0.0',
|
||||
'classifier': self.classifiers,
|
||||
'packages': [],
|
||||
'modules': [],
|
||||
'platform': [],
|
||||
'resources': [],
|
||||
'extra_files': [],
|
||||
'scripts': [],
|
||||
}
|
||||
self._load_defaults()
|
||||
|
||||
def __call__(self):
|
||||
setupcfg_defined = False
|
||||
if self.has_setup_py() and self._prompt_user_for_conversion():
|
||||
setupcfg_defined = self.convert_py_to_cfg()
|
||||
if not setupcfg_defined:
|
||||
self.define_cfg_values()
|
||||
self._write_cfg()
|
||||
|
||||
def has_setup_py(self):
|
||||
"""Test for the existance of a setup.py file."""
|
||||
return os.path.exists('setup.py')
|
||||
|
||||
def define_cfg_values(self):
|
||||
self.inspect()
|
||||
self.query_user()
|
||||
|
||||
def _lookup_option(self, key):
|
||||
if not self.configparser.has_option('DEFAULT', key):
|
||||
return None
|
||||
return self.configparser.get('DEFAULT', key)
|
||||
|
||||
def _load_defaults(self):
|
||||
# Load default values from a user configuration file
|
||||
self.configparser = RawConfigParser()
|
||||
# TODO replace with section in distutils config file
|
||||
default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG))
|
||||
self.configparser.read(default_cfg)
|
||||
self.data['author'] = self._lookup_option('author')
|
||||
self.data['author_email'] = self._lookup_option('author_email')
|
||||
|
||||
def _prompt_user_for_conversion(self):
|
||||
# Prompt the user about whether they would like to use the setup.py
|
||||
# conversion utility to generate a setup.cfg or generate the setup.cfg
|
||||
# from scratch
|
||||
answer = ask_yn(('A legacy setup.py has been found.\n'
|
||||
'Would you like to convert it to a setup.cfg?'),
|
||||
default="y",
|
||||
helptext=_helptext['setup.py found'])
|
||||
return convert_yn_to_bool(answer)
|
||||
|
||||
def _dotted_packages(self, data):
|
||||
packages = sorted(data)
|
||||
modified_pkgs = []
|
||||
for pkg in packages:
|
||||
pkg = pkg.lstrip('./')
|
||||
pkg = pkg.replace('/', '.')
|
||||
modified_pkgs.append(pkg)
|
||||
return modified_pkgs
|
||||
|
||||
def _write_cfg(self):
|
||||
if os.path.exists(_FILENAME):
|
||||
if os.path.exists('%s.old' % _FILENAME):
|
||||
print("ERROR: %(name)s.old backup exists, please check that "
|
||||
"current %(name)s is correct and remove %(name)s.old" %
|
||||
{'name': _FILENAME})
|
||||
return
|
||||
shutil.move(_FILENAME, '%s.old' % _FILENAME)
|
||||
|
||||
with open(_FILENAME, 'w') as fp:
|
||||
fp.write('[metadata]\n')
|
||||
# simple string entries
|
||||
for name in ('name', 'version', 'summary', 'download_url'):
|
||||
fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN')))
|
||||
# optional string entries
|
||||
if 'keywords' in self.data and self.data['keywords']:
|
||||
fp.write('keywords = %s\n' % ' '.join(self.data['keywords']))
|
||||
for name in ('home_page', 'author', 'author_email',
|
||||
'maintainer', 'maintainer_email', 'description-file'):
|
||||
if name in self.data and self.data[name]:
|
||||
fp.write('%s = %s\n' % (name, self.data[name]))
|
||||
if 'description' in self.data:
|
||||
fp.write(
|
||||
'description = %s\n'
|
||||
% '\n |'.join(self.data['description'].split('\n')))
|
||||
# multiple use string entries
|
||||
for name in ('platform', 'supported-platform', 'classifier',
|
||||
'requires-dist', 'provides-dist', 'obsoletes-dist',
|
||||
'requires-external'):
|
||||
if not(name in self.data and self.data[name]):
|
||||
continue
|
||||
fp.write('%s = ' % name)
|
||||
fp.write(''.join(' %s\n' % val
|
||||
for val in self.data[name]).lstrip())
|
||||
fp.write('\n[files]\n')
|
||||
for name in ('packages', 'modules', 'scripts',
|
||||
'package_data', 'extra_files'):
|
||||
if not(name in self.data and self.data[name]):
|
||||
continue
|
||||
fp.write('%s = %s\n'
|
||||
% (name, '\n '.join(self.data[name]).strip()))
|
||||
fp.write('\nresources =\n')
|
||||
for src, dest in self.data['resources']:
|
||||
fp.write(' %s = %s\n' % (src, dest))
|
||||
fp.write('\n')
|
||||
|
||||
os.chmod(_FILENAME, 0o644)
|
||||
print('Wrote "%s".' % _FILENAME)
|
||||
|
||||
def convert_py_to_cfg(self):
    """Generate a setup.cfg from an existing setup.py.

    It only exports the distutils metadata (setuptools specific metadata
    is not currently supported).

    Works by monkey-patching ``distutils.core.setup`` (and
    ``setuptools.setup`` when available) with a mock that records the
    keyword arguments, then executing the project's setup.py via
    ``load_setup()``.  The harvested values are merged into ``self.data``.

    :returns: True on success, None when distutils v1 is unavailable.
    :raises ValueError: if no metadata could be loaded from setup.py.
    """
    data = self.data

    def setup_mock(**attrs):
        """Mock the setup(**attrs) in order to retrieve metadata."""
        # use the distutils v1 processings to correctly parse metadata.
        #XXX we could also use the setuptools distibution ???
        from distutils.dist import Distribution
        dist = Distribution(attrs)
        dist.parse_config_files()

        # 1. retrieve metadata fields that are quite similar in
        # PEP 314 and PEP 345; each pair maps a distutils attribute
        # name to its PEP 345 field name.
        labels = (('name',) * 2,
                  ('version',) * 2,
                  ('author',) * 2,
                  ('author_email',) * 2,
                  ('maintainer',) * 2,
                  ('maintainer_email',) * 2,
                  ('description', 'summary'),
                  ('long_description', 'description'),
                  ('url', 'home_page'),
                  ('platforms', 'platform'),
                  # backport only for 2.5+
                  ('provides', 'provides-dist'),
                  ('obsoletes', 'obsoletes-dist'),
                  ('requires', 'requires-dist'))

        get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
        # only copy fields that actually have a value
        data.update((new, get(old)) for old, new in labels if get(old))

        # 2. retrieve data that requires special processing
        data['classifier'].update(dist.get_classifiers() or [])
        data['scripts'].extend(dist.scripts or [])
        data['packages'].extend(dist.packages or [])
        data['modules'].extend(dist.py_modules or [])
        # 2.1 data_files -> resources
        if dist.data_files:
            # normalize the bare-list form to [(dest, sources)]
            if len(dist.data_files) < 2 or \
                    isinstance(dist.data_files[1], str):
                dist.data_files = [('', dist.data_files)]
            # add tokens in the destination paths
            vars = {'distribution.name': data['name']}
            path_tokens = list(sysconfig.get_paths(vars=vars).items())

            # cmp-style comparison on the path length of each
            # (token, path) pair, for use with cmp_to_key below
            def length_comparison(x, y):
                len_x = len(x[1])
                len_y = len(y[1])
                if len_x == len_y:
                    return 0
                elif len_x < len_y:
                    return -1
                else:
                    return 1

            # sort tokens to use the longest one first
            path_tokens.sort(key=cmp_to_key(length_comparison))
            for dest, srcs in (dist.data_files or []):
                dest = os.path.join(sys.prefix, dest)
                for tok, path in path_tokens:
                    if dest.startswith(path):
                        dest = ('{%s}' % tok) + dest[len(path):]
                        files = [('/ '.join(src.rsplit('/', 1)), dest)
                                 for src in srcs]
                        data['resources'].extend(files)
                        # NOTE(review): this 'continue' only skips to the
                        # next token; presumably 'break' was intended so a
                        # destination is rewritten at most once — confirm.
                        continue
        # 2.2 package_data -> extra_files
        package_dirs = dist.package_dir or {}
        # NOTE(review): 'iter(...) or []' is always truthy; the 'or []'
        # is dead and fails anyway when package_data is None — confirm.
        for package, extras in iter(dist.package_data.items()) or []:
            package_dir = package_dirs.get(package, package)
            files = [os.path.join(package_dir, f) for f in extras]
            data['extra_files'].extend(files)

        # Use README file if its content is the desciption
        if "description" in data:
            # compare whitespace-insensitive, case-insensitive digests
            ref = md5(re.sub('\s', '',
                             self.data['description']).lower().encode())
            ref = ref.digest()
            for readme in glob.glob('README*'):
                with open(readme) as fp:
                    contents = fp.read()
                val = md5(re.sub('\s', '',
                                 contents.lower()).encode()).digest()
                if val == ref:
                    # the README duplicates the description: reference
                    # the file instead of inlining the text
                    del data['description']
                    data['description-file'] = readme
                    break

    # apply monkey patch to distutils (v1) and setuptools (if needed)
    # (abort the feature if distutils v1 has been killed)
    try:
        from distutils import core
        core.setup  # make sure it's not d2 maskerading as d1
    except (ImportError, AttributeError):
        return
    saved_setups = [(core, core.setup)]
    core.setup = setup_mock
    try:
        import setuptools
    except ImportError:
        pass
    else:
        saved_setups.append((setuptools, setuptools.setup))
        setuptools.setup = setup_mock
    # get metadata by executing the setup.py with the patched setup(...)
    success = False  # for python < 2.4
    try:
        load_setup()
        success = True
    finally:  # revert monkey patches
        for patched_module, original_setup in saved_setups:
            patched_module.setup = original_setup
    if not self.data:
        raise ValueError('Unable to load metadata from setup.py')
    return success
|
||||
|
||||
def inspect_file(self, path):
    """Scan the first lines of *path* for a Python shebang and record
    the matching 'Programming Language' Trove classifier."""
    with open(path, 'r') as stream:
        for _lineno in range(10):
            text = stream.readline()
            match = re.match(r'^#!.*python((?P<major>\d)(\.\d+)?)?$', text)
            if match is None:
                continue
            if match.group('major') == '3':
                self.classifiers.add('Programming Language :: Python :: 3')
            else:
                self.classifiers.add('Programming Language :: Python :: 2')
|
||||
|
||||
def inspect(self):
    """Guess a project name and version from the current directory.

    The information is harvested when the directory follows the
    [name]-[version] naming convention; otherwise only the raw
    directory name is recorded as the project name.
    """
    current = os.path.basename(os.getcwd())
    self.data['name'] = current
    parsed = re.match(r'(.*)-(\d.+)', current)
    if parsed is None:
        return
    self.data['name'] = parsed.group(1)
    self.data['version'] = parsed.group(2)
    # TODO Needs tested!
    if not is_valid_version(self.data['version']):
        raise RuntimeError(
            "Invalid version discovered: %s" % self.data['version'])
|
||||
|
||||
def query_user(self):
    """Interactively collect the project metadata from the user.

    Each prompt offers the current value in ``self.data`` as the
    default; answers update ``self.data`` in place.  The file list is
    either auto-discovered (:meth:`_find_files`) or entered manually,
    and Trove classifiers may be chosen at the end.
    """
    # core PEP 345 fields
    self.data['name'] = ask('Project name', self.data['name'],
          _helptext['name'])

    self.data['version'] = ask('Current version number',
          self.data.get('version'), _helptext['version'])
    self.data['summary'] = ask('Package summary',
          self.data.get('summary'), _helptext['summary'],
          lengthy=True)
    self.data['author'] = ask('Author name',
          self.data.get('author'), _helptext['author'])
    self.data['author_email'] = ask('Author e-mail address',
          self.data.get('author_email'), _helptext['author_email'])
    self.data['home_page'] = ask('Project Home Page',
          self.data.get('home_page'), _helptext['home_page'],
          required=False)

    # file list: automatic discovery, or manual entry loops
    if ask_yn('Do you want me to automatically build the file list '
          'with everything I can find in the current directory ? '
          'If you say no, you will have to define them manually.') == 'y':
        self._find_files()
    else:
        while ask_yn('Do you want to add a single module ?'
                ' (you will be able to add full packages next)',
                helptext=_helptext['modules']) == 'y':
            self._set_multi('Module name', 'modules')

        while ask_yn('Do you want to add a package ?',
                helptext=_helptext['packages']) == 'y':
            self._set_multi('Package name', 'packages')

        while ask_yn('Do you want to add an extra file ?',
                helptext=_helptext['extra_files']) == 'y':
            self._set_multi('Extra file/dir name', 'extra_files')

    if ask_yn('Do you want to set Trove classifiers?',
              helptext=_helptext['do_classifier']) == 'y':
        self.set_classifier()
|
||||
|
||||
def _find_files(self):
    """Auto-populate packages, modules and extra_files from the cwd.

    Walks the current directory twice: a first pass collects package
    directories (those containing ``__init__.py``); a second pass
    collects loose ``.py`` files as modules and everything else as
    extra files, skipping anything under an already-recorded package.
    """
    # we are looking for python modules and packages,
    # other stuff are added as regular files
    pkgs = self.data['packages']
    modules = self.data['modules']
    extra_files = self.data['extra_files']

    def is_package(path):
        # a directory is a package iff it has an __init__.py
        return os.path.exists(os.path.join(path, '__init__.py'))

    curdir = os.getcwd()
    scanned = []
    # relative prefixes and suffixes to ignore (build artifacts,
    # hidden files, backups, compiled bytecode)
    _pref = ['lib', 'include', 'dist', 'build', '.', '~']
    _suf = ['.pyc']

    def to_skip(path):
        # decide on the path relative to curdir
        path = relative(path)

        for pref in _pref:
            if path.startswith(pref):
                return True

        for suf in _suf:
            if path.endswith(suf):
                return True

        return False

    def relative(path):
        # strip the curdir prefix plus the separator
        return path[len(curdir) + 1:]

    def dotted(path):
        # convert a relative file path to a dotted module name
        res = relative(path).replace(os.path.sep, '.')
        if res.endswith('.py'):
            res = res[:-len('.py')]
        return res

    # first pass: packages
    for root, dirs, files in os.walk(curdir):
        if to_skip(root):
            continue
        for dir_ in sorted(dirs):
            if to_skip(dir_):
                continue
            fullpath = os.path.join(root, dir_)
            dotted_name = dotted(fullpath)
            if is_package(fullpath) and dotted_name not in pkgs:
                pkgs.append(dotted_name)
                scanned.append(fullpath)

    # modules and extra files
    for root, dirs, files in os.walk(curdir):
        if to_skip(root):
            continue

        # anything under a recorded package was already handled
        if any(root.startswith(path) for path in scanned):
            continue

        for file in sorted(files):
            fullpath = os.path.join(root, file)
            if to_skip(fullpath):
                continue
            # single module?
            if os.path.splitext(file)[-1] == '.py':
                modules.append(dotted(fullpath))
            else:
                extra_files.append(relative(fullpath))
|
||||
|
||||
def _set_multi(self, question, name):
    """Prompt with *question* and append the stripped answer to
    ``self.data[name]``, avoiding duplicates."""
    answer = ask(question, helptext=_helptext[name]).strip()
    values = self.data[name]
    if answer in values:
        return
    values.append(answer)
|
||||
|
||||
def set_classifier(self):
    """Drive the interactive selection of Trove classifiers: maturity,
    then license, then the generic classifier tree."""
    for step in (self.set_maturity_status,
                 self.set_license,
                 self.set_other_classifier):
        step(self.classifiers)
|
||||
|
||||
def set_other_classifier(self, classifiers):
    """Optionally walk the whole Trove tree so the user can pick
    additional classifiers."""
    wanted = ask_yn('Do you want to set other trove identifiers', 'n',
                    _helptext['trove_generic'])
    if wanted == 'y':
        self.walk_classifiers(classifiers, [CLASSIFIERS], '')
|
||||
|
||||
def walk_classifiers(self, classifiers, trovepath, desc):
    """Recursively offer the Trove subtree at ``trovepath[-1]``.

    *desc* accumulates the ``' :: '``-joined path from the root; leaf
    entries are offered for addition to *classifiers*, non-leaf
    entries are offered for recursive descent.
    """
    trove = trovepath[-1]

    if not trove:
        return

    for key in sorted(trove):
        if len(trove[key]) == 0:
            # leaf: offer the full classifier string; desc[4:] strips
            # the leading ' :: ' accumulated by the recursion
            if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y':
                classifiers.add(desc[4:] + ' :: ' + key)
            continue

        # non-leaf: offer to descend into the sub-tree
        if ask_yn('Do you want to set items under\n "%s" (%d sub-items)'
                  % (key, len(trove[key])), 'n',
                  _helptext['trove_generic']) == 'y':
            self.walk_classifiers(classifiers, trovepath + [trove[key]],
                                  desc + ' :: ' + key)
|
||||
|
||||
def set_license(self, classifiers):
    """Interactively pick a license Trove classifier.

    Asks for free-form license words, matches them against the known
    license classifiers and lets the user pick one of the matches.
    An empty answer returns without modifying *classifiers*.
    """
    while True:
        license = ask('What license do you use',
                      helptext=_helptext['trove_license'], required=False)
        if not license:
            return

        license_words = license.lower().split(' ')
        found_list = []

        # collect every known license containing one of the words typed
        for index, licence in LICENCES:
            for word in license_words:
                if word in licence:
                    found_list.append(index)
                    break

        if len(found_list) == 0:
            print('ERROR: Could not find a matching license for "%s"' %
                  license)
            continue

        question = 'Matching licenses:\n\n'

        for index, list_index in enumerate(found_list):
            question += ' %s) %s\n' % (index + 1,
                                       _CLASSIFIERS_LIST[list_index])

        question += ('\nType the number of the license you wish to use or '
                     '? to try again:')
        choice = ask(question, required=False)

        if choice == '?':
            continue
        if choice == '':
            return

        try:
            index = found_list[int(choice) - 1]
        except (ValueError, IndexError):
            # Bug fix: the original caught only ValueError and then fell
            # through, adding a classifier chosen with the stale loop
            # 'index'; out-of-range numbers raised an uncaught IndexError.
            print("ERROR: Invalid selection, type a number from the list "
                  "above.")
            continue

        classifiers.add(_CLASSIFIERS_LIST[index])
        # Bug fix: the original never left the while loop after a
        # successful selection and prompted for a license again forever.
        return
|
||||
|
||||
def set_maturity_status(self, classifiers):
    """Ask for the project maturity and add the chosen classifier."""
    def maturity_name(mat):
        # keep only the human-readable part after the '- ' separator
        return mat.split('- ')[-1]

    menu = '\n'.join('%s - %s' % (i, maturity_name(n))
                     for i, n in enumerate(PROJECT_MATURITY))
    maturity_question = '''\
Please select the project status:

%s

Status''' % menu
    while True:
        answer = ask(dedent(maturity_question), required=False)
        if not answer:
            continue
        try:
            classifiers.add(PROJECT_MATURITY[int(answer) - 1])
            return
        except (IndexError, ValueError):
            print("ERROR: Invalid selection, type a single digit "
                  "number.")
|
||||
|
||||
|
||||
def main():
    """Main entry point: run the interactive program."""
    # TODO: switch to the full pipeline once implemented:
    #   load_existing_setup_script, inspect_directory, query_user,
    #   update_config_file, write_setup_script,
    #   packaging.util.cfg_to_args
    MainProgram()()


if __name__ == '__main__':
    main()
|
||||
627
Lib/packaging/database.py
Normal file
627
Lib/packaging/database.py
Normal file
|
|
@ -0,0 +1,627 @@
|
|||
"""PEP 376 implementation."""
|
||||
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import csv
|
||||
import sys
|
||||
import zipimport
|
||||
from hashlib import md5
|
||||
from packaging import logger
|
||||
from packaging.errors import PackagingError
|
||||
from packaging.version import suggest_normalized_version, VersionPredicate
|
||||
from packaging.metadata import Metadata
|
||||
|
||||
|
||||
__all__ = [
|
||||
'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
|
||||
'get_distributions', 'get_distribution', 'get_file_users',
|
||||
'provides_distribution', 'obsoletes_distribution',
|
||||
'enable_cache', 'disable_cache', 'clear_cache',
|
||||
]
|
||||
|
||||
|
||||
# TODO update docs
|
||||
|
||||
DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')
|
||||
|
||||
# Cache
|
||||
_cache_name = {} # maps names to Distribution instances
|
||||
_cache_name_egg = {} # maps names to EggInfoDistribution instances
|
||||
_cache_path = {} # maps paths to Distribution instances
|
||||
_cache_path_egg = {} # maps paths to EggInfoDistribution instances
|
||||
_cache_generated = False # indicates if .dist-info distributions are cached
|
||||
_cache_generated_egg = False # indicates if .dist-info and .egg are cached
|
||||
_cache_enabled = True
|
||||
|
||||
|
||||
def enable_cache():
    """Enable the internal distribution cache.

    Existing cached entries are kept as-is; use :func:`clear_cache`
    to drop them.
    """
    global _cache_enabled
    _cache_enabled = True
|
||||
|
||||
|
||||
def disable_cache():
    """Disable the internal distribution cache.

    Existing cached entries are kept as-is; use :func:`clear_cache`
    to drop them.
    """
    global _cache_enabled
    _cache_enabled = False
|
||||
|
||||
|
||||
def clear_cache():
    """Drop every cached distribution and reset the 'generated' flags."""
    global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
        _cache_generated, _cache_generated_egg

    _cache_name = {}
    _cache_name_egg = {}
    _cache_path = {}
    _cache_path_egg = {}
    _cache_generated = _cache_generated_egg = False
|
||||
|
||||
|
||||
def _yield_distributions(include_dist, include_egg, paths=sys.path):
|
||||
"""
|
||||
Yield .dist-info and .egg(-info) distributions, based on the arguments
|
||||
|
||||
:parameter include_dist: yield .dist-info distributions
|
||||
:parameter include_egg: yield .egg(-info) distributions
|
||||
"""
|
||||
for path in paths:
|
||||
realpath = os.path.realpath(path)
|
||||
if not os.path.isdir(realpath):
|
||||
continue
|
||||
for dir in os.listdir(realpath):
|
||||
dist_path = os.path.join(realpath, dir)
|
||||
if include_dist and dir.endswith('.dist-info'):
|
||||
yield Distribution(dist_path)
|
||||
elif include_egg and (dir.endswith('.egg-info') or
|
||||
dir.endswith('.egg')):
|
||||
yield EggInfoDistribution(dist_path)
|
||||
|
||||
|
||||
def _generate_cache(use_egg_info=False, paths=None):
    """Populate the module-level caches by scanning *paths*.

    Does nothing when the requested caches were already generated.

    :parameter use_egg_info: also cache .egg(-info) distributions
    :parameter paths: directories to scan; defaults to ``sys.path``,
                      looked up at call time
    """
    global _cache_generated, _cache_generated_egg

    if paths is None:
        # evaluate lazily so a rebound sys.path is honored
        paths = sys.path

    if _cache_generated_egg or (_cache_generated and not use_egg_info):
        # everything that was asked for is already cached
        return

    gen_dist = not _cache_generated
    gen_egg = use_egg_info

    for dist in _yield_distributions(gen_dist, gen_egg, paths):
        if isinstance(dist, Distribution):
            _cache_path[dist.path] = dist
            # idiom fix: setdefault replaces the manual
            # 'if not name in cache: cache[name] = []' dance
            _cache_name.setdefault(dist.name, []).append(dist)
        else:
            _cache_path_egg[dist.path] = dist
            _cache_name_egg.setdefault(dist.name, []).append(dist)

    if gen_dist:
        _cache_generated = True
    if gen_egg:
        _cache_generated_egg = True
|
||||
|
||||
|
||||
class Distribution:
    """Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``METADATA`` when it is
    instantiated."""

    name = ''
    """The name of the distribution."""

    version = ''
    """The version of the distribution."""

    metadata = None
    """A :class:`packaging.metadata.Metadata` instance loaded with
    the distribution's ``METADATA`` file."""

    requested = False
    """A boolean that indicates whether the ``REQUESTED`` metadata file is
    present (in other words, whether the package was installed by user
    request or it was installed as a dependency)."""

    def __init__(self, path):
        # reuse already-parsed metadata when this path was seen before
        if _cache_enabled and path in _cache_path:
            self.metadata = _cache_path[path].metadata
        else:
            metadata_path = os.path.join(path, 'METADATA')
            self.metadata = Metadata(path=metadata_path)

        self.name = self.metadata['Name']
        self.version = self.metadata['Version']
        self.path = path

        # register ourselves in the cache for later reuse
        if _cache_enabled and not path in _cache_path:
            _cache_path[path] = self

    def __repr__(self):
        return '<Distribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def _get_records(self, local=False):
        """Yield (path, checksum, size) triples read from RECORD."""
        with self.get_distinfo_file('RECORD') as record:
            record_reader = csv.reader(record, delimiter=',')
            # XXX needs an explaining comment
            for row in record_reader:
                # pad missing trailing fields with None so short rows
                # still unpack into three values
                path, checksum, size = (row[:] +
                                        [None for i in range(len(row), 3)])
                if local:
                    path = path.replace('/', os.sep)
                    path = os.path.join(sys.prefix, path)
                yield path, checksum, size

    def get_resource_path(self, relative_path):
        """Return the destination recorded in RESOURCES for
        *relative_path*, or raise KeyError when it is not listed."""
        with self.get_distinfo_file('RESOURCES') as resources_file:
            resources_reader = csv.reader(resources_file, delimiter=',')
            for relative, destination in resources_reader:
                if relative == relative_path:
                    return destination
        raise KeyError(
            'no resource file with relative path %r is installed' %
            relative_path)

    def list_installed_files(self, local=False):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, md5, size)`` for each line. If *local* is ``True``,
        the returned path is transformed into a local absolute path.
        Otherwise the raw value from RECORD is returned.

        A local absolute path is an absolute path in which occurrences of
        ``'/'`` have been replaced by the system separator given by ``os.sep``.

        :parameter local: flag to say if the path should be returned a local
                          absolute path

        :type local: boolean
        :returns: iterator of (path, md5, size)
        """
        return self._get_records(local)

    def uses(self, path):
        """
        Returns ``True`` if path is listed in ``RECORD``. *path* can be a local
        absolute path or a relative ``'/'``-separated path.

        :rtype: boolean
        """
        for p, checksum, size in self._get_records():
            # accept either the raw RECORD value or its absolute form
            local_absolute = os.path.join(sys.prefix, p)
            if path == p or path == local_absolute:
                return True
        return False

    def get_distinfo_file(self, path, binary=False):
        """
        Returns a file located under the ``.dist-info`` directory. Returns a
        ``file`` instance for the file pointed by *path*.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`PackagingError` is raised
        :type path: string
        :parameter binary: If *binary* is ``True``, opens the file in read-only
                           binary mode (``rb``), otherwise opens it in
                           read-only mode (``r``).
        :rtype: file object
        """
        open_flags = 'r'
        if binary:
            open_flags += 'b'

        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise PackagingError(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise PackagingError('invalid path for a dist-info file: %r' %
                                 path)

        path = os.path.join(self.path, path)
        return open(path, open_flags)

    def list_distinfo_files(self, local=False):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :parameter local: If *local* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``RECORD`` is returned.
        :type local: boolean
        :returns: iterator of paths
        """
        for path, checksum, size in self._get_records(local):
            yield path

    def __eq__(self, other):
        # identity of a distribution is its .dist-info path
        return isinstance(other, Distribution) and self.path == other.path

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
|
||||
|
||||
|
||||
class EggInfoDistribution:
    """Created with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory."""

    name = ''
    """The name of the distribution."""

    version = ''
    """The version of the distribution."""

    metadata = None
    """A :class:`packaging.metadata.Metadata` instance loaded with
    the distribution's ``METADATA`` file."""

    # parses one setuptools-style requirement line, e.g.
    # "name >=1.0, <2.0 [extra]" into name/first/rest/extras groups
    _REQUIREMENT = re.compile(
        r'(?P<name>[-A-Za-z0-9_.]+)\s*'
        r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*'
        r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*'
        r'(?P<extras>\[.*\])?')

    def __init__(self, path):
        self.path = path
        # short-circuit through the cache when this path was seen before
        if _cache_enabled and path in _cache_path_egg:
            self.metadata = _cache_path_egg[path].metadata
            self.name = self.metadata['Name']
            self.version = self.metadata['Version']
            return

        # reused from Distribute's pkg_resources
        def yield_lines(strs):
            """Yield non-empty/non-comment lines of a ``basestring``
            or sequence"""
            if isinstance(strs, str):
                for s in strs.splitlines():
                    s = s.strip()
                    # skip blank lines/comments
                    if s and not s.startswith('#'):
                        yield s
            else:
                for ss in strs:
                    for s in yield_lines(ss):
                        yield s

        requires = None

        if path.endswith('.egg'):
            if os.path.isdir(path):
                # unpacked egg: plain files under EGG-INFO/
                meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
                self.metadata = Metadata(path=meta_path)
                try:
                    req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
                    with open(req_path, 'r') as fp:
                        requires = fp.read()
                except IOError:
                    requires = None
            else:
                # zipped egg: read members through zipimport
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = io.StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                self.metadata = Metadata(fileobj=fileobj)
                try:
                    requires = zipf.get_data('EGG-INFO/requires.txt')
                except IOError:
                    requires = None
            self.name = self.metadata['Name']
            self.version = self.metadata['Version']

        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                path = os.path.join(path, 'PKG-INFO')
                # NOTE(review): at this point 'path' already ends with
                # PKG-INFO, so joining 'requires.txt' onto it can never
                # name an existing file and this always takes the IOError
                # branch — confirm whether the join should use the
                # original directory instead.
                try:
                    with open(os.path.join(path, 'requires.txt'), 'r') as fp:
                        requires = fp.read()
                except IOError:
                    requires = None
            self.metadata = Metadata(path=path)
            # NOTE(review): lowercase 'name' here, while every other
            # lookup in this module uses 'Name' — confirm intent.
            self.name = self.metadata['name']
            self.version = self.metadata['Version']

        else:
            raise ValueError('path must end with .egg-info or .egg, got %r' %
                             path)

        if requires is not None:
            if self.metadata['Metadata-Version'] == '1.1':
                # we can't have 1.1 metadata *and* Setuptools requires
                for field in ('Obsoletes', 'Requires', 'Provides'):
                    del self.metadata[field]

        reqs = []

        if requires is not None:
            for line in yield_lines(requires):
                if line.startswith('['):
                    # an "[extra]" section starts: everything below it is
                    # extras-only, which this module does not support
                    logger.warning(
                        'extensions in requires.txt are not supported '
                        '(used by %r %s)', self.name, self.version)
                    break
                else:
                    match = self._REQUIREMENT.match(line.strip())
                    if not match:
                        # this happens when we encounter extras; since they
                        # are written at the end of the file we just exit
                        break
                    else:
                        if match.group('extras'):
                            # NOTE(review): msg is a tuple and an extra
                            # argument is passed to logger.warning; this
                            # logging call looks broken — confirm.
                            msg = ('extra requirements are not supported '
                                   '(used by %r %s)', self.name, self.version)
                            logger.warning(msg, self.name)
                        name = match.group('name')
                        version = None
                        if match.group('first'):
                            version = match.group('first')
                            if match.group('rest'):
                                version += match.group('rest')
                            version = version.replace(' ', '')  # trim spaces
                        if version is None:
                            reqs.append(name)
                        else:
                            # re-express as a PEP 345 predicate
                            reqs.append('%s (%s)' % (name, version))

        if len(reqs) > 0:
            self.metadata['Requires-Dist'] += reqs

        if _cache_enabled:
            _cache_path_egg[self.path] = self

    def __repr__(self):
        return '<EggInfoDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def list_installed_files(self, local=False):
        """Return a list of (path, md5, size) for the files of this
        distribution, computed by hashing/statting the files on disk."""

        def _md5(path):
            # hash of the full file content
            with open(path, 'rb') as f:
                content = f.read()
            return md5(content).hexdigest()

        def _size(path):
            return os.stat(path).st_size

        path = self.path
        if local:
            path = path.replace('/', os.sep)

        # XXX What about scripts and data files ?
        if os.path.isfile(path):
            return [(path, _md5(path), _size(path))]
        else:
            files = []
            for root, dir, files_ in os.walk(path):
                for item in files_:
                    item = os.path.join(root, item)
                    files.append((item, _md5(item), _size(item)))
            return files

        # NOTE(review): unreachable — both branches above return.
        return []

    def uses(self, path):
        # egg(-info) distributions carry no RECORD, so file ownership
        # cannot be determined
        return False

    def __eq__(self, other):
        # identity of a distribution is its .egg(-info) path
        return (isinstance(other, EggInfoDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
|
||||
|
||||
|
||||
def distinfo_dirname(name, version):
    """Return the escaped ``<name>-<version>.dist-info`` directory name.

    ``'-'`` characters in *name* are replaced with ``'_'`` so that the
    only remaining dashes are the name/version separator and the one in
    ``'dist-info'``.  *version* is normalized with
    :func:`suggest_normalized_version` when possible and used verbatim
    otherwise.

    :type name: string
    :type version: string
    :returns: directory name
    :rtype: string
    """
    escaped = name.replace('-', '_')
    normalized = suggest_normalized_version(version)
    # Because this is a lookup procedure, something will be returned even
    # if it is a version that cannot be normalized
    if normalized is None:
        # Unable to achieve normality?
        normalized = version
    return '-'.join([escaped, normalized]) + '.dist-info'
|
||||
|
||||
|
||||
def get_distributions(use_egg_info=False, paths=sys.path):
    """Iterate over the installed distributions found on *paths*.

    Yields a :class:`Distribution` for every ``.dist-info`` directory
    and, when *use_egg_info* is true, an :class:`EggInfoDistribution`
    for every ``.egg``/``.egg-info`` entry as well.

    :rtype: iterator of :class:`Distribution` and
            :class:`EggInfoDistribution` instances
    """
    if not _cache_enabled:
        # cache disabled: scan the filesystem directly
        for dist in _yield_distributions(True, use_egg_info, paths):
            yield dist
        return

    _generate_cache(use_egg_info, paths)

    for dist in _cache_path.values():
        yield dist

    if use_egg_info:
        for dist in _cache_path_egg.values():
            yield dist
|
||||
|
||||
|
||||
def get_distribution(name, use_egg_info=False, paths=None):
    """Return the first installed distribution matching *name*, or None.

    Scans every directory in *paths* (default: ``sys.path``) for a
    ``.dist-info`` directory whose ``METADATA`` ``Name`` field matches
    *name*.  When *use_egg_info* is true, ``.egg-info`` files and
    directories are considered as well and may yield an
    :class:`EggInfoDistribution`.

    Only the first match is returned, as no more than one value is
    expected; ``None`` is returned when nothing matches.

    :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
    """
    if paths is None:  # idiom fix: was 'paths == None'
        paths = sys.path

    if not _cache_enabled:
        for dist in _yield_distributions(True, use_egg_info, paths):
            if dist.name == name:
                return dist
    else:
        _generate_cache(use_egg_info, paths)

        if name in _cache_name:
            return _cache_name[name][0]
        elif use_egg_info and name in _cache_name_egg:
            return _cache_name_egg[name][0]
        else:
            return None
|
||||
|
||||
|
||||
def obsoletes_distribution(name, version=None, use_egg_info=False):
    """Yield every installed distribution that obsoletes *name*.

    If a *version* is provided, it will be used to filter the results.
    If the argument *use_egg_info* is set to ``True``, then ``.egg-info``
    distributions will be considered as well.

    :type name: string
    :type version: string
    """
    for dist in get_distributions(use_egg_info):
        entries = (dist.metadata['Obsoletes-Dist'] +
                   dist.metadata['Obsoletes'])
        for entry in entries:
            components = entry.split(' ', 1)
            if len(components) == 1 or version is None:
                # bare name, or no version filter requested: compare names
                if components[0] == name:
                    yield dist
                    break
                continue
            try:
                predicate = VersionPredicate(entry)
            except ValueError:
                raise PackagingError(
                    'distribution %r has ill-formed obsoletes field: '
                    '%r' % (dist.name, entry))
            if components[0] == name and predicate.match(version):
                yield dist
                break
|
||||
|
||||
|
||||
def provides_distribution(name, version=None, use_egg_info=False):
    """Yield every installed distribution that provides *name*.

    If a *version* is provided, it is used to filter the results:
    only distributions whose ``Provides(-Dist)`` entry for *name*
    accepts that version are yielded.  When *use_egg_info* is true,
    ``.egg-info`` files and directories are considered as well and may
    yield :class:`EggInfoDistribution` instances.

    :parameter version: a version specifier that indicates the version
                        required, conforming to the format in ``PEP-345``
    :type name: string
    :type version: string
    :raises PackagingError: for an invalid name/version pair or a
                            malformed Provides field.
    """
    predicate = None
    if version is not None:  # idiom fix: was 'not version is None'
        try:
            predicate = VersionPredicate(name + ' (' + version + ')')
        except ValueError:
            raise PackagingError('invalid name or version: %r, %r' %
                                 (name, version))

    for dist in get_distributions(use_egg_info):
        provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides']

        for p in provided:
            p_components = p.rsplit(' ', 1)
            if len(p_components) == 1 or predicate is None:
                # bare name, or no version filter: compare names only
                if name == p_components[0]:
                    yield dist
                    break
            else:
                p_name, p_ver = p_components
                # versioned entries must look like 'name (version)'
                if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
                    raise PackagingError(
                        'distribution %r has invalid Provides field: %r' %
                        (dist.name, p))
                p_ver = p_ver[1:-1]  # trim off the parenthesis
                if p_name == name and predicate.match(p_ver):
                    yield dist
                    break
|
||||
|
||||
|
||||
def get_file_users(path):
    """
    Iterate over all distributions and yield each one that uses *path*.

    :parameter path: can be a local absolute path or a relative
                     ``'/'``-separated path.
    :type path: string
    :rtype: iterator of :class:`Distribution` instances
    """
    # Generator on purpose: get_distributions() is not called until the
    # caller starts iterating, mirroring the lazy behavior of 'yield'.
    for distribution in get_distributions():
        if distribution.uses(path):
            yield distribution
|
||||
270
Lib/packaging/depgraph.py
Normal file
270
Lib/packaging/depgraph.py
Normal file
|
|
@ -0,0 +1,270 @@
|
|||
"""Class and functions dealing with dependencies between distributions.
|
||||
|
||||
This module provides a DependencyGraph class to represent the
|
||||
dependencies between distributions. Auxiliary functions can generate a
|
||||
graph, find reverse dependencies, and print a graph in DOT format.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
from io import StringIO
|
||||
from packaging.errors import PackagingError
|
||||
from packaging.version import VersionPredicate, IrrationalVersionError
|
||||
|
||||
__all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists',
|
||||
'graph_to_dot']
|
||||
|
||||
|
||||
class DependencyGraph:
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided).  Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``.  An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``.  If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        # dist -> [(other, label), ...]: outgoing edges
        self.adjacency_list = {}
        # dist -> [predecessor, ...]: incoming edges, for fast reverse walks
        self.reverse_list = {}
        # dist -> [requirement-string, ...]: unsatisfied requirements
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`packaging.database.Distribution` or
                            :class:`packaging.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []
        self.missing[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`packaging.database.Distribution` or
                 :class:`packaging.database.EggInfoDistribution`
        :type y: :class:`packaging.database.Distribution` or
                 :class:`packaging.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so be careful not to duplicate
        # entries in the predecessor list
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`packaging.database.Distribution` or
                            :class:`packaging.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        self.missing[distribution].append(requirement)

    def _repr_dist(self, dist):
        # "name version", e.g. "choxie 2.0.0.9"
        return '%s %s' % (dist.name, dist.metadata['Version'])

    def repr_node(self, dist, level=1):
        """Return a string representation of the subgraph rooted at *dist*.

        Direct dependencies are indented by *level*; deeper dependencies
        one level more, recursively.  Cycles are not detected, as in the
        original implementation.
        """
        output = [self._repr_dist(dist)]
        for other, label in self.adjacency_list[dist]:
            # do not shadow the 'dist' parameter with the child's repr
            line = self._repr_dist(other)
            if label is not None:
                line = '%s [%s]' % (line, label)
            output.append(' ' * level + line)
            suboutput = self.repr_node(other, level + 1)
            # drop the first line: the child itself was already printed
            output.extend(suboutput.split('\n')[1:])
        return '\n'.join(output)

    def __repr__(self):
        """Representation of the graph: one subgraph per known node."""
        # only the keys are needed, not the adjacency values
        return '\n'.join(self.repr_node(dist)
                         for dist in self.adjacency_list)
|
||||
|
||||
|
||||
def graph_to_dot(graph, f, skip_disconnected=True):
    """Write a DOT output for the graph to the provided file *f*.

    If *skip_disconnected* is set to ``True``, then all distributions
    that are not dependent on any other distribution are skipped.

    :type f: has to support ``file``-like operations
    :type skip_disconnected: ``bool``
    """
    disconnected = []

    f.write("digraph dependencies {\n")
    for dist, adjs in graph.adjacency_list.items():
        # remember nodes without outgoing edges when they must be shown
        if not adjs and not skip_disconnected:
            disconnected.append(dist)
        for other, label in adjs:
            if label is not None:
                f.write('"%s" -> "%s" [label="%s"]\n' %
                        (dist.name, other.name, label))
            else:
                f.write('"%s" -> "%s"\n' % (dist.name, other.name))
    if not skip_disconnected and disconnected:
        # group the edge-less nodes in their own DOT subgraph
        f.write('subgraph disconnected {\n')
        f.write('label = "Disconnected"\n')
        f.write('bgcolor = red\n')

        for dist in disconnected:
            f.write('"%s"' % dist.name)
            f.write('\n')
        f.write('}\n')
    f.write('}\n')
|
||||
|
||||
|
||||
def generate_graph(dists):
    """Generate a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`packaging.database.Distribution` and
                 :class:`packaging.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    :raises PackagingError: if a distribution has a malformed
                            ``Provides`` entry
    """
    graph = DependencyGraph()
    provided = {}  # maps names to lists of (version, dist) tuples

    # first, build the graph and find out what each distribution provides
    for dist in dists:
        graph.add_distribution(dist)
        # every distribution implicitly provides its own name/version in
        # addition to its explicit Provides-Dist / Provides entries
        provides = (dist.metadata['Provides-Dist'] +
                    dist.metadata['Provides'] +
                    ['%s (%s)' % (dist.name, dist.metadata['Version'])])

        for p in provides:
            comps = p.strip().rsplit(" ", 1)
            name = comps[0]
            version = None
            if len(comps) == 2:
                version = comps[1]
                if len(version) < 3 or version[0] != '(' or version[-1] != ')':
                    # NOTE: the original message concatenated
                    # '...ill formed' and 'provides field...' without a
                    # separating space; fixed here.
                    raise PackagingError('Distribution %s has ill formed '
                                         'provides field: %s' %
                                         (dist.name, p))
                version = version[1:-1]  # trim off parenthesis
            provided.setdefault(name, []).append((version, dist))

    # now make the edges
    for dist in dists:
        requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
        for req in requires:
            try:
                predicate = VersionPredicate(req)
            except IrrationalVersionError:
                # XXX compat-mode if cannot read the version
                name = req.split()[0]
                predicate = VersionPredicate(name)

            name = predicate.name

            if name not in provided:
                graph.add_missing(dist, req)
            else:
                matched = False
                for version, provider in provided[name]:
                    try:
                        match = predicate.match(version)
                    except IrrationalVersionError:
                        # XXX small compat-mode: accept a bare,
                        # space-free version string.  The original
                        # compared the *list* from split() to the int 1,
                        # which was always False; the length was meant.
                        match = len(version.split(' ')) == 1

                    if match:
                        graph.add_edge(dist, provider, req)
                        matched = True
                        break
                if not matched:
                    graph.add_missing(dist, req)
    return graph
|
||||
|
||||
|
||||
def dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    :raises ValueError: if *dist* is not a member of *dists*
    """
    if dist not in dists:
        raise ValueError('The given distribution is not a member of the list')
    graph = generate_graph(dists)

    dep = [dist]  # dependent distributions; seeded to stop cycles
    # copy so that draining the fringe below does not mutate the
    # graph's own reverse_list entry (the original aliased it)
    fringe = list(graph.reverse_list[dist])  # nodes we should inspect

    while fringe:
        node = fringe.pop()
        dep.append(node)
        for prev in graph.reverse_list[node]:
            if prev not in dep:
                fringe.append(prev)

    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
    return dep
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: build the dependency graph of the
    installed distributions, report missing dependencies, then either
    print the graph (no arguments) or write it as a DOT file
    (``-d``/``--dot`` [filename]).  Exits via sys.exit in every branch.
    """
    from packaging.database import get_distributions
    # Capture anything written to stderr while scanning distributions so
    # it can be included in the error report or echoed afterwards.
    tempout = StringIO()
    try:
        old = sys.stderr
        sys.stderr = tempout
        try:
            dists = list(get_distributions(use_egg_info=True))
            graph = generate_graph(dists)
        finally:
            # always restore stderr, even if the scan raised
            sys.stderr = old
    except Exception as e:
        tempout.seek(0)
        # note: 'tempout' is rebound from StringIO to str here
        tempout = tempout.read()
        print('Could not generate the graph\n%s\n%s\n' % (tempout, e))
        sys.exit(1)

    # Report unsatisfied requirements before any output mode.
    for dist, reqs in graph.missing.items():
        if len(reqs) > 0:
            print("Warning: Missing dependencies for %s:" % dist.name,
                  ", ".join(reqs))
    # XXX replace with argparse
    if len(sys.argv) == 1:
        print('Dependency graph:')
        print(' ' + repr(graph).replace('\n', '\n '))
        sys.exit(0)
    elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'):
        if len(sys.argv) > 2:
            filename = sys.argv[2]
        else:
            filename = 'depgraph.dot'

        with open(filename, 'w') as f:
            graph_to_dot(graph, f, True)
        # echo whatever the scan wrote to stderr (captured above)
        tempout.seek(0)
        tempout = tempout.read()
        print(tempout)
        print('Dot file written at "%s"' % filename)
        sys.exit(0)
    else:
        print('Supported option: -d [filename]')
        sys.exit(1)
|
||||
|
||||
|
||||
# Allow running this module as a script to inspect the dependency graph.
if __name__ == '__main__':
    main()
|
||||
819
Lib/packaging/dist.py
Normal file
819
Lib/packaging/dist.py
Normal file
|
|
@ -0,0 +1,819 @@
|
|||
"""Class representing the distribution being built/installed/etc."""
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from packaging.errors import (PackagingOptionError, PackagingArgError,
|
||||
PackagingModuleError, PackagingClassError)
|
||||
from packaging.fancy_getopt import FancyGetopt
|
||||
from packaging.util import strtobool, resolve_name
|
||||
from packaging import logger
|
||||
from packaging.metadata import Metadata
|
||||
from packaging.config import Config
|
||||
from packaging.command import get_command_class, STANDARD_COMMANDS
|
||||
|
||||
# Regex to define acceptable Packaging command names. This is not *quite*
# the same as a Python NAME -- I don't allow leading underscores. The fact
# that they're very similar is no coincidence; the default naming scheme is
# to look for a Python module named after the command.
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')

USAGE = """\
usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
or: %(script)s --help [cmd1 cmd2 ...]
or: %(script)s --help-commands
or: %(script)s cmd --help
"""


def gen_usage(script_name):
    """Return the top-level usage message, with the base name of
    *script_name* substituted in.
    """
    return USAGE % {'script': os.path.basename(script_name)}
|
||||
|
||||
|
||||
class Distribution:
|
||||
"""The core of the Packaging. Most of the work hiding behind 'setup'
|
||||
is really done within a Distribution instance, which farms the work out
|
||||
to the Packaging commands specified on the command line.
|
||||
|
||||
Setup scripts will almost never instantiate Distribution directly,
|
||||
unless the 'setup()' function is totally inadequate to their needs.
|
||||
However, it is conceivable that a setup script might wish to subclass
|
||||
Distribution for some specialized purpose, and then pass the subclass
|
||||
to 'setup()' as the 'distclass' keyword argument. If so, it is
|
||||
necessary to respect the expectations that 'setup' has of Distribution.
|
||||
See the code for 'setup()', in run.py, for details.
|
||||
"""
|
||||
|
||||
# 'global_options' describes the command-line options that may be
|
||||
# supplied to the setup script prior to any actual commands.
|
||||
# Eg. "./setup.py -n" or "./setup.py --dry-run" both take advantage of
|
||||
# these global options. This list should be kept to a bare minimum,
|
||||
# since every global option is also valid as a command option -- and we
|
||||
# don't want to pollute the commands with too many options that they
|
||||
# have minimal control over.
|
||||
global_options = [
|
||||
('dry-run', 'n', "don't actually do anything"),
|
||||
('help', 'h', "show detailed help message"),
|
||||
('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
|
||||
]
|
||||
|
||||
# 'common_usage' is a short (2-3 line) string describing the common
|
||||
# usage of the setup script.
|
||||
common_usage = """\
|
||||
Common commands: (see '--help-commands' for more)
|
||||
|
||||
setup.py build will build the package underneath 'build/'
|
||||
setup.py install will install the package
|
||||
"""
|
||||
|
||||
# options that are not propagated to the commands
|
||||
display_options = [
|
||||
('help-commands', None,
|
||||
"list all available commands"),
|
||||
('name', None,
|
||||
"print package name"),
|
||||
('version', 'V',
|
||||
"print package version"),
|
||||
('fullname', None,
|
||||
"print <package name>-<version>"),
|
||||
('author', None,
|
||||
"print the author's name"),
|
||||
('author-email', None,
|
||||
"print the author's email address"),
|
||||
('maintainer', None,
|
||||
"print the maintainer's name"),
|
||||
('maintainer-email', None,
|
||||
"print the maintainer's email address"),
|
||||
('contact', None,
|
||||
"print the maintainer's name if known, else the author's"),
|
||||
('contact-email', None,
|
||||
"print the maintainer's email address if known, else the author's"),
|
||||
('url', None,
|
||||
"print the URL for this package"),
|
||||
('license', None,
|
||||
"print the license of the package"),
|
||||
('licence', None,
|
||||
"alias for --license"),
|
||||
('description', None,
|
||||
"print the package description"),
|
||||
('long-description', None,
|
||||
"print the long package description"),
|
||||
('platforms', None,
|
||||
"print the list of platforms"),
|
||||
('classifier', None,
|
||||
"print the list of classifiers"),
|
||||
('keywords', None,
|
||||
"print the list of keywords"),
|
||||
('provides', None,
|
||||
"print the list of packages/modules provided"),
|
||||
('requires', None,
|
||||
"print the list of packages/modules required"),
|
||||
('obsoletes', None,
|
||||
"print the list of packages/modules made obsolete"),
|
||||
('use-2to3', None,
|
||||
"use 2to3 to make source python 3.x compatible"),
|
||||
('convert-2to3-doctests', None,
|
||||
"use 2to3 to convert doctests in seperate text files"),
|
||||
]
|
||||
display_option_names = [x[0].replace('-', '_') for x in display_options]
|
||||
|
||||
# negative options are options that exclude other options
|
||||
negative_opt = {}
|
||||
|
||||
# -- Creation/initialization methods -------------------------------
|
||||
    def __init__(self, attrs=None):
        """Construct a new Distribution instance: initialize all the
        attributes of a Distribution, and then use 'attrs' (a dictionary
        mapping attribute names to values) to assign some of those
        attributes their "real" values.  (Any attributes not mentioned in
        'attrs' will be assigned to some null value: 0, None, an empty list
        or dictionary, etc.)  Most importantly, initialize the
        'command_obj' attribute to the empty dictionary; this will be
        filled in with real command objects by 'parse_command_line()'.

        NOTE(review): when 'attrs' contains an 'options' key, that key is
        deleted from the caller's dictionary (see below) -- the argument
        is mutated in place.
        """

        # Default values for our command-line options
        self.dry_run = False
        self.help = False
        for attr in self.display_option_names:
            setattr(self, attr, False)

        # Store the configuration
        self.config = Config(self)

        # Store the distribution metadata (name, version, author, and so
        # forth) in a separate object -- we're getting to have enough
        # information here (and enough command-line options) that it's
        # worth it.
        self.metadata = Metadata()

        # 'cmdclass' maps command names to class objects, so we
        # can 1) quickly figure out which class to instantiate when
        # we need to create a new command object, and 2) have a way
        # for the setup script to override command classes
        self.cmdclass = {}

        # 'script_name' and 'script_args' are usually set to sys.argv[0]
        # and sys.argv[1:], but they can be overridden when the caller is
        # not necessarily a setup script run from the command line.
        self.script_name = None
        self.script_args = None

        # 'command_options' is where we store command options between
        # parsing them (from config files, the command line, etc.) and when
        # they are actually needed -- ie. when the command in question is
        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
        #   command_options = { command_name : { option : (source, value) } }
        self.command_options = {}

        # 'dist_files' is the list of (command, pyversion, file) that
        # have been created by any dist commands run so far. This is
        # filled regardless of whether the run is dry or not. pyversion
        # gives sysconfig.get_python_version() if the dist file is
        # specific to a Python version, 'any' if it is good for all
        # Python versions on the target platform, and '' for a source
        # file. pyversion should not be used to specify minimum or
        # maximum required Python versions; use the metainfo for that
        # instead.
        self.dist_files = []

        # These options are really the business of various commands, rather
        # than of the Distribution itself.  We provide aliases for them in
        # Distribution as a convenience to the developer.
        self.packages = []
        self.package_data = {}
        self.package_dir = None
        self.py_modules = []
        self.libraries = []
        self.headers = []
        self.ext_modules = []
        self.ext_package = None
        self.include_dirs = []
        self.extra_path = None
        self.scripts = []
        self.data_files = {}
        self.password = ''
        self.use_2to3 = False
        self.convert_2to3_doctests = []
        self.extra_files = []

        # And now initialize bookkeeping stuff that can't be supplied by
        # the caller at all.  'command_obj' maps command names to
        # Command instances -- that's how we enforce that every command
        # class is a singleton.
        self.command_obj = {}

        # 'have_run' maps command names to boolean values; it keeps track
        # of whether we have actually run a particular command, to make it
        # cheap to "run" a command whenever we think we might need to -- if
        # it's already been done, no need for expensive filesystem
        # operations, we just check the 'have_run' dictionary and carry on.
        # It's only safe to query 'have_run' for a command class that has
        # been instantiated -- a false value will be inserted when the
        # command object is created, and replaced with a true value when
        # the command is successfully run.  Thus it's probably best to use
        # '.get()' rather than a straight lookup.
        self.have_run = {}

        # Now we'll use the attrs dictionary (ultimately, keyword args from
        # the setup script) to possibly override any or all of these
        # distribution options.

        if attrs is not None:
            # Pull out the set of command options and work on them
            # specifically.  Note that this order guarantees that aliased
            # command options will override any supplied redundantly
            # through the general options dictionary.
            options = attrs.get('options')
            if options is not None:
                # mutates the caller's dict -- see docstring note
                del attrs['options']
                for command, cmd_options in options.items():
                    opt_dict = self.get_option_dict(command)
                    for opt, val in cmd_options.items():
                        opt_dict[opt] = ("setup script", val)

            # Now work on the rest of the attributes.  Any attribute that's
            # not already defined is invalid!
            for key, val in attrs.items():
                if self.metadata.is_metadata_field(key):
                    self.metadata[key] = val
                elif hasattr(self, key):
                    setattr(self, key, val)
                else:
                    logger.warning(
                        'unknown argument given to Distribution: %r', key)

        # no-user-cfg is handled before other command line args
        # because other args override the config files, and this
        # one is needed before we can load the config files.
        # If attrs['script_args'] wasn't passed, assume false.
        #
        # This also make sure we just look at the global options
        self.want_user_cfg = True

        if self.script_args is not None:
            for arg in self.script_args:
                # stop at the first non-option token: only global options
                # (before any command name) are inspected here
                if not arg.startswith('-'):
                    break
                if arg == '--no-user-cfg':
                    self.want_user_cfg = False
                    break

        self.finalize_options()
|
||||
|
||||
def get_option_dict(self, command):
|
||||
"""Get the option dictionary for a given command. If that
|
||||
command's option dictionary hasn't been created yet, then create it
|
||||
and return the new dictionary; otherwise, return the existing
|
||||
option dictionary.
|
||||
"""
|
||||
d = self.command_options.get(command)
|
||||
if d is None:
|
||||
d = self.command_options[command] = {}
|
||||
return d
|
||||
|
||||
    def get_fullname(self):
        """Return the distribution's full name, as computed by its
        metadata object."""
        return self.metadata.get_fullname()
|
||||
|
||||
def dump_option_dicts(self, header=None, commands=None, indent=""):
|
||||
from pprint import pformat
|
||||
|
||||
if commands is None: # dump all command option dicts
|
||||
commands = sorted(self.command_options)
|
||||
|
||||
if header is not None:
|
||||
logger.info(indent + header)
|
||||
indent = indent + " "
|
||||
|
||||
if not commands:
|
||||
logger.info(indent + "no commands known yet")
|
||||
return
|
||||
|
||||
for cmd_name in commands:
|
||||
opt_dict = self.command_options.get(cmd_name)
|
||||
if opt_dict is None:
|
||||
logger.info(indent + "no option dict for %r command",
|
||||
cmd_name)
|
||||
else:
|
||||
logger.info(indent + "option dict for %r command:", cmd_name)
|
||||
out = pformat(opt_dict)
|
||||
for line in out.split('\n'):
|
||||
logger.info(indent + " " + line)
|
||||
|
||||
    # -- Config file finding/parsing methods ---------------------------
    # XXX to be removed
    def parse_config_files(self, filenames=None):
        """Delegate config-file parsing to the Config object."""
        return self.config.parse_config_files(filenames)

    def find_config_files(self):
        """Delegate config-file discovery to the Config object."""
        return self.config.find_config_files()
|
||||
|
||||
# -- Command-line parsing methods ----------------------------------
|
||||
|
||||
    def parse_command_line(self):
        """Parse the setup script's command line, taken from the
        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
        -- see 'setup()' in run.py).  This list is first processed for
        "global options" -- options that set attributes of the Distribution
        instance.  Then, it is alternately scanned for Packaging commands
        and options for that command.  Each new command terminates the
        options for the previous command.  The allowed options for a
        command are determined by the 'user_options' attribute of the
        command class -- thus, we have to be able to load command classes
        in order to parse the command line.  Any error in that 'options'
        attribute raises PackagingGetoptError; any error on the
        command line raises PackagingArgError.  If no Packaging commands
        were found on the command line, raises PackagingArgError.  Return
        true if command line was successfully parsed and we should carry
        on with executing commands; false if no errors but we shouldn't
        execute commands (currently, this only happens if user asks for
        help).
        """
        #
        # We now have enough information to show the Macintosh dialog
        # that allows the user to interactively specify the "command line".
        #
        toplevel_options = self._get_toplevel_options()

        # We have to parse the command line a bit at a time -- global
        # options, then the first command, then its options, and so on --
        # because each command will be handled by a different class, and
        # the options that are valid for a particular class aren't known
        # until we have loaded the command class, which doesn't happen
        # until we know what the command is.

        self.commands = []
        parser = FancyGetopt(toplevel_options + self.display_options)
        parser.set_negative_aliases(self.negative_opt)
        # accept the British spelling as an alias
        parser.set_aliases({'licence': 'license'})
        args = parser.getopt(args=self.script_args, object=self)
        option_order = parser.get_option_order()

        # for display options we return immediately
        # (returns None, i.e. falsy: don't execute commands)
        if self.handle_display_options(option_order):
            return
        while args:
            # each call consumes one command plus its options
            args = self._parse_command_opts(parser, args)
            if args is None:            # user asked for help (and got it)
                return

        # Handle the cases of --help as a "global" option, ie.
        # "setup.py --help" and "setup.py --help command ...".  For the
        # former, we show global options (--dry-run, etc.)
        # and display-only options (--name, --version, etc.); for the
        # latter, we omit the display-only options and show help for
        # each command listed on the command line.
        if self.help:
            self._show_help(parser,
                            display_options=len(self.commands) == 0,
                            commands=self.commands)
            return

        # truthy: caller should proceed to run the parsed commands
        return 1
|
||||
|
||||
    def _get_toplevel_options(self):
        """Return the non-display options recognized at the top level.

        This includes options that are recognized *only* at the top
        level as well as options recognized for commands.

        NOTE(review): currently this is exactly 'global_options'; the
        returned list object is shared, not copied.
        """
        return self.global_options
|
||||
|
||||
    def _parse_command_opts(self, parser, args):
        """Parse the command-line options for a single command.
        'parser' must be a FancyGetopt instance; 'args' must be the list
        of arguments, starting with the current command (whose options
        we are about to parse).  Returns a new version of 'args' with
        the next command at the front of the list; will be the empty
        list if there are no more commands on the command line.  Returns
        None if the user asked for help on this command.
        """
        # Pull the current command from the head of the command line
        command = args[0]
        if not command_re.match(command):
            raise SystemExit("invalid command name %r" % command)
        self.commands.append(command)

        # Dig up the command class that implements this command, so we
        # 1) know that it's a valid command, and 2) know which options
        # it takes.
        try:
            cmd_class = get_command_class(command)
        except PackagingModuleError as msg:
            raise PackagingArgError(msg)

        # XXX We want to push this in packaging.command
        #
        # Require that the command class be derived from Command -- want
        # to be sure that the basic "command" interface is implemented.
        for meth in ('initialize_options', 'finalize_options', 'run'):
            if hasattr(cmd_class, meth):
                continue
            raise PackagingClassError(
                'command %r must implement %r' % (cmd_class, meth))

        # Also make sure that the command object provides a list of its
        # known options.
        if not (hasattr(cmd_class, 'user_options') and
                isinstance(cmd_class.user_options, list)):
            raise PackagingClassError(
                "command class %s must provide "
                "'user_options' attribute (a list of tuples)" % cmd_class)

        # If the command class has a list of negative alias options,
        # merge it in with the global negative aliases.  A copy is made
        # so the class-level dict is never mutated.
        negative_opt = self.negative_opt
        if hasattr(cmd_class, 'negative_opt'):
            negative_opt = negative_opt.copy()
            negative_opt.update(cmd_class.negative_opt)

        # Check for help_options in command class.  They have a different
        # format (tuple of four) so we need to preprocess them here.
        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_options = cmd_class.help_options[:]
        else:
            help_options = []

        # All commands support the global options too, just by adding
        # in 'global_options'.
        parser.set_option_table(self.global_options +
                                cmd_class.user_options +
                                help_options)
        parser.set_negative_aliases(negative_opt)
        # parse this command's options; 'args' now holds the remainder
        args, opts = parser.getopt(args[1:])
        if hasattr(opts, 'help') and opts.help:
            self._show_help(parser, display_options=False,
                            commands=[cmd_class])
            # None signals "help shown, stop parsing"
            return

        if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
            help_option_found = False
            for help_option, short, desc, func in cmd_class.help_options:
                if hasattr(opts, help_option.replace('-', '_')):
                    help_option_found = True
                    if hasattr(func, '__call__'):
                        func()
                    else:
                        raise PackagingClassError(
                            "invalid help function %r for help option %r: "
                            "must be a callable object (function, etc.)"
                            % (func, help_option))

            if help_option_found:
                return

        # Put the options from the command line into their official
        # holding pen, the 'command_options' dictionary.
        opt_dict = self.get_option_dict(command)
        for name, value in vars(opts).items():
            opt_dict[name] = ("command line", value)

        return args
|
||||
|
||||
def finalize_options(self):
|
||||
"""Set final values for all the options on the Distribution
|
||||
instance, analogous to the .finalize_options() method of Command
|
||||
objects.
|
||||
"""
|
||||
if getattr(self, 'convert_2to3_doctests', None):
|
||||
self.convert_2to3_doctests = [os.path.join(p)
|
||||
for p in self.convert_2to3_doctests]
|
||||
else:
|
||||
self.convert_2to3_doctests = []
|
||||
|
||||
def _show_help(self, parser, global_options=True, display_options=True,
|
||||
commands=[]):
|
||||
"""Show help for the setup script command line in the form of
|
||||
several lists of command-line options. 'parser' should be a
|
||||
FancyGetopt instance; do not expect it to be returned in the
|
||||
same state, as its option table will be reset to make it
|
||||
generate the correct help text.
|
||||
|
||||
If 'global_options' is true, lists the global options:
|
||||
--dry-run, etc. If 'display_options' is true, lists
|
||||
the "display-only" options: --name, --version, etc. Finally,
|
||||
lists per-command help for every command name or command class
|
||||
in 'commands'.
|
||||
"""
|
||||
# late import because of mutual dependence between these modules
|
||||
from packaging.command.cmd import Command
|
||||
|
||||
if global_options:
|
||||
if display_options:
|
||||
options = self._get_toplevel_options()
|
||||
else:
|
||||
options = self.global_options
|
||||
parser.set_option_table(options)
|
||||
parser.print_help(self.common_usage + "\nGlobal options:")
|
||||
print('')
|
||||
|
||||
if display_options:
|
||||
parser.set_option_table(self.display_options)
|
||||
parser.print_help(
|
||||
"Information display options (just display " +
|
||||
"information, ignore any commands)")
|
||||
print('')
|
||||
|
||||
for command in self.commands:
|
||||
if isinstance(command, type) and issubclass(command, Command):
|
||||
cls = command
|
||||
else:
|
||||
cls = get_command_class(command)
|
||||
if (hasattr(cls, 'help_options') and
|
||||
isinstance(cls.help_options, list)):
|
||||
parser.set_option_table(cls.user_options + cls.help_options)
|
||||
else:
|
||||
parser.set_option_table(cls.user_options)
|
||||
parser.print_help("Options for %r command:" % cls.__name__)
|
||||
print('')
|
||||
|
||||
print(gen_usage(self.script_name))
|
||||
|
||||
def handle_display_options(self, option_order):
    """If there were any non-global "display-only" options
    (--help-commands or the metadata display options) on the command
    line, display the requested info and return true; else return
    false.
    """
    # User just wants a list of commands -- print it and stop
    # processing now (ie. if they ran "setup --help-commands foo bar",
    # "foo bar" is ignored).
    if self.help_commands:
        self.print_commands()
        print()
        print(gen_usage(self.script_name))
        # Fix: was 'return 1'; True is equivalent for every caller and
        # consistent with the boolean returned below.
        return True

    # If the user supplied any of the "display metadata" options,
    # display that metadata in the order in which the options were
    # given on the command line.
    any_display_options = False
    # set comprehension replaces the original build-by-loop
    is_display_option = {option[0] for option in self.display_options}

    for opt, val in option_order:
        if val and opt in is_display_option:
            opt = opt.replace('-', '_')
            value = self.metadata[opt]
            if opt in ('keywords', 'platform'):
                print(','.join(value))
            elif opt in ('classifier', 'provides', 'requires',
                         'obsoletes'):
                print('\n'.join(value))
            else:
                print(value)
            any_display_options = True

    return any_display_options
def print_command_list(self, commands, header, max_length):
    """Print a subset of the list of all commands -- used by
    'print_commands()'.
    """
    print(header + ":")
    for name in commands:
        # a class registered in cmdclass wins over the standard lookup
        klass = self.cmdclass.get(name) or get_command_class(name)
        blurb = getattr(klass, 'description',
                        '(no description available)')
        print(" %-*s %s" % (max_length, name, blurb))
def _get_command_groups(self):
    """Helper function to retrieve all the command class names divided
    into standard commands (listed in
    packaging.command.STANDARD_COMMANDS) and extra commands (given in
    self.cmdclass and not standard commands).
    """
    extras = [name for name in self.cmdclass
              if name not in STANDARD_COMMANDS]
    return STANDARD_COMMANDS, extras
def print_commands(self):
    """Print out a help message listing all available commands with a
    description of each.  The list is divided into standard commands
    (listed in packaging.command.STANDARD_COMMANDS) and extra commands
    (given in self.cmdclass and not standard commands).  The
    descriptions come from the command class attribute 'description'.
    """
    std_commands, extra_commands = self._get_command_groups()

    # width of the first column: longest command name
    max_length = 0
    for name in (std_commands + extra_commands):
        max_length = max(max_length, len(name))

    self.print_command_list(std_commands, "Standard commands",
                            max_length)
    if extra_commands:
        print()
        self.print_command_list(extra_commands, "Extra commands",
                                max_length)
# -- Command class/object methods ----------------------------------
|
||||
|
||||
def get_command_obj(self, command, create=True):
    """Return the command object for 'command'.  Normally this object
    is cached on a previous call to 'get_command_obj()'; if no command
    object for 'command' is in the cache, then we either create and
    return it (if 'create' is true) or return None.
    """
    cmd_obj = self.command_obj.get(command)
    # guard clause: return the cached object, or None when not creating
    if cmd_obj or not create:
        return cmd_obj

    logger.debug("Distribution.get_command_obj(): "
                 "creating %r command object", command)

    cls = get_command_class(command)
    cmd_obj = self.command_obj[command] = cls(self)
    self.have_run[command] = 0

    # Apply any options supplied in config files or on the command
    # line.  (NB. error reporting is lame here: problems are not
    # reported until 'finalize_options()' is called, so the source of
    # the error is lost.)
    options = self.command_options.get(command)
    if options:
        self._set_command_options(cmd_obj, options)

    return cmd_obj
def _set_command_options(self, command_obj, option_dict=None):
    """Set the options for 'command_obj' from 'option_dict'.  Basically
    this means copying elements of a dictionary ('option_dict') to
    attributes of an instance ('command').

    'command_obj' must be a Command instance.  If 'option_dict' is not
    supplied, uses the standard option dictionary for this command
    (from 'self.command_options').
    """
    command_name = command_obj.get_command_name()
    if option_dict is None:
        option_dict = self.get_option_dict(command_name)

    logger.debug(" setting options for %r command:", command_name)

    # Fix: these values do not change while we loop over the options,
    # but the original rebuilt them on every iteration -- compute them
    # once up front.
    try:
        bool_opts = [x.replace('-', '_')
                     for x in command_obj.boolean_options]
    except AttributeError:
        bool_opts = []
    try:
        neg_opt = command_obj.negative_opt
    except AttributeError:
        neg_opt = {}

    for option, (source, value) in option_dict.items():
        logger.debug(" %s = %s (from %s)", option, value, source)
        try:
            is_string = isinstance(value, str)
            # string values of boolean/negative options are parsed;
            # everything else is assigned verbatim
            if option in neg_opt and is_string:
                setattr(command_obj, neg_opt[option],
                        not strtobool(value))
            elif option in bool_opts and is_string:
                setattr(command_obj, option, strtobool(value))
            elif hasattr(command_obj, option):
                setattr(command_obj, option, value)
            else:
                raise PackagingOptionError(
                    "error in %s: command %r has no such option %r" %
                    (source, command_name, option))
        except ValueError as msg:
            # chain the original error for easier debugging
            raise PackagingOptionError(msg) from msg
def get_reinitialized_command(self, command, reinit_subcommands=False):
    """Reinitializes a command to the state it was in when first
    returned by 'get_command_obj()': ie., initialized but not yet
    finalized.  This provides the opportunity to sneak option
    values in programmatically, overriding or supplementing
    user-supplied values from the config files and command line.
    You'll have to re-finalize the command object (by calling
    'finalize_options()' or 'ensure_finalized()') before using it for
    real.

    'command' should be a command name (string) or command object.  If
    'reinit_subcommands' is true, also reinitializes the command's
    sub-commands, as declared by the 'sub_commands' class attribute (if
    it has one).  See the "install_dist" command for an example.  Only
    reinitializes the sub-commands that actually matter, ie. those
    whose test predicates return true.

    Returns the reinitialized command object.
    """
    from packaging.command.cmd import Command
    if isinstance(command, Command):
        command_name = command.get_command_name()
    else:
        command_name = command
        command = self.get_command_obj(command_name)

    # nothing to do if the command was never finalized
    if not command.finalized:
        return command

    command.initialize_options()
    command.finalized = False
    self.have_run[command_name] = 0
    self._set_command_options(command)

    if reinit_subcommands:
        for sub in command.get_sub_commands():
            self.get_reinitialized_command(sub, reinit_subcommands)

    return command
# -- Methods that operate on the Distribution ----------------------
|
||||
|
||||
def run_commands(self):
    """Run each command that was seen on the setup script command line.
    Uses the list of commands found and cache of command objects
    created by 'get_command_obj()'.
    """
    for name in self.commands:
        self.run_command(name)
# -- Methods that operate on its Commands --------------------------
|
||||
|
||||
def run_command(self, command, options=None):
    """Do whatever it takes to run a command (including nothing at all,
    if the command has already been run).  Specifically: if we have
    already created and run the command named by 'command', return
    silently without doing anything.  If the command named by 'command'
    doesn't even have a command object yet, create one.  Then invoke
    'run()' on that command object (or an existing one).
    """
    # already been here, done that? then return silently
    if self.have_run.get(command):
        return

    if options is not None:
        self.command_options[command] = options

    cmd_obj = self.get_command_obj(command)
    cmd_obj.ensure_finalized()

    self.run_command_hooks(cmd_obj, 'pre_hook')
    logger.info("running %s", command)
    cmd_obj.run()
    self.run_command_hooks(cmd_obj, 'post_hook')

    self.have_run[command] = 1
def run_command_hooks(self, cmd_obj, hook_kind):
    """Run hooks registered for that command and phase.

    *cmd_obj* is a finalized command object; *hook_kind* is either
    'pre_hook' or 'post_hook'.
    """
    if hook_kind not in ('pre_hook', 'post_hook'):
        raise ValueError('invalid hook kind: %r' % hook_kind)

    hooks = getattr(cmd_obj, hook_kind, None)
    if hooks is None:
        return

    for hook in hooks.values():
        if isinstance(hook, str):
            # a dotted name: resolve it to the actual callable
            try:
                hook_obj = resolve_name(hook)
            except ImportError as e:
                # Fix: chain the original ImportError for debugging
                raise PackagingModuleError(e) from e
        else:
            hook_obj = hook

        # Fix: callable() is the idiomatic spelling of the original
        # hasattr(hook_obj, '__call__') check
        if not callable(hook_obj):
            raise PackagingOptionError('hook %r is not callable' % hook)

        logger.info('running %s %s for command %s',
                    hook_kind, hook, cmd_obj.get_command_name())
        hook_obj(cmd_obj)
# -- Distribution query methods ------------------------------------
|
||||
def has_pure_modules(self):
    """True if any pure-Python packages or modules are declared."""
    return bool(self.packages or self.py_modules)
def has_ext_modules(self):
    """Whether any extension modules are declared.

    Returns True when non-empty; otherwise returns the falsy value
    itself (None or the empty container), preserving the original
    truthiness-based contract.
    """
    mods = self.ext_modules
    if not mods:
        return mods
    return True
def has_c_libraries(self):
    """Whether any C libraries are declared.

    Returns True when non-empty; otherwise returns the falsy value
    itself, preserving the original truthiness-based contract.
    """
    libs = self.libraries
    if not libs:
        return libs
    return True
def has_modules(self):
    """True if the distribution has either pure or extension modules."""
    if self.has_pure_modules():
        return True
    return self.has_ext_modules()
def has_headers(self):
    """Whether any header files are declared.

    Returns True when non-empty; otherwise returns the falsy value
    itself, preserving the original truthiness-based contract.
    """
    hdrs = self.headers
    if not hdrs:
        return hdrs
    return True
def has_scripts(self):
    """Whether any scripts are declared.

    Returns True when non-empty; otherwise returns the falsy value
    itself, preserving the original truthiness-based contract.
    """
    scripts = self.scripts
    if not scripts:
        return scripts
    return True
def has_data_files(self):
    """Whether any data files are declared.

    Returns True when non-empty; otherwise returns the falsy value
    itself, preserving the original truthiness-based contract.
    """
    data = self.data_files
    if not data:
        return data
    return True
def is_pure(self):
    """True if this distribution contains only pure-Python code (no
    extension modules and no C libraries)."""
    if not self.has_pure_modules():
        return False
    # De Morgan rewrite of "not ext and not libs"
    return not (self.has_ext_modules() or self.has_c_libraries())
142
Lib/packaging/errors.py
Normal file
142
Lib/packaging/errors.py
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
"""Exceptions used throughout the package.
|
||||
|
||||
Submodules of packaging may raise exceptions defined in this module as
|
||||
well as standard exceptions; in particular, SystemExit is usually raised
|
||||
for errors that are obviously the end-user's fault (e.g. bad
|
||||
command-line arguments).
|
||||
"""
|
||||
|
||||
|
||||
class PackagingError(Exception):
    """The root of all Packaging evil."""


class PackagingModuleError(PackagingError):
    """Unable to load an expected module, or to find an expected class
    within some module (in particular, command modules and classes)."""


class PackagingClassError(PackagingError):
    """Some command class (or possibly distribution class, if anyone
    feels a need to subclass Distribution) is found not to be holding
    up its end of the bargain, ie. implementing some part of the
    "command" interface."""
    # Fix: the closing quote of "command" was misplaced in the original
    # docstring ('"command "interface').


class PackagingGetoptError(PackagingError):
    """The option table provided to 'fancy_getopt()' is bogus."""


class PackagingArgError(PackagingError):
    """Raised by fancy_getopt in response to getopt.error -- ie. an
    error in the command line usage."""


class PackagingFileError(PackagingError):
    """Any problems in the filesystem: expected file not found, etc.
    Typically this is for problems that we detect before IOError or
    OSError could be raised."""


class PackagingOptionError(PackagingError):
    """Syntactic/semantic errors in command options, such as use of
    mutually conflicting options, or inconsistent options,
    badly-spelled values, etc.  No distinction is made between option
    values originating in the setup script, the command line, config
    files, or what-have-you -- but if we *know* something originated in
    the setup script, we'll raise PackagingSetupError instead."""


class PackagingSetupError(PackagingError):
    """For errors that can be definitely blamed on the setup script,
    such as invalid keyword arguments to 'setup()'."""


class PackagingPlatformError(PackagingError):
    """We don't know how to do something on the current platform (but
    we do know how to do it on some platform) -- eg. trying to compile
    C files on a platform not supported by a CCompiler subclass."""


class PackagingExecError(PackagingError):
    """Any problems executing an external program (such as the C
    compiler, when compiling C files)."""


class PackagingInternalError(PackagingError):
    """Internal inconsistencies or impossibilities (obviously, this
    should never be seen if the code is working!)."""


class PackagingTemplateError(PackagingError):
    """Syntax error in a file list template."""


class PackagingByteCompileError(PackagingError):
    """Byte compile error."""


class PackagingPyPIError(PackagingError):
    """Any problem occurring while using the indexes."""
    # Fix: docstring typo "occuring during using" -> "occurring while
    # using".
# Exception classes used by the CCompiler implementation classes
class CCompilerError(Exception):
    """A compile or link operation failed."""


class PreprocessError(CCompilerError):
    """Could not preprocess one or more C/C++ files."""


class CompileError(CCompilerError):
    """Could not compile one or more C/C++ source files."""


class LibError(CCompilerError):
    """Could not create a static library from one or more C/C++ object
    files."""


class LinkError(CCompilerError):
    """Could not link one or more C/C++ object files into an executable
    or shared library file."""


class UnknownFileError(CCompilerError):
    """Asked to process a file of an unknown type."""
class MetadataMissingError(PackagingError):
    """A required metadata field is missing."""


class MetadataConflictError(PackagingError):
    """Attempt to read or write metadata fields that conflict with each
    other."""


class MetadataUnrecognizedVersionError(PackagingError):
    """Unknown metadata version number."""
class IrrationalVersionError(Exception):
    """This is an irrational version."""
    # Fix: dropped a redundant 'pass' statement -- the docstring alone
    # is a complete class body.


class HugeMajorVersionNumError(IrrationalVersionError):
    """An irrational version because the major version number is huge
    (often because a year or date was used).

    See `error_on_huge_major_num` option in `NormalizedVersion` for details.
    This guard can be disabled by setting that option False.
    """


class InstallationException(Exception):
    """Base exception for installation scripts"""


class InstallationConflict(InstallationException):
    """Raised when a conflict is detected"""
451
Lib/packaging/fancy_getopt.py
Normal file
451
Lib/packaging/fancy_getopt.py
Normal file
|
|
@ -0,0 +1,451 @@
|
|||
"""Command line parsing machinery.
|
||||
|
||||
The FancyGetopt class is a Wrapper around the getopt module that
|
||||
provides the following additional features:
|
||||
* short and long options are tied together
|
||||
* options have help strings, so fancy_getopt could potentially
|
||||
create a complete usage summary
|
||||
* options set attributes of a passed-in object.
|
||||
|
||||
It is used under the hood by the command classes. Do not use directly.
|
||||
"""
|
||||
|
||||
import getopt
|
||||
import re
|
||||
import sys
|
||||
import string
|
||||
import textwrap
|
||||
|
||||
from packaging.errors import PackagingGetoptError, PackagingArgError
|
||||
|
||||
# Much like command_re in packaging.core, this is close to but not quite
# the same as a Python NAME -- except that, in the spirit of most GNU
# utilities, '-' is used in place of '_'.  (The spirit of LISP lives
# on!)  The similarities to NAME are, again, not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        """Rebuild the long-option -> table-entry mapping."""
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        """Replace the option table and rebuild the index."""
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        """Append one option to the table; 'long_option' must be new."""
        if long_option in self.option_index:
            raise PackagingGetoptError(
                "option conflict: already an option '%s'" % long_option)
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def _check_alias_dict(self, aliases, what):
        """Validate that both sides of every alias pair are known options."""
        assert isinstance(aliases, dict)
        for alias, opt in aliases.items():
            if alias not in self.option_index:
                raise PackagingGetoptError(
                    ("invalid %s '%s': "
                     "option '%s' not defined") % (what, alias, alias))
            if opt not in self.option_index:
                raise PackagingGetoptError(
                    ("invalid %s '%s': "
                     "aliased option '%s' not defined") % (what, alias, opt))

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names, both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                integer, short, help = option
                repeat = 0
            elif len(option) == 4:
                integer, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: %r" % option)

            # Type- and value-check the option names
            if not isinstance(integer, str) or len(integer) < 2:
                raise PackagingGetoptError(
                    ("invalid long option '%s': "
                     "must be a string of length >= 2") % integer)

            if (not ((short is None) or
                     (isinstance(short, str) and len(short) == 1))):
                raise PackagingGetoptError(
                    ("invalid short option '%s': "
                     "must be a single character or None") % short)

            self.repeat[integer] = repeat
            self.long_opts.append(integer)

            if integer[-1] == '=':      # option takes an argument?
                if short:
                    short = short + ':'
                integer = integer[0:-1]
                self.takes_arg[integer] = 1
            else:
                # Is option a "negative alias" for some other option
                # (eg. "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(integer)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise PackagingGetoptError(
                            ("invalid negative alias '%s': "
                             "aliased option '%s' takes a value") %
                            (integer, alias_to))

                    self.long_opts[-1] = integer  # XXX redundant?!
                self.takes_arg[integer] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(integer)
            if alias_to is not None:
                if self.takes_arg[integer] != self.takes_arg[alias_to]:
                    raise PackagingGetoptError(
                        ("invalid alias '%s': inconsistent with "
                         "aliased option '%s' (one of them takes a value, "
                         "the other doesn't") % (integer, alias_to))

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(integer):
                raise PackagingGetoptError(
                    ("invalid long option name '%s' " +
                     "(must be letters, numbers, hyphens only") % integer)

            self.attr_name[integer] = integer.replace('-', '_')
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = integer

    def getopt(self, args=None, object=None):
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        # Fix: short options must be concatenated with no separator; the
        # original joined them with spaces, which made ' ' look like a
        # valid short option character to the getopt module.
        short_opts = ''.join(self.short_opts)

        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise PackagingArgError(msg) from msg

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':  # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:  # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            # NOTE(review): 'repeat' is keyed by the long option name,
            # while the lookup below uses the attribute name -- they only
            # coincide for names without hyphens (true of 'verbose');
            # hyphenated repeat options would be silently ignored here.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        # Fix: the original had a second, unreachable 'return
        # self.option_order' after the if/else; it has been dropped.
        return self.option_order

    def generate_help(self, header=None):
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            integer = option[0]
            short = option[1]
            l = len(integer)
            if integer[-1] == '=':
                l = l - 1
            if short is not None:
                l = l + 5       # " (-x)" where short == 'x'
            if l > max_opt:
                max_opt = l

        opt_width = max_opt + 2 + 2 + 2  # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            integer, short, help = option[:3]
            text = textwrap.wrap(help, text_width)

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append(" --%-*s %s" % (max_opt, integer, text[0]))
                else:
                    lines.append(" --%-*s " % (max_opt, integer))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "%s (-%s)" % (integer, short)
                if text:
                    lines.append(" --%-*s %s" %
                                 (max_opt, opt_names, text[0]))
                else:
                    # Fix: the original passed only 'opt_names' to a
                    # "%-*s" format, which needs a (width, value) pair and
                    # raised TypeError whenever an option with a short
                    # name had no help text.
                    lines.append(" --%-*s" % (max_opt, opt_names))

            for l in text[1:]:
                lines.append(big_indent + l)

        return lines

    def print_help(self, header=None, file=None):
        """Write the generated help to *file* (sys.stdout by default)."""
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")
def fancy_getopt(options, negative_opt, object, args):
    """One-shot convenience wrapper: build a FancyGetopt parser for
    'options', register the 'negative_opt' aliases and parse 'args'
    into attributes of 'object'."""
    parser = FancyGetopt(options)
    parser.set_negative_aliases(negative_opt)
    return parser.getopt(args, object)
WS_TRANS = str.maketrans(string.whitespace, ' ' * len(string.whitespace))


def wrap_text(text, width):
    """Split *text* into lines of no more than *width* characters each.

    *text* is a str and *width* an int.  Returns a list of str.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    # normalize every whitespace character (incl. tabs) to a space
    text = text.expandtabs().translate(WS_TRANS)

    # split into words, runs of spaces and runs of hyphens; drop the
    # empty strings that ' - ' produces
    chunks = [c for c in re.split(r'( +|-+)', text) if c]
    lines = []

    while chunks:
        cur_line = []       # chunks to be joined into one line
        cur_len = 0         # length of the line so far

        while chunks:
            chunk_len = len(chunks[0])
            if cur_len + chunk_len > width:
                # line is full; drop a trailing all-space chunk
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break
            cur_line.append(chunks.pop(0))
            cur_len += chunk_len

        if chunks:
            # a single chunk longer than 'width' on an empty line:
            # hard-break it at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][:width])
                chunks[0] = chunks[0][width:]

            # an all-whitespace chunk at the start of the next line is
            # discarded (re.split above guarantees that a chunk with any
            # whitespace is all whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        lines.append(''.join(cur_line))

    return lines
class OptionDummy:
    """Dummy class just used as a namespace to hold command-line option
    values as instance attributes."""

    def __init__(self, options=()):
        """Create a new OptionDummy instance.

        The attribute named by each string in *options* is initialized to
        None.  (The default used to be a mutable list literal; an empty
        tuple avoids the shared-mutable-default pitfall with identical
        behavior, and callers passing a list are unaffected.)
        """
        for opt in options:
            setattr(self, opt, None)
|
||||
483
Lib/packaging/install.py
Normal file
483
Lib/packaging/install.py
Normal file
|
|
@ -0,0 +1,483 @@
|
|||
"""Building blocks for installers.
|
||||
|
||||
When used as a script, this module installs a release thanks to info
|
||||
obtained from an index (e.g. PyPI), with dependencies.
|
||||
|
||||
This is a higher-level module built on packaging.database and
|
||||
packaging.pypi.
|
||||
"""
|
||||
|
||||
import errno
import logging
import os
import shutil
import stat
import subprocess
import sys
import tempfile
from sysconfig import get_config_var

from packaging import logger
from packaging.dist import Distribution
from packaging.util import (_is_archive_file, ask, get_install_method,
                            egginfo_to_distinfo)
from packaging.pypi import wrapper
from packaging.version import get_version_predicate
from packaging.database import get_distributions, get_distribution
from packaging.depgraph import generate_graph

from packaging.errors import (PackagingError, InstallationException,
                              InstallationConflict, CCompilerError)
from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound
|
||||
|
||||
__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
|
||||
'install', 'install_local_project']
|
||||
|
||||
|
||||
def _move_files(files, destination):
|
||||
"""Move the list of files in the destination folder, keeping the same
|
||||
structure.
|
||||
|
||||
Return a list of tuple (old, new) emplacement of files
|
||||
|
||||
:param files: a list of files to move.
|
||||
:param destination: the destination directory to put on the files.
|
||||
if not defined, create a new one, using mkdtemp
|
||||
"""
|
||||
if not destination:
|
||||
destination = tempfile.mkdtemp()
|
||||
|
||||
for old in files:
|
||||
# not using os.path.join() because basename() might not be
|
||||
# unique in destination
|
||||
new = "%s%s" % (destination, old)
|
||||
|
||||
# try to make the paths.
|
||||
try:
|
||||
os.makedirs(os.path.dirname(new))
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST:
|
||||
pass
|
||||
else:
|
||||
raise e
|
||||
os.rename(old, new)
|
||||
yield old, new
|
||||
|
||||
|
||||
def _run_distutils_install(path):
|
||||
# backward compat: using setuptools or plain-distutils
|
||||
cmd = '%s setup.py install --record=%s'
|
||||
record_file = os.path.join(path, 'RECORD')
|
||||
os.system(cmd % (sys.executable, record_file))
|
||||
if not os.path.exists(record_file):
|
||||
raise ValueError('failed to install')
|
||||
else:
|
||||
egginfo_to_distinfo(record_file, remove_egginfo=True)
|
||||
|
||||
|
||||
def _run_setuptools_install(path):
|
||||
cmd = '%s setup.py install --record=%s --single-version-externally-managed'
|
||||
record_file = os.path.join(path, 'RECORD')
|
||||
os.system(cmd % (sys.executable, record_file))
|
||||
if not os.path.exists(record_file):
|
||||
raise ValueError('failed to install')
|
||||
else:
|
||||
egginfo_to_distinfo(record_file, remove_egginfo=True)
|
||||
|
||||
|
||||
def _run_packaging_install(path):
    """Run the 'install_dist' command for the packaging-based project
    unpacked at *path* (assumed to be the current working directory).

    Any expected build or installation failure is turned into SystemExit.
    """
    # XXX check for a valid setup.cfg?
    distribution = Distribution()
    distribution.parse_config_files()
    try:
        distribution.run_command('install_dist')
    except (IOError, os.error, PackagingError, CCompilerError) as exc:
        raise SystemExit("error: " + str(exc))
|
||||
|
||||
|
||||
def _install_dist(dist, path):
|
||||
"""Install a distribution into a path.
|
||||
|
||||
This:
|
||||
|
||||
* unpack the distribution
|
||||
* copy the files in "path"
|
||||
* determine if the distribution is packaging or distutils1.
|
||||
"""
|
||||
where = dist.unpack()
|
||||
|
||||
if where is None:
|
||||
raise ValueError('Cannot locate the unpacked archive')
|
||||
|
||||
return _run_install_from_archive(where)
|
||||
|
||||
|
||||
def install_local_project(path):
    """Install a distribution from a source directory or archive.

    If the source directory contains a setup.py install using distutils1.
    If a setup.cfg is found, install using the install_dist command.
    """
    path = os.path.abspath(path)
    if os.path.isdir(path):
        logger.info('installing from source directory: %s', path)
        _run_install_from_dir(path)
    elif _is_archive_file(path):
        logger.info('installing from archive: %s', path)
        unpacked = tempfile.mkdtemp()
        shutil.unpack_archive(path, unpacked)
        _run_install_from_archive(unpacked)
    else:
        logger.warning('no projects to install')
|
||||
|
||||
|
||||
def _run_install_from_archive(source_dir):
    """Install from an unpacked archive, descending into the first
    top-level directory found (archives usually contain a single root
    directory wrapping the project).
    """
    # XXX need a better way
    for entry in os.listdir(source_dir):
        candidate = os.path.join(source_dir, entry)
        if os.path.isdir(candidate):
            source_dir = candidate
            break
    return _run_install_from_dir(source_dir)
|
||||
|
||||
|
||||
# Dispatch table: maps the project style detected by
# packaging.util.get_install_method() to the function that installs it.
install_methods = {
    'packaging': _run_packaging_install,
    'setuptools': _run_setuptools_install,
    'distutils': _run_distutils_install}
|
||||
|
||||
|
||||
def _run_install_from_dir(source_dir):
    """Run the appropriate installer for the project in *source_dir*.

    The working directory is switched to *source_dir* for the duration of
    the install and restored afterwards, whatever happens.
    """
    old_dir = os.getcwd()
    os.chdir(source_dir)
    try:
        # previously the method detection and dict lookup happened both
        # before and inside the try block; doing it only inside also
        # guarantees the cwd is restored if detection itself raises
        install_method = get_install_method(source_dir)
        func = install_methods[install_method]
        return func(source_dir)
    finally:
        os.chdir(old_dir)
|
||||
|
||||
|
||||
def install_dists(dists, path, paths=sys.path):
    """Install all distributions provided in dists, with the given prefix.

    If an error occurs while installing one of the distributions, uninstall
    all the distributions installed so far (in the context of this
    function) before re-raising.

    Return a list of installed dists.

    :param dists: distributions to install
    :param path: base path to install distribution in
    :param paths: list of paths (defaults to sys.path) to look for info
    """
    if not path:
        path = tempfile.mkdtemp()

    done = []
    for dist in dists:
        logger.info('installing %s %s', dist.name, dist.version)
        try:
            _install_dist(dist, path)
            done.append(dist)
        except Exception as e:
            logger.info('failed: %s', e)

            # rollback: revert every distribution installed so far
            for installed_dist in done:
                logger.info('reverting %s', installed_dist)
                _remove_dist(installed_dist, paths)
            raise e
    return done
|
||||
|
||||
|
||||
def install_from_infos(install_path=None, install=(), remove=(), conflicts=(),
                       paths=sys.path):
    """Install and remove the given distributions.

    The function signature is made to be compatible with the one of
    get_infos.  The aim of this script is to provide a way to
    install/remove what's asked, and to rollback if needed.

    So, it's not possible to be in an inconsistent state: it will be either
    installed or uninstalled, never half-installed.

    The process follows these steps:

    1. Move all distributions that will be removed to a temporary location
    2. Install all the distributions that will be installed in a temp. loc.
    3. If the installation fails, rollback (eg. move back) those
       distributions, or remove what has been installed.
    4. Else, move the distributions to the right locations, and remove for
       real the distributions that need to be removed.

    :param install_path: the installation path where we want to install the
                         distributions.
    :param install: list of distributions that will be installed;
                    install_path must be provided if this list is not
                    empty.
    :param remove: list of distributions that will be removed.
    :param conflicts: list of conflicting distributions, eg. that will be
                      in conflict once the install and remove
                      distributions are processed.
    :param paths: list of paths (defaults to sys.path) to look for info
    """
    # first of all, if we have conflicts, stop here.
    if conflicts:
        raise InstallationConflict(conflicts)

    if install and not install_path:
        raise ValueError("Distributions are to be installed but `install_path`"
                         " is not provided.")

    # before removing the files, we will start by moving them away
    # then, if any error occurs, we could replace them in the good place.
    temp_files = {}  # contains lists of {dist: (old, new)} paths
    temp_dir = None
    if remove:
        temp_dir = tempfile.mkdtemp()
        for dist in remove:
            files = dist.list_installed_files()
            # _move_files is a generator: force it with list() so the
            # files really are moved away *before* the installation below.
            # Previously the unconsumed generator was stored, so the
            # "backup" moves only happened lazily inside the cleanup and
            # rollback loops, defeating the whole safety scheme.
            temp_files[dist] = list(_move_files(files, temp_dir))
    try:
        if install:
            install_dists(install, install_path, paths)
    except:
        # deliberately bare: even KeyboardInterrupt must trigger the
        # rollback -- put the files back in place, then re-raise
        for files in temp_files.values():
            for old, new in files:
                shutil.move(new, old)
        if temp_dir:
            shutil.rmtree(temp_dir)
        # now re-raising
        raise

    # installation succeeded: we can remove the backups for good
    for files in temp_files.values():
        for old, new in files:
            os.remove(new)
    if temp_dir:
        shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
def _get_setuptools_deps(release):
|
||||
# NotImplementedError
|
||||
pass
|
||||
|
||||
|
||||
def get_infos(requirements, index=None, installed=None, prefer_final=True):
    """Return the informations on what's going to be installed and upgraded.

    :param requirements: is a *string* containing the requirements for this
                         project (for instance "FooBar 1.1" or "BarBaz (<1.2)")
    :param index: If an index is specified, use this one, otherwise, use
                  :class index.ClientWrapper: to get project metadatas.
    :param installed: a list of already installed distributions.
    :param prefer_final: when picking up the releases, prefer a "final" one
                         over a beta/alpha/etc one.

    The results are returned in a dict, containing all the operations
    needed to install the given requirements::

        >>> get_install_info("FooBar (<=1.2)")
        {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}

    Conflict contains all the conflicting distributions, if there is a
    conflict.
    """
    # NOTE(review): the *prefer_final* parameter is accepted but never used
    # in this body -- confirm whether the index wrapper is supposed to
    # receive it.

    # this function does several things:
    # 1. get a release specified by the requirements
    # 2. gather its metadata, using setuptools compatibility if needed
    # 3. compare this tree with what is currently installed on the system,
    #    return the requirements of what is missing
    # 4. do that recursively and merge back the results
    # 5. return a dict containing information about what is needed to install
    #    or remove

    if not installed:
        logger.info('reading installed distributions')
        installed = list(get_distributions(use_egg_info=True))

    infos = {'install': [], 'remove': [], 'conflict': []}
    # Is a compatible version of the project already installed ?
    predicate = get_version_predicate(requirements)
    found = False

    # check that the project isn't already installed
    for installed_project in installed:
        # is it a compatible project ?
        if predicate.name.lower() != installed_project.name.lower():
            continue
        found = True
        logger.info('found %s %s', installed_project.name,
                    installed_project.metadata['version'])

        # if we already have something installed, check it matches the
        # requirements
        if predicate.match(installed_project.metadata['version']):
            # satisfied already: nothing to install, remove or resolve
            return infos
        # only the first distribution with a matching name is considered;
        # a version mismatch falls through to the resolution logic below
        break

    if not found:
        logger.info('project not installed')

    if not index:
        index = wrapper.ClientWrapper()

    # NOTE(review): *installed* was already populated above unless
    # get_distributions returned an empty list; this re-query (as a lazy
    # iterator this time) only triggers in that empty case -- confirm
    # whether it is intentional.
    if not installed:
        installed = get_distributions(use_egg_info=True)

    # Get all the releases that match the requirements
    try:
        release = index.get_release(requirements)
    except (ReleaseNotFound, ProjectNotFound):
        raise InstallationException('Release not found: "%s"' % requirements)

    if release is None:
        logger.info('could not find a matching project')
        return infos

    metadata = release.fetch_metadata()

    # we need to build setuptools deps if any
    if 'requires_dist' not in metadata:
        # _get_setuptools_deps is currently a stub returning None
        metadata['requires_dist'] = _get_setuptools_deps(release)

    # build the dependency graph with local and required dependencies
    dists = list(installed)
    dists.append(release)
    depgraph = generate_graph(dists)

    # Get what the missing deps are
    dists = depgraph.missing[release]
    if dists:
        logger.info("missing dependencies found, retrieving metadata")
        # we have missing deps: recurse on each of them and merge the
        # resulting operation lists into ours
        for dist in dists:
            _update_infos(infos, get_infos(dist, index, installed))

    # Fill in the infos
    existing = [d for d in installed if d.name == release.name]
    if existing:
        # an older/incompatible version is present: schedule its removal
        # and record anything that depends on it as a conflict
        infos['remove'].append(existing[0])
        infos['conflict'].extend(depgraph.reverse_list[existing[0]])
    infos['install'].append(release)
    return infos
|
||||
|
||||
|
||||
def _update_infos(infos, new_infos):
|
||||
"""extends the lists contained in the `info` dict with those contained
|
||||
in the `new_info` one
|
||||
"""
|
||||
for key, value in infos.items():
|
||||
if key in new_infos:
|
||||
infos[key].extend(new_infos[key])
|
||||
|
||||
|
||||
def _remove_dist(dist, paths=sys.path):
    # Convenience wrapper: uninstall the given distribution object by name
    # via remove() below.
    remove(dist.name, paths)
|
||||
|
||||
|
||||
def remove(project_name, paths=sys.path, auto_confirm=True):
    """Remove a single project from the installation.

    Looks the project up in *paths*, checks every installed file can be
    moved, optionally asks for confirmation, then deletes the files, any
    directories left empty, and the dist-info directory itself.

    Raises PackagingError if the project is not installed.
    """
    dist = get_distribution(project_name, use_egg_info=True, paths=paths)
    if dist is None:
        raise PackagingError('Distribution "%s" not found' % project_name)
    files = dist.list_installed_files(local=True)
    rmdirs = []
    rmfiles = []
    tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
    try:
        # the md5/size fields of each RECORD entry are not needed here
        for file_, md5, size in files:
            if os.path.isfile(file_):
                dirname, filename = os.path.split(file_)
                tmpfile = os.path.join(tmp, filename)
                # rename out and immediately back: appears to probe that
                # each file is actually removable before committing to the
                # real deletion below -- TODO confirm that intent
                try:
                    os.rename(file_, tmpfile)
                finally:
                    if not os.path.isfile(file_):
                        os.rename(tmpfile, file_)
                if file_ not in rmfiles:
                    rmfiles.append(file_)
                if dirname not in rmdirs:
                    rmdirs.append(dirname)
    finally:
        shutil.rmtree(tmp)

    logger.info('removing %r: ', project_name)

    for file_ in rmfiles:
        logger.info(' %s', file_)

    # Taken from the pip project
    if auto_confirm:
        response = 'y'
    else:
        response = ask('Proceed (y/n)? ', ('y', 'n'))

    if response == 'y':
        file_count = 0
        for file_ in rmfiles:
            os.remove(file_)
            file_count += 1

        dir_count = 0
        for dirname in rmdirs:
            if not os.path.exists(dirname):
                # could
                continue

            # count files remaining anywhere under this directory; only
            # fully empty trees are deleted
            files_count = 0
            for root, dir, files in os.walk(dirname):
                files_count += len(files)

            if files_count > 0:
                # XXX Warning
                continue

            # empty dirs with only empty dirs
            if os.stat(dirname).st_mode & stat.S_IWUSR:
                # XXX Add a callable in shutil.rmtree to count
                # the number of deleted elements
                shutil.rmtree(dirname)
                dir_count += 1

        # removing the top path
        # XXX count it ?
        if os.path.exists(dist.path):
            shutil.rmtree(dist.path)

        logger.info('success: removed %d files and %d dirs',
                    file_count, dir_count)
|
||||
|
||||
|
||||
def install(project):
    """Install the project matching the *project* requirement string,
    together with its missing dependencies, rolling back on failure.
    """
    logger.info('getting information about %r', project)
    try:
        info = get_infos(project)
    except InstallationException:
        # message fixed: used to read "cound not find"
        logger.info('could not find %r', project)
        return

    if info['install'] == []:
        logger.info('nothing to install')
        return

    install_path = get_config_var('base')
    try:
        install_from_infos(install_path,
                           info['install'], info['remove'], info['conflict'])

    except InstallationConflict as e:
        if logger.isEnabledFor(logging.INFO):
            projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
            logger.info('%r conflicts with %s', project, ','.join(projects))
|
||||
|
||||
|
||||
def _main(**attrs):
    """Script entry point: use the first command-line argument as the
    requirement string unless explicit script_args were supplied.
    """
    if 'script_args' not in attrs:
        # sys is already imported at module level; the function-local
        # "import sys" that used to live here was redundant
        attrs['requirements'] = sys.argv[1]
    get_infos(**attrs)
|
||||
|
||||
# Allow running this module directly as an installer script.
if __name__ == '__main__':
    _main()
|
||||
372
Lib/packaging/manifest.py
Normal file
372
Lib/packaging/manifest.py
Normal file
|
|
@ -0,0 +1,372 @@
|
|||
"""Class representing the list of files in a distribution.
|
||||
|
||||
The Manifest class can be used to:
|
||||
|
||||
- read or write a MANIFEST file
|
||||
- read a template file and find out the file list
|
||||
"""
|
||||
# XXX todo: document + add tests
|
||||
import re
|
||||
import os
|
||||
import fnmatch
|
||||
|
||||
from packaging import logger
|
||||
from packaging.util import write_file, convert_path
|
||||
from packaging.errors import (PackagingTemplateError,
|
||||
PackagingInternalError)
|
||||
|
||||
__all__ = ['Manifest']
|
||||
|
||||
# a \ followed by some spaces + EOL
|
||||
_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M)
|
||||
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
|
||||
|
||||
|
||||
class Manifest(object):
    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self):
        # full recursive listing of the source tree, computed lazily by
        # findall(); None means "not scanned yet"
        self.allfiles = None
        # the selected files, in the order the rules added them
        self.files = []

    #
    # Public API
    #

    def findall(self, dir=os.curdir):
        """Scan *dir* recursively and cache every file found."""
        self.allfiles = _findall(dir)

    def append(self, item):
        """Add a single filename to the manifest."""
        self.files.append(item)

    def extend(self, items):
        """Add several filenames to the manifest."""
        self.files.extend(items)

    def sort(self):
        """Sort the file list by (directory, basename) pairs."""
        # Not a strict lexical sort!
        self.files = [os.path.join(*path_tuple) for path_tuple in
                      sorted(os.path.split(path) for path in self.files)]

    def clear(self):
        """Clear all collected files."""
        self.files = []
        if self.allfiles is not None:
            self.allfiles = []

    def remove_duplicates(self):
        """Drop adjacent duplicate entries from the file list."""
        # Assumes list has been sorted!
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]

    def read_template(self, path_or_file):
        """Read and parse a manifest template file.
        'path_or_file' can be a path or a file-like object.

        Updates the list accordingly.
        """
        if isinstance(path_or_file, str):
            f = open(path_or_file)
        else:
            f = path_or_file

        # note: the finally below closes f even when the caller supplied
        # the file object itself
        try:
            content = f.read()
            # first, let's unwrap collapsed lines
            content = _COLLAPSE_PATTERN.sub('', content)
            # next, let's remove commented lines and empty lines
            content = _COMMENTED_LINE.sub('', content)

            # now we have our cleaned up lines
            lines = [line.strip() for line in content.split('\n')]
        finally:
            f.close()

        for line in lines:
            if line == '':
                continue
            try:
                self._process_template_line(line)
            except PackagingTemplateError as msg:
                # a malformed template line is reported but does not
                # abort parsing of the remaining lines
                logger.warning("%s, %s", path_or_file, msg)

    def write(self, path):
        """Write the collected file list to the manifest file named by
        *path*, after sorting and removing duplicates.

        A manifest that does not start with the generated-file marker is
        assumed to be manually maintained and is left untouched.
        """
        if os.path.isfile(path):
            with open(path) as fp:
                first_line = fp.readline()

            if first_line != '# file GENERATED by packaging, do NOT edit\n':
                logger.info("not writing to manually maintained "
                            "manifest file %r", path)
                return

        self.sort()
        self.remove_duplicates()
        content = self.files[:]
        content.insert(0, '# file GENERATED by packaging, do NOT edit')
        logger.info("writing manifest file %r", path)
        write_file(path, content)

    def read(self, path):
        """Read an existing manifest file named by *path* and append its
        lines to the file list.

        NOTE(review): lines are appended verbatim, including their
        trailing newline -- confirm whether callers expect stripping.
        """
        logger.info("reading manifest file %r", path)
        with open(path) as manifest:
            for line in manifest.readlines():
                self.append(line)

    def exclude_pattern(self, pattern, anchor=True, prefix=None,
                        is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for '_include_pattern()', below.
        The list 'self.files' is modified in place. Return True if files are
        found.
        """
        files_found = False
        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)
        # iterate backwards so deletions do not shift unvisited indices
        for i in range(len(self.files) - 1, -1, -1):
            if pattern_re.search(self.files[i]):
                del self.files[i]
                files_found = True

        return files_found

    #
    # Private API
    #

    def _parse_template_line(self, line):
        """Split a template line into (action, patterns, dir, dir_pattern).

        Exactly one of the last three items is filled in, depending on the
        action.  Raises PackagingTemplateError for malformed lines.
        """
        words = line.split()
        if len(words) == 1:
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise PackagingTemplateError(
                    "%r expects <pattern1> <pattern2> ..." % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise PackagingTemplateError(
                    "%r expects <dir> <pattern1> <pattern2> ..." % action)

            dir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise PackagingTemplateError(
                    "%r expects a single <dir_pattern>" % action)

            dir_pattern = convert_path(words[1])

        else:
            raise PackagingTemplateError("unknown action %r" % action)

        return action, patterns, dir, dir_pattern

    def _process_template_line(self, line):
        """Apply one template line to the file list, warning (not failing)
        when a rule matches nothing.
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        action, patterns, dir, dir_pattern = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning("no files found matching %r", pattern)

        elif action == 'exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=True):
                    logger.warning("no previously-included files "
                                   "found matching %r", pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning("no files found matching %r "
                                   "anywhere in distribution", pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=False):
                    logger.warning("no previously-included files "
                                   "matching %r found anywhere in "
                                   "distribution", pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=dir):
                    logger.warning("no files found matching %r "
                                   "under directory %r", pattern, dir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    logger.warning("no previously-included files "
                                   "matching %r found under directory %r",
                                   pattern, dir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dir_pattern):
                logger.warning("no directories found matching %r",
                               dir_pattern)

        elif action == 'prune':
            if not self.exclude_pattern(None, prefix=dir_pattern):
                logger.warning("no previously-included directories found "
                               "matching %r", dir_pattern)
        else:
            # _parse_template_line already validated the action
            raise PackagingInternalError(
                "this cannot happen: invalid action %r" % action)

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.allfiles' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?' match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        files_found = False
        pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.append(name)
                files_found = True

        return files_found
|
||||
|
||||
|
||||
#
|
||||
# Utility functions
|
||||
#
|
||||
def _findall(dir=os.curdir):
|
||||
"""Find all files under 'dir' and return the list of full filenames
|
||||
(relative to 'dir').
|
||||
"""
|
||||
from stat import S_ISREG, S_ISDIR, S_ISLNK
|
||||
|
||||
list = []
|
||||
stack = [dir]
|
||||
pop = stack.pop
|
||||
push = stack.append
|
||||
|
||||
while stack:
|
||||
dir = pop()
|
||||
names = os.listdir(dir)
|
||||
|
||||
for name in names:
|
||||
if dir != os.curdir: # avoid the dreaded "./" syndrome
|
||||
fullname = os.path.join(dir, name)
|
||||
else:
|
||||
fullname = name
|
||||
|
||||
# Avoid excess stat calls -- just one will do, thank you!
|
||||
stat = os.stat(fullname)
|
||||
mode = stat.st_mode
|
||||
if S_ISREG(mode):
|
||||
list.append(fullname)
|
||||
elif S_ISDIR(mode) and not S_ISLNK(mode):
|
||||
push(fullname)
|
||||
|
||||
return list
|
||||
|
||||
|
||||
def _glob_to_re(pattern):
|
||||
"""Translate a shell-like glob pattern to a regular expression.
|
||||
|
||||
Return a string containing the regex. Differs from
|
||||
'fnmatch.translate()' in that '*' does not match "special characters"
|
||||
(which are platform-specific).
|
||||
"""
|
||||
pattern_re = fnmatch.translate(pattern)
|
||||
|
||||
# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
|
||||
# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
|
||||
# and by extension they shouldn't match such "special characters" under
|
||||
# any OS. So change all non-escaped dots in the RE to match any
|
||||
# character except the special characters.
|
||||
# XXX currently the "special characters" are just slash -- i.e. this is
|
||||
# Unix-only.
|
||||
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', r'\1[^/]', pattern_re)
|
||||
|
||||
return pattern_re
|
||||
|
||||
|
||||
def _translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
|
||||
"""Translate a shell-like wildcard pattern to a compiled regular
|
||||
expression.
|
||||
|
||||
Return the compiled regex. If 'is_regex' true,
|
||||
then 'pattern' is directly compiled to a regex (if it's a string)
|
||||
or just returned as-is (assumes it's a regex object).
|
||||
"""
|
||||
if is_regex:
|
||||
if isinstance(pattern, str):
|
||||
return re.compile(pattern)
|
||||
else:
|
||||
return pattern
|
||||
|
||||
if pattern:
|
||||
pattern_re = _glob_to_re(pattern)
|
||||
else:
|
||||
pattern_re = ''
|
||||
|
||||
if prefix is not None:
|
||||
# ditch end of pattern character
|
||||
empty_pattern = _glob_to_re('')
|
||||
prefix_re = _glob_to_re(prefix)[:-len(empty_pattern)]
|
||||
pattern_re = "^" + os.path.join(prefix_re, ".*" + pattern_re)
|
||||
else: # no prefix -- respect anchor flag
|
||||
if anchor:
|
||||
pattern_re = "^" + pattern_re
|
||||
|
||||
return re.compile(pattern_re)
|
||||
187
Lib/packaging/markers.py
Normal file
187
Lib/packaging/markers.py
Normal file
|
|
@ -0,0 +1,187 @@
|
|||
"""Parser for the environment markers micro-language defined in PEP 345."""
|
||||
|
||||
import sys
|
||||
import platform
|
||||
import os
|
||||
|
||||
from tokenize import tokenize, NAME, OP, STRING, ENDMARKER, ENCODING
|
||||
from io import BytesIO
|
||||
|
||||
__all__ = ['interpret']
|
||||
|
||||
|
||||
# allowed operators
|
||||
_OPERATORS = {'==': lambda x, y: x == y,
|
||||
'!=': lambda x, y: x != y,
|
||||
'>': lambda x, y: x > y,
|
||||
'>=': lambda x, y: x >= y,
|
||||
'<': lambda x, y: x < y,
|
||||
'<=': lambda x, y: x <= y,
|
||||
'in': lambda x, y: x in y,
|
||||
'not in': lambda x, y: x not in y}
|
||||
|
||||
|
||||
def _operate(operation, x, y):
|
||||
return _OPERATORS[operation](x, y)
|
||||
|
||||
|
||||
# restricted set of variables
|
||||
_VARS = {'sys.platform': sys.platform,
|
||||
'python_version': sys.version[:3],
|
||||
'python_full_version': sys.version.split(' ', 1)[0],
|
||||
'os.name': os.name,
|
||||
'platform.version': platform.version(),
|
||||
'platform.machine': platform.machine(),
|
||||
'platform.python_implementation': platform.python_implementation()}
|
||||
|
||||
|
||||
class _Operation:
    """One '<left> <op> <right>' comparison from a marker expression.

    The operand/operator token strings are filled in by the parser;
    calling the instance evaluates the comparison.
    """

    def __init__(self, execution_context=None):
        # token strings, assigned later by the parser
        self.left = None
        self.op = None
        self.right = None
        if execution_context is None:
            execution_context = {}
        # optional mapping whose entries override the default _VARS values
        self.execution_context = execution_context

    def _get_var(self, name):
        # execution_context takes precedence over the process-wide _VARS
        if name in self.execution_context:
            return self.execution_context[name]
        return _VARS[name]

    def __repr__(self):
        return '%s %s %s' % (self.left, self.op, self.right)

    def _is_string(self, value):
        # a token is a string literal if it is wrapped in matching quotes
        if value is None or len(value) < 2:
            return False
        for delimiter in '"\'':
            if value[0] == value[-1] == delimiter:
                return True
        return False

    def _is_name(self, value):
        # True if the token is one of the recognized marker variables
        return value in _VARS

    def _convert(self, value):
        # resolve a variable name, or strip the quotes off a literal
        if value in _VARS:
            return self._get_var(value)
        return value.strip('"\'')

    def _check_name(self, value):
        # reject references to variables outside the restricted set
        if value not in _VARS:
            raise NameError(value)

    def _nonsense_op(self):
        msg = 'This operation is not supported : "%s"' % self
        raise SyntaxError(msg)

    def __call__(self):
        """Evaluate the comparison.

        Exactly one side must be a quoted literal and the other a known
        variable name; anything else raises SyntaxError/NameError, and an
        unknown operator raises TypeError.
        """
        # make sure we do something useful
        if self._is_string(self.left):
            if self._is_string(self.right):
                self._nonsense_op()
            self._check_name(self.right)
        else:
            if not self._is_string(self.right):
                self._nonsense_op()
            self._check_name(self.left)

        if self.op not in _OPERATORS:
            raise TypeError('Operator not supported "%s"' % self.op)

        left = self._convert(self.left)
        right = self._convert(self.right)
        return _operate(self.op, left, right)
|
||||
|
||||
|
||||
class _OR:
    """Lazy boolean OR of two sub-expressions.

    ``right`` may be attached after construction; ``filled()`` reports
    whether both operands are present.
    """

    def __init__(self, left, right=None):
        self.left, self.right = left, right

    def filled(self):
        """Return True once both operands have been supplied."""
        return not (self.right is None)

    def __repr__(self):
        return 'OR({!r}, {!r})'.format(self.left, self.right)

    def __call__(self):
        # short-circuit: evaluate the right side only when the left
        # side is falsy (same semantics as ``left() or right()``)
        outcome = self.left()
        return outcome if outcome else self.right()
|
||||
|
||||
|
||||
class _AND:
    """Lazy boolean AND of two sub-expressions.

    ``right`` may be attached after construction; ``filled()`` reports
    whether both operands are present.
    """

    def __init__(self, left, right=None):
        self.left, self.right = left, right

    def filled(self):
        """Return True once both operands have been supplied."""
        return not (self.right is None)

    def __repr__(self):
        return 'AND({!r}, {!r})'.format(self.left, self.right)

    def __call__(self):
        # short-circuit: evaluate the right side only when the left
        # side is truthy (same semantics as ``left() and right()``)
        outcome = self.left()
        return self.right() if outcome else outcome
|
||||
|
||||
|
||||
def interpret(marker, execution_context=None):
    """Interpret a marker and return a result depending on environment.

    *marker* is a PEP 345 environment-marker string such as
    ``"os.name == 'posix' and python_version >= '2.6'"``.
    *execution_context* is an optional mapping overriding the default
    variable values.  Returns True when every top-level expression in the
    marker evaluates to a truthy value.
    """
    # tokenize works on bytes, via a readline-style callable
    marker = marker.strip().encode()
    ops = []
    op_starting = True
    for token in tokenize(BytesIO(marker).readline):
        # Unpack token (TokenInfo is a 5-tuple: type, string, start, end,
        # source line)
        toktype, tokval, rowcol, line, logical_line = token
        if toktype not in (NAME, OP, STRING, ENDMARKER, ENCODING):
            raise SyntaxError('Type not supported "%s"' % tokval)

        if op_starting:
            # beginning of a new comparison: either it becomes the right
            # operand of a pending and/or, or it starts a new top-level op
            op = _Operation(execution_context)
            if len(ops) > 0:
                last = ops[-1]
                if isinstance(last, (_OR, _AND)) and not last.filled():
                    last.right = op
                else:
                    ops.append(op)
            else:
                ops.append(op)
            op_starting = False
        else:
            op = ops[-1]

        # 'and'/'or' (and end of input) terminate the current comparison
        if (toktype == ENDMARKER or
            (toktype == NAME and tokval in ('and', 'or'))):
            if toktype == NAME and tokval == 'and':
                ops.append(_AND(ops.pop()))
            elif toktype == NAME and tokval == 'or':
                ops.append(_OR(ops.pop()))
            op_starting = True
            continue

        # if the top of the stack is a filled and/or, tokens belong to
        # its right-hand comparison
        if isinstance(op, (_OR, _AND)) and op.right is not None:
            op = op.right

        # names, strings and '.' accumulate into the left operand until an
        # operator is seen, then into the right operand (dotted names like
        # os.name arrive as several tokens and are concatenated)
        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
            or (toktype == OP and tokval == '.')):
            if op.op is None:
                if op.left is None:
                    op.left = tokval
                else:
                    op.left += tokval
            else:
                if op.right is None:
                    op.right = tokval
                else:
                    op.right += tokval
        elif toktype == OP or tokval in ('in', 'not'):
            # combine the two-token sequence 'not' 'in' into one operator
            if tokval == 'in' and op.op == 'not':
                op.op = 'not in'
            else:
                op.op = tokval

    # every top-level expression must be truthy
    for op in ops:
        if not op():
            return False
    return True
|
||||
552
Lib/packaging/metadata.py
Normal file
552
Lib/packaging/metadata.py
Normal file
|
|
@ -0,0 +1,552 @@
|
|||
"""Implementation of the Metadata for Python packages PEPs.
|
||||
|
||||
Supports all metadata formats (1.0, 1.1, 1.2).
|
||||
"""
|
||||
|
||||
import re
|
||||
import logging
|
||||
|
||||
from io import StringIO
|
||||
from email import message_from_file
|
||||
from packaging import logger
|
||||
from packaging.markers import interpret
|
||||
from packaging.version import (is_valid_predicate, is_valid_version,
|
||||
is_valid_versions)
|
||||
from packaging.errors import (MetadataMissingError,
|
||||
MetadataConflictError,
|
||||
MetadataUnrecognizedVersionError)
|
||||
|
||||
try:
    # docutils is installed
    from docutils.utils import Reporter
    from docutils.parsers.rst import Parser
    from docutils import frontend
    from docutils import nodes

    class SilentReporter(Reporter):
        """A docutils Reporter that records messages instead of emitting
        them, so reST syntax problems can be returned as warnings."""

        def __init__(self, source, report_level, halt_level, stream=None,
                     debug=0, encoding='ascii', error_handler='replace'):
            # collected (level, message, children, kwargs) tuples
            self.messages = []
            Reporter.__init__(self, source, report_level, halt_level, stream,
                              debug, encoding, error_handler)

        def system_message(self, level, message, *children, **kwargs):
            # capture instead of writing to the stream
            self.messages.append((level, message, children, kwargs))

    _HAS_DOCUTILS = True
except ImportError:
    # docutils is not installed; reST checking is disabled
    _HAS_DOCUTILS = False
|
||||
|
||||
# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.0'

# Continuation lines of a Description field are written as "\n |";
# this pattern strips that prefix back out when reading.
_LINE_PREFIX = re.compile('\n \|')

# field names per metadata version:
# PEP 241 -- metadata 1.0
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License')

# PEP 314 -- metadata 1.1
_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

# fields that only exist in 1.1 (used to auto-detect the version)
_314_MARKERS = ('Obsoletes', 'Provides', 'Requires')

# PEP 345 -- metadata 1.2
_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

# fields that only exist in 1.2 (used to auto-detect the version)
_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                'Maintainer-email', 'Project-URL')

# union of all known field names across every metadata version
_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
||||
|
||||
|
||||
def _version2fieldlist(version):
    """Return the tuple of field names defined for a metadata *version*.

    Raises MetadataUnrecognizedVersionError for any unknown version string.
    """
    fields_by_version = {'1.0': _241_FIELDS,
                         '1.1': _314_FIELDS,
                         '1.2': _345_FIELDS}
    try:
        return fields_by_version[version]
    except KeyError:
        raise MetadataUnrecognizedVersionError(version)
|
||||
|
||||
|
||||
def _best_version(fields):
    """Detect the best version depending on the fields used."""
    def _has_marker(keys, markers):
        # True if any version-specific field name appears in keys
        for marker in markers:
            if marker in keys:
                return True
        return False

    keys = list(fields)
    possible_versions = ['1.0', '1.1', '1.2']

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !
    elif len(possible_versions) == 0:
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    if is_1_1 and is_1_2:
        raise MetadataConflictError('You used incompatible 1.1 and 1.2 fields')

    # we have the choice, either 1.0, or 1.2
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but is not widespread yet
    if not is_1_1 and not is_1_2:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'

    # default marker when 1.0 is disqualified
    return '1.2'
|
||||
|
||||
|
||||
# maps attribute-style names (underscore-lowercase) to the canonical
# hyphenated field names used in PKG-INFO files
_ATTR2FIELD = {
    'metadata_version': 'Metadata-Version',
    'name': 'Name',
    'version': 'Version',
    'platform': 'Platform',
    'supported_platform': 'Supported-Platform',
    'summary': 'Summary',
    'description': 'Description',
    'keywords': 'Keywords',
    'home_page': 'Home-page',
    'author': 'Author',
    'author_email': 'Author-email',
    'maintainer': 'Maintainer',
    'maintainer_email': 'Maintainer-email',
    'license': 'License',
    'classifier': 'Classifier',
    'download_url': 'Download-URL',
    'obsoletes_dist': 'Obsoletes-Dist',
    'provides_dist': 'Provides-Dist',
    'requires_dist': 'Requires-Dist',
    'requires_python': 'Requires-Python',
    'requires_external': 'Requires-External',
    'requires': 'Requires',
    'provides': 'Provides',
    'obsoletes': 'Obsoletes',
    'project_url': 'Project-URL',
}

# fields whose values are version predicates (e.g. "foo (>=1.0)")
_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
# fields whose values are comma-separated lists of version specifiers
_VERSIONS_FIELDS = ('Requires-Python',)
# fields whose values are a single version number
_VERSION_FIELDS = ('Version',)
# fields that may appear multiple times, stored as lists
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform')
# list fields whose individual values are comma-joined tuples
_LISTTUPLEFIELDS = ('Project-URL',)

# fields stored as a single comma-separated string but exposed as a list
_ELEMENTSFIELD = ('Keywords',)
||||
|
||||
# free-text fields that may need Description line-prefix processing
_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')


class NoDefault:
    """Marker object used for clean representation"""
    def __repr__(self):
        return '<NoDefault>'


# Sentinel distinguishing "no default supplied" from an explicit None in
# Metadata.get().  (The earlier `_MISSING = object()` assignment was dead
# code -- it was immediately and unconditionally shadowed -- so it has
# been removed.)
_MISSING = NoDefault()
|
||||
|
||||
|
||||
class Metadata:
    """The metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a METADATA file
    - *fileobj* give a file-like object with METADATA as content
    - *mapping* is a dict-like object
    """
    # TODO document that execution_context and platform_dependent are used
    # to filter on query, not when setting a key
    # also document the mapping API and UNKNOWN default key

    def __init__(self, path=None, platform_dependent=False,
                 execution_context=None, fileobj=None, mapping=None):
        # canonical field name -> value (str, list or tuple list)
        self._fields = {}
        self.requires_files = []
        self.docutils_support = _HAS_DOCUTILS
        # when True, values carrying a ";marker" suffix are filtered on
        # read through packaging.markers.interpret()
        self.platform_dependent = platform_dependent
        self.execution_context = execution_context
        # at most one of the three data sources may be given
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)

    def _set_best_version(self):
        # recompute Metadata-Version from the fields currently present
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, file, name, value):
        # one "Name: value" line in PKG-INFO format
        file.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            # report the name the caller used, not the converted one
            raise KeyError(name)
        self._set_best_version()

    def __contains__(self, name):
        return (name in self._fields or
                self._convert_name(name) in self._fields)

    def _convert_name(self, name):
        # accept both canonical field names ('Home-page') and
        # attribute-style names ('home_page')
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)

    def _default_value(self, name):
        # list-valued fields default to [], everything else to 'UNKNOWN'
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'

    def _check_rst_data(self, data):
        """Return warnings when the provided data has syntax errors."""
        source_path = StringIO()
        parser = Parser()
        settings = frontend.OptionParser().get_default_values()
        settings.tab_width = 4
        settings.pep_references = None
        settings.rfc_references = None
        reporter = SilentReporter(source_path,
                          settings.report_level,
                          settings.halt_level,
                          stream=settings.warning_stream,
                          debug=settings.debug,
                          encoding=settings.error_encoding,
                          error_handler=settings.error_encoding_error_handler)

        document = nodes.document(settings, reporter, source=source_path)
        document.note_source(source_path, -1)
        try:
            parser.parse(data, document)
        except AttributeError:
            reporter.messages.append((-1, 'Could not finish the parsing.',
                                      '', {}))

        return reporter.messages

    def _platform(self, value):
        # split "value;marker", evaluate the marker against the execution
        # context, and report whether the value applies on this platform
        if not self.platform_dependent or ';' not in value:
            return True, value
        value, marker = value.split(';')
        return interpret(marker, self.execution_context), value

    def _remove_line_prefix(self, value):
        # undo the "\n |" continuation-line encoding used on write
        return _LINE_PREFIX.sub('\n', value)

    #
    # Public API
    #
    def get_fullname(self):
        """Return the distribution name with version"""
        return '%s-%s' % (self['Name'], self['Version'])

    def is_metadata_field(self, name):
        """return True if name is a valid metadata key"""
        name = self._convert_name(name)
        return name in _ALL_FIELDS

    def is_multi_field(self, name):
        """Return True if *name* is a list-valued metadata field."""
        name = self._convert_name(name)
        return name in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path."""
        with open(filepath, 'r', encoding='ascii') as fp:
            self.read_file(fp)

    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        # PKG-INFO uses RFC 822 syntax, so parse it as an email message
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        for field in _version2fieldlist(self['Metadata-Version']):
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)

    def write(self, filepath):
        """Write the metadata fields to filepath."""
        with open(filepath, 'w') as fp:
            self.write_file(fp)

    def write_file(self, fileobject):
        """Write the PKG-INFO format data to a file object."""
        self._set_best_version()
        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if field in _ELEMENTSFIELD:
                # Keywords: one comma-joined line
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # encode embedded newlines as continuation lines
                    values = values.replace('\n', '\n |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            # silently skip unknown keys and falsy values
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if other is None:
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            self.update(kwargs)

    def set(self, name, value):
        """Control then set a metadata field."""
        name = self._convert_name(name)

        # normalize scalar input for fields stored as lists
        if ((name in _ELEMENTSFIELD or name == 'Platform') and
            not isinstance(value, (list, tuple))):
            if isinstance(value, str):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and
              not isinstance(value, (list, tuple))):
            if isinstance(value, str):
                value = [value]
            else:
                value = []

        # validation only produces warnings; invalid values are kept
        if logger.isEnabledFor(logging.WARNING):
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid predicates
                    if not is_valid_predicate(v.split(';')[0]):
                        logger.warning(
                            '%r is not a valid predicate (field %r)',
                            v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not is_valid_versions(value):
                    logger.warning('%r is not a valid version (field %r)',
                                   value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not is_valid_version(value):
                    logger.warning('%r is not a valid version (field %r)',
                                   value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                value = self._remove_line_prefix(value)

        self._fields[name] = value
        self._set_best_version()

    def get(self, name, default=_MISSING):
        """Get a metadata field."""
        name = self._convert_name(name)
        if name not in self._fields:
            # fall back to the field's standard default unless the caller
            # supplied an explicit one
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                # filter out values whose environment marker does not match
                valid, val = self._platform(val)
                if not valid:
                    continue
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            valid, value = self._platform(self._fields[name])
            if not valid:
                return []
            if isinstance(value, str):
                return value.split(',')
        # NOTE(review): for non-string _ELEMENTSFIELD values this re-runs
        # the _platform() check a second time -- looks redundant; confirm
        valid, value = self._platform(self._fields[name])
        if not valid:
            return None
        return value

    def check(self, strict=False, restructuredtext=False):
        """Check if the metadata is compliant.

        If *strict* is True, raise MetadataMissingError when Name or
        Version are missing; otherwise missing fields are only reported.
        Returns a ``(missing, warnings)`` pair.
        """
        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):    # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        # recommended fields: reported as missing but never fatal
        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        if _HAS_DOCUTILS and restructuredtext:
            warnings.extend(self._check_rst_data(self['Description']))

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        def is_valid_predicates(value):
            for v in value:
                if not is_valid_predicate(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, is_valid_predicates),
                                   (_VERSIONS_FIELDS, is_valid_versions),
                                   (_VERSION_FIELDS, is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append('Wrong value for %r: %s' % (field, value))

        return missing, warnings

    def todict(self):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        """
        data = {
            'metadata_version': self['Metadata-Version'],
            'name': self['Name'],
            'version': self['Version'],
            'summary': self['Summary'],
            'home_page': self['Home-page'],
            'author': self['Author'],
            'author_email': self['Author-email'],
            'license': self['License'],
            'description': self['Description'],
            'keywords': self['Keywords'],
            'platform': self['Platform'],
            'classifier': self['Classifier'],
            'download_url': self['Download-URL'],
        }

        if self['Metadata-Version'] == '1.2':
            data['requires_dist'] = self['Requires-Dist']
            data['requires_python'] = self['Requires-Python']
            data['requires_external'] = self['Requires-External']
            data['provides_dist'] = self['Provides-Dist']
            data['obsoletes_dist'] = self['Obsoletes-Dist']
            data['project_url'] = [','.join(url) for url in
                                   self['Project-URL']]

        elif self['Metadata-Version'] == '1.1':
            data['provides'] = self['Provides']
            data['requires'] = self['Requires']
            data['obsoletes'] = self['Obsoletes']

        return data

    # Mapping API
    # (keys are restricted to the fields of the detected metadata version)

    def keys(self):
        return _version2fieldlist(self['Metadata-Version'])

    def __iter__(self):
        for key in self.keys():
            yield key

    def values(self):
        return [self[key] for key in list(self.keys())]

    def items(self):
        return [(key, self[key]) for key in list(self.keys())]
|
||||
9
Lib/packaging/pypi/__init__.py
Normal file
9
Lib/packaging/pypi/__init__.py
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
"""Low-level and high-level APIs to interact with project indexes."""
|
||||
|
||||
__all__ = ['simple',
|
||||
'xmlrpc',
|
||||
'dist',
|
||||
'errors',
|
||||
'mirrors']
|
||||
|
||||
from packaging.pypi.dist import ReleaseInfo, ReleasesList, DistInfo
|
||||
48
Lib/packaging/pypi/base.py
Normal file
48
Lib/packaging/pypi/base.py
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
"""Base class for index crawlers."""
|
||||
|
||||
from packaging.pypi.dist import ReleasesList
|
||||
|
||||
|
||||
class BaseClient:
    """Base class containing common methods for the index crawlers/clients.

    Subclasses are expected to provide ``get_release`` and a
    ``_projects`` mapping.
    """

    def __init__(self, prefer_final, prefer_source):
        # default preferences, used when a method is called without an
        # explicit override
        self._prefer_final = prefer_final
        self._prefer_source = prefer_source
        # by default the client is its own index reference
        self._index = self

    def _get_prefer_final(self, prefer_final=None):
        """Return the prefer_final internal parameter or the specified one if
        provided.

        An explicit argument always wins; only None falls back to the
        internal default.  (The previous truthiness test silently ignored
        an explicit ``False``.)
        """
        if prefer_final is None:
            return self._prefer_final
        return prefer_final

    def _get_prefer_source(self, prefer_source=None):
        """Return the prefer_source internal parameter or the specified one if
        provided (same None-vs-False handling as ``_get_prefer_final``).
        """
        if prefer_source is None:
            return self._prefer_source
        return prefer_source

    def _get_project(self, project_name):
        """Return a project instance, create it if necessary."""
        # self._projects is expected to be defined by the subclass
        return self._projects.setdefault(project_name.lower(),
                                         ReleasesList(project_name,
                                                      index=self._index))

    def download_distribution(self, requirements, temp_path=None,
                              prefer_source=None, prefer_final=None):
        """Download a distribution from the last release according to the
        requirements.

        If temp_path is provided, download to this path, otherwise, create a
        temporary location for the download and return it.
        """
        prefer_final = self._get_prefer_final(prefer_final)
        prefer_source = self._get_prefer_source(prefer_source)
        release = self.get_release(requirements, prefer_final)
        if release:
            dist = release.get_distribution(prefer_source=prefer_source)
            return dist.download(temp_path)
||||
547
Lib/packaging/pypi/dist.py
Normal file
547
Lib/packaging/pypi/dist.py
Normal file
|
|
@ -0,0 +1,547 @@
|
|||
"""Classes representing releases and distributions retrieved from indexes.
|
||||
|
||||
A project (= unique name) can have several releases (= versions) and
|
||||
each release can have several distributions (= sdist and bdists).
|
||||
|
||||
Release objects contain metadata-related information (see PEP 376);
|
||||
distribution objects contain download-related information.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import mimetypes
|
||||
import re
|
||||
import tempfile
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import hashlib
|
||||
from shutil import unpack_archive
|
||||
|
||||
from packaging.errors import IrrationalVersionError
|
||||
from packaging.version import (suggest_normalized_version, NormalizedVersion,
|
||||
get_version_predicate)
|
||||
from packaging.metadata import Metadata
|
||||
from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName,
|
||||
CantParseArchiveName)
|
||||
|
||||
|
||||
__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']

# archive extensions recognized when parsing distribution file names
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split()
# extracts the md5 checksum from a "#md5=..." URL fragment
MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$')
# distribution types a release may carry
DIST_TYPES = ['bdist', 'sdist']
|
||||
|
||||
|
||||
class IndexReference:
    """Mixin used to store the index reference.

    Gives retrieved objects a ``_index`` attribute pointing back at the
    index client they came from.
    """

    def set_index(self, index=None):
        """Remember *index* (an index client, or None) on this object."""
        self._index = index
|
||||
|
||||
|
||||
class ReleaseInfo(IndexReference):
|
||||
"""Represent a release of a project (a project with a specific version).
|
||||
The release contain the _metadata informations related to this specific
|
||||
version, and is also a container for distribution related informations.
|
||||
|
||||
See the DistInfo class for more information about distributions.
|
||||
"""
|
||||
|
||||
def __init__(self, name, version, metadata=None, hidden=False,
|
||||
index=None, **kwargs):
|
||||
"""
|
||||
:param name: the name of the distribution
|
||||
:param version: the version of the distribution
|
||||
:param metadata: the metadata fields of the release.
|
||||
:type metadata: dict
|
||||
:param kwargs: optional arguments for a new distribution.
|
||||
"""
|
||||
self.set_index(index)
|
||||
self.name = name
|
||||
self._version = None
|
||||
self.version = version
|
||||
if metadata:
|
||||
self.metadata = Metadata(mapping=metadata)
|
||||
else:
|
||||
self.metadata = None
|
||||
self.dists = {}
|
||||
self.hidden = hidden
|
||||
|
||||
if 'dist_type' in kwargs:
|
||||
dist_type = kwargs.pop('dist_type')
|
||||
self.add_distribution(dist_type, **kwargs)
|
||||
|
||||
def set_version(self, version):
|
||||
try:
|
||||
self._version = NormalizedVersion(version)
|
||||
except IrrationalVersionError:
|
||||
suggestion = suggest_normalized_version(version)
|
||||
if suggestion:
|
||||
self.version = suggestion
|
||||
else:
|
||||
raise IrrationalVersionError(version)
|
||||
|
||||
def get_version(self):
|
||||
return self._version
|
||||
|
||||
version = property(get_version, set_version)
|
||||
|
||||
def fetch_metadata(self):
|
||||
"""If the metadata is not set, use the indexes to get it"""
|
||||
if not self.metadata:
|
||||
self._index.get_metadata(self.name, str(self.version))
|
||||
return self.metadata
|
||||
|
||||
@property
|
||||
def is_final(self):
|
||||
"""proxy to version.is_final"""
|
||||
return self.version.is_final
|
||||
|
||||
def fetch_distributions(self):
|
||||
if self.dists is None:
|
||||
self._index.get_distributions(self.name, str(self.version))
|
||||
if self.dists is None:
|
||||
self.dists = {}
|
||||
return self.dists
|
||||
|
||||
def add_distribution(self, dist_type='sdist', python_version=None,
|
||||
**params):
|
||||
"""Add distribution informations to this release.
|
||||
If distribution information is already set for this distribution type,
|
||||
add the given url paths to the distribution. This can be useful while
|
||||
some of them fails to download.
|
||||
|
||||
:param dist_type: the distribution type (eg. "sdist", "bdist", etc.)
|
||||
:param params: the fields to be passed to the distribution object
|
||||
(see the :class:DistInfo constructor).
|
||||
"""
|
||||
if dist_type not in DIST_TYPES:
|
||||
raise ValueError(dist_type)
|
||||
if dist_type in self.dists:
|
||||
self.dists[dist_type].add_url(**params)
|
||||
else:
|
||||
self.dists[dist_type] = DistInfo(self, dist_type,
|
||||
index=self._index, **params)
|
||||
if python_version:
|
||||
self.dists[dist_type].python_version = python_version
|
||||
|
||||
def get_distribution(self, dist_type=None, prefer_source=True):
|
||||
"""Return a distribution.
|
||||
|
||||
If dist_type is set, find first for this distribution type, and just
|
||||
act as an alias of __get_item__.
|
||||
|
||||
If prefer_source is True, search first for source distribution, and if
|
||||
not return one existing distribution.
|
||||
"""
|
||||
if len(self.dists) == 0:
|
||||
raise LookupError()
|
||||
if dist_type:
|
||||
return self[dist_type]
|
||||
if prefer_source:
|
||||
if "sdist" in self.dists:
|
||||
dist = self["sdist"]
|
||||
else:
|
||||
dist = next(self.dists.values())
|
||||
return dist
|
||||
|
||||
def unpack(self, path=None, prefer_source=True):
|
||||
"""Unpack the distribution to the given path.
|
||||
|
||||
If not destination is given, creates a temporary location.
|
||||
|
||||
Returns the location of the extracted files (root).
|
||||
"""
|
||||
return self.get_distribution(prefer_source=prefer_source)\
|
||||
.unpack(path=path)
|
||||
|
||||
def download(self, temp_path=None, prefer_source=True):
|
||||
"""Download the distribution, using the requirements.
|
||||
|
||||
If more than one distribution match the requirements, use the last
|
||||
version.
|
||||
Download the distribution, and put it in the temp_path. If no temp_path
|
||||
is given, creates and return one.
|
||||
|
||||
Returns the complete absolute path to the downloaded archive.
|
||||
"""
|
||||
return self.get_distribution(prefer_source=prefer_source)\
|
||||
.download(path=temp_path)
|
||||
|
||||
def set_metadata(self, metadata):
|
||||
if not self.metadata:
|
||||
self.metadata = Metadata()
|
||||
self.metadata.update(metadata)
|
||||
|
||||
def __getitem__(self, item):
|
||||
"""distributions are available using release["sdist"]"""
|
||||
return self.dists[item]
|
||||
|
||||
def _check_is_comparable(self, other):
|
||||
if not isinstance(other, ReleaseInfo):
|
||||
raise TypeError("cannot compare %s and %s"
|
||||
% (type(self).__name__, type(other).__name__))
|
||||
elif self.name != other.name:
|
||||
raise TypeError("cannot compare %s and %s"
|
||||
% (self.name, other.name))
|
||||
|
||||
    def __repr__(self):
        # debugging-friendly "<name version>" representation
        return "<%s %s>" % (self.name, self.version)
|
||||
|
||||
def __eq__(self, other):
|
||||
self._check_is_comparable(other)
|
||||
return self.version == other.version
|
||||
|
||||
def __lt__(self, other):
|
||||
self._check_is_comparable(other)
|
||||
return self.version < other.version
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
return not (self.__lt__(other) or self.__eq__(other))
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__eq__(other) or self.__lt__(other)
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__eq__(other) or self.__gt__(other)
|
||||
|
||||
    # Defining __eq__ sets __hash__ to None, which would make instances
    # unhashable; restore identity-based hashing explicitly.
    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
|
||||
|
||||
|
||||
class DistInfo(IndexReference):
    """Represents a distribution retrieved from an index (sdist, bdist, ...)
    """

    def __init__(self, release, dist_type=None, url=None, hashname=None,
                 hashval=None, is_external=True, python_version=None,
                 index=None):
        """Create a new instance of DistInfo.

        :param release: a DistInfo class is relative to a release.
        :param dist_type: the type of the dist (eg. source, bin-*, etc.)
        :param url: URL where we found this distribution
        :param hashname: the name of the hash we want to use. Refer to the
                         hashlib.new documentation for more information.
        :param hashval: the hash value.
        :param is_external: we need to know if the provided url comes from
                            an index browsing, or from an external resource.
        """
        self.set_index(index)
        self.release = release
        self.dist_type = dist_type
        self.python_version = python_version
        self._unpacked_dir = None
        # The downloaded path is None until the first download, then cached
        # so distributions are not downloaded multiple times.
        self.downloaded_location = None
        # URLs are stored as dicts because more information than the bare
        # URL is needed (hash, origin).  "urls" holds the candidates and
        # "_url" caches the one chosen by the "url" property, so the
        # selection process runs only once.
        self.urls = []
        self._url = None
        self.add_url(url, hashname, hashval, is_external)

    def add_url(self, url=None, hashname=None, hashval=None, is_external=True):
        """Add a new url to the list of urls.

        Raise UnsupportedHashName if *hashname* is not known to hashlib.
        """
        if hashname is not None:
            try:
                hashlib.new(hashname)
            except ValueError:
                raise UnsupportedHashName(hashname)
        if url not in [u['url'] for u in self.urls]:
            self.urls.append({
                'url': url,
                'hashname': hashname,
                'hashval': hashval,
                'is_external': is_external,
            })
            # a new candidate invalidates the cached url selection
            self._url = None

    @property
    def url(self):
        """Pick up the right url for the list of urls in self.urls"""
        # Internal urls are preferred over external ones; if more than one
        # candidate remains, the first one wins.
        if self._url is None:
            if len(self.urls) > 1:
                internals_urls = [u for u in self.urls
                                  if not u['is_external']]
                if len(internals_urls) >= 1:
                    self._url = internals_urls[0]
            if self._url is None:
                self._url = self.urls[0]
        return self._url

    @property
    def is_source(self):
        """return if the distribution is a source one or not"""
        return self.dist_type == 'sdist'

    def download(self, path=None):
        """Download the distribution to a path, and return it.

        If the path is given in path, use this, otherwise, generates a new one
        Return the download location.
        """
        if path is None:
            path = tempfile.mkdtemp()

        # if we have not downloaded it yet, do it.
        if self.downloaded_location is None:
            url = self.url['url']
            archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]
            filename, headers = urllib.request.urlretrieve(
                url, path + "/" + archive_name)
            self.downloaded_location = filename
            self._check_md5(filename)
        return self.downloaded_location

    def unpack(self, path=None):
        """Unpack the distribution to the given path.

        If no destination is given, creates a temporary location.

        Returns the location of the extracted files (root).
        """
        if not self._unpacked_dir:
            if path is None:
                path = tempfile.mkdtemp()

            filename = self.download(path)
            unpack_archive(filename, path)
            self._unpacked_dir = path

        # BUG FIX: always return the directory the files were actually
        # extracted to; the original returned the *path* argument even when
        # a previous call had unpacked to a different location.
        return self._unpacked_dir

    def _check_md5(self, filename):
        """Check that the hash checksum of the given file matches the one in
        the url param"""
        hashname = self.url['hashname']
        expected_hashval = self.url['hashval']
        # only verify when both a hash name and value were recorded
        if None not in (expected_hashval, hashname):
            with open(filename, 'rb') as f:
                hashval = hashlib.new(hashname)
                hashval.update(f.read())

            if hashval.hexdigest() != expected_hashval:
                raise HashDoesNotMatch("got %s instead of %s"
                                       % (hashval.hexdigest(),
                                          expected_hashval))

    def __repr__(self):
        if self.release is None:
            return "<? ? %s>" % self.dist_type

        return "<%s %s %s>" % (
            self.release.name, self.release.version, self.dist_type or "")
|
||||
|
||||
|
||||
class ReleasesList(IndexReference):
    """A container of Release.

    Provides useful methods and facilities to sort and filter releases.
    """
    def __init__(self, name, releases=None, contains_hidden=False, index=None):
        self.set_index(index)
        self.releases = []
        self.name = name
        self.contains_hidden = contains_hidden
        if releases:
            self.add_releases(releases)

    def fetch_releases(self):
        """Ask the index to crawl the project's releases, then return them."""
        self._index.get_releases(self.name)
        return self.releases

    def filter(self, predicate):
        """Filter and return a subset of releases matching the given predicate.
        """
        return ReleasesList(self.name, [release for release in self.releases
                                        if predicate.match(release.version)],
                            index=self._index)

    def get_last(self, requirements, prefer_final=None):
        """Return the "last" release that satisfies the given predicates.

        "last" is defined by the version number of the releases; you can also
        set the prefer_final parameter to True or False to change the order of
        the results.  Return None when nothing matches.
        """
        predicate = get_version_predicate(requirements)
        releases = self.filter(predicate)
        if len(releases) == 0:
            return None
        releases.sort_releases(prefer_final, reverse=True)
        return releases[0]

    def add_releases(self, releases):
        """Add releases in the release list.

        :param: releases is a list of ReleaseInfo objects.
        """
        for r in releases:
            self.add_release(release=r)

    def add_release(self, version=None, dist_type='sdist', release=None,
                    **dist_args):
        """Add a release to the list.

        The release can be passed in the `release` parameter, in which case
        it will be crawled to extract the useful information if necessary;
        or the release information can be directly passed in the `version`
        and `dist_type` arguments.

        Other keyword arguments are forwarded to the distribution creation
        (eg. the arguments of the DistInfo constructor).
        """
        if release:
            if release.name.lower() != self.name.lower():
                raise ValueError("%s is not the same project as %s" %
                                 (release.name, self.name))
            version = str(release.version)

            if version not in self.get_versions():
                # append only if not already exists
                self.releases.append(release)
            for dist in release.dists.values():
                for url in dist.urls:
                    self.add_release(version, dist.dist_type, **url)
        else:
            matches = [r for r in self.releases
                       if str(r.version) == version and r.name == self.name]
            if not matches:
                release = ReleaseInfo(self.name, version, index=self._index)
                self.releases.append(release)
            else:
                release = matches[0]

            release.add_distribution(dist_type=dist_type, **dist_args)

    def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs):
        """Sort the results with the given properties.

        The `prefer_final` argument can be used to specify if final
        distributions (eg. not dev, beta or alpha) should be preferred or
        not.

        Results can be inverted by using `reverse`.

        Any other parameter provided will be forwarded to the sorted call.
        You cannot redefine the key argument of "sorted" here, as it is used
        internally to sort the releases.
        """
        sort_by = []
        if prefer_final:
            sort_by.append("is_final")
        sort_by.append("version")

        self.releases.sort(
            key=lambda i: tuple(getattr(i, arg) for arg in sort_by),
            reverse=reverse, *args, **kwargs)

    def get_release(self, version):
        """Return a release from its version."""
        matches = [r for r in self.releases if str(r.version) == version]
        if len(matches) != 1:
            raise KeyError(version)
        return matches[0]

    def get_versions(self):
        """Return a list of releases versions contained"""
        return [str(r.version) for r in self.releases]

    def __getitem__(self, key):
        return self.releases[key]

    def __len__(self):
        return len(self.releases)

    def __repr__(self):
        string = 'Project "%s"' % self.name
        if self.get_versions():
            string += ' versions: %s' % ', '.join(self.get_versions())
        return '<%s>' % string
|
||||
|
||||
|
||||
def get_infos_from_url(url, probable_dist_name=None, is_external=True):
    """Get useful informations from an URL.

    Return a dict of (name, version, url, hashtype, hash, is_external)

    :param url: complete url of the distribution
    :param probable_dist_name: A probable name of the project.
    :param is_external: Tell if the url commes from an index or from
                        an external URL.
    """
    # extract an eventual md5 hash from the url, and strip the fragment
    md5_hash = None
    hash_match = MD5_HASH.match(url)
    if hash_match is not None:
        md5_hash = hash_match.group(1)
        url = url.replace("#md5=%s" % md5_hash, "")

    # the archive name is the last component of the url path
    archive_name = urllib.parse.urlparse(url)[2].split('/')[-1]

    # strip every recognized extension from the archive name
    extension_matched = False
    for ext in EXTENSIONS:
        if archive_name.endswith(ext):
            archive_name = archive_name[:-len(ext)]
            extension_matched = True

    name, version = split_archive_name(archive_name)
    # implicitly returns None when no known extension was found
    if extension_matched:
        return {'name': name,
                'version': version,
                'url': url,
                'hashname': "md5",
                'hashval': md5_hash,
                'is_external': is_external,
                'dist_type': 'sdist'}
|
||||
|
||||
|
||||
def split_archive_name(archive_name, probable_name=None):
    """Split an archive name into two parts: name and version.

    Return the tuple (name, version)

    :param archive_name: archive name, extension already removed
    :param probable_name: optional hint for the project name
    :raises CantParseArchiveName: when no plausible version can be found.
    """
    # Try to determine which part is the name and which is the version
    # using the "-" separator. Take the larger part to be the version
    # number, then reduce if this does not work.
    def eager_split(text, maxsplit=2):
        # split using the "-" separator
        splits = text.rsplit("-", maxsplit)
        name = splits[0]
        version = "-".join(splits[1:])
        if version.startswith("-"):
            version = version[1:]
        if suggest_normalized_version(version) is None and maxsplit >= 0:
            # we did not get a good version number: recurse!
            return eager_split(text, maxsplit - 1)
        else:
            return name, version
    if probable_name is not None:
        probable_name = probable_name.lower()
    name = None
    if probable_name is not None and probable_name in archive_name:
        # we get the name from probable_name, if given.
        name = probable_name
        # BUG FIX: the original used archive_name.lstrip(name), but
        # str.lstrip strips a *set of characters*, not a prefix, and could
        # eat into the version.  Slice the remainder after the name instead.
        idx = archive_name.index(probable_name)
        version = archive_name[idx + len(probable_name):].lstrip('-')
    else:
        name, version = eager_split(archive_name)

    version = suggest_normalized_version(version)
    if version is not None and name != "":
        return name.lower(), version
    else:
        raise CantParseArchiveName(archive_name)
|
||||
39
Lib/packaging/pypi/errors.py
Normal file
39
Lib/packaging/pypi/errors.py
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
"""Exceptions raised by packaging.pypi code."""
|
||||
|
||||
from packaging.errors import PackagingPyPIError
|
||||
|
||||
|
||||
class ProjectNotFound(PackagingPyPIError):
|
||||
"""Project has not been found"""
|
||||
|
||||
|
||||
class DistributionNotFound(PackagingPyPIError):
|
||||
"""The release has not been found"""
|
||||
|
||||
|
||||
class ReleaseNotFound(PackagingPyPIError):
|
||||
"""The release has not been found"""
|
||||
|
||||
|
||||
class CantParseArchiveName(PackagingPyPIError):
|
||||
"""An archive name can't be parsed to find distribution name and version"""
|
||||
|
||||
|
||||
class DownloadError(PackagingPyPIError):
|
||||
"""An error has occurs while downloading"""
|
||||
|
||||
|
||||
class HashDoesNotMatch(DownloadError):
|
||||
"""Compared hashes does not match"""
|
||||
|
||||
|
||||
class UnsupportedHashName(PackagingPyPIError):
|
||||
"""A unsupported hashname has been used"""
|
||||
|
||||
|
||||
class UnableToDownload(PackagingPyPIError):
|
||||
"""All mirrors have been tried, without success"""
|
||||
|
||||
|
||||
class InvalidSearchField(PackagingPyPIError):
|
||||
"""An invalid search field has been used"""
|
||||
52
Lib/packaging/pypi/mirrors.py
Normal file
52
Lib/packaging/pypi/mirrors.py
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
"""Utilities related to the mirror infrastructure defined in PEP 381."""
|
||||
|
||||
from string import ascii_lowercase
|
||||
import socket
|
||||
|
||||
DEFAULT_MIRROR_URL = "last.pypi.python.org"
|
||||
|
||||
|
||||
def get_mirrors(hostname=None):
    """Return the list of mirrors from the last record found on the DNS
    entry::

    >>> from packaging.pypi.mirrors import get_mirrors
    >>> get_mirrors()
    ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org',
     'd.pypi.python.org']

    """
    if hostname is None:
        hostname = DEFAULT_MIRROR_URL

    # return the last mirror registered on PyPI.
    try:
        hostname = socket.gethostbyname_ex(hostname)[0]
    except socket.gaierror:
        # DNS resolution failed: report no mirrors rather than crashing
        return []
    # NOTE(review): assumes the canonical hostname contains at least one
    # dot ("<letters>.<domain>"); a dotless answer would make the
    # end_letter[1] access below raise IndexError -- confirm upstream
    end_letter = hostname.split(".", 1)

    # determine the list from the last one.
    return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
|
||||
|
||||
|
||||
def string_range(last):
    """Generate the range of strings between "a" and *last*, inclusive.

    This works for simple "a to z" lists, but also for "a to zz" lists:
    all one-letter strings are yielded first, then two-letter combinations,
    and so on, stopping as soon as *last* has been produced.
    """
    # use the stdlib cartesian product instead of the hand-rolled recipe
    from itertools import product as _product
    for width in range(1, len(last) + 1):
        for letters in _product(ascii_lowercase, repeat=width):
            result = ''.join(letters)
            yield result
            if result == last:
                return
|
||||
|
||||
|
||||
def product(*args, **kwds):
    """Cartesian product of the input iterables, yielding tuples.

    Thin wrapper around itertools.product, replacing the hand-rolled
    recipe.  As before, only the 'repeat' keyword is honoured (default 1);
    any other keyword is silently ignored for backward compatibility.
    """
    from itertools import product as _itertools_product
    return _itertools_product(*args, repeat=kwds.get('repeat', 1))
|
||||
452
Lib/packaging/pypi/simple.py
Normal file
452
Lib/packaging/pypi/simple.py
Normal file
|
|
@ -0,0 +1,452 @@
|
|||
"""Spider using the screen-scraping "simple" PyPI API.
|
||||
|
||||
This module contains the class SimpleIndexCrawler, a simple spider that
|
||||
can be used to find and retrieve distributions from a project index
|
||||
(like the Python Package Index), using its so-called simple API (see
|
||||
reference implementation available at http://pypi.python.org/simple/).
|
||||
"""
|
||||
|
||||
import http.client
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import urllib.error
|
||||
import os
|
||||
|
||||
|
||||
from fnmatch import translate
|
||||
from packaging import logger
|
||||
from packaging.metadata import Metadata
|
||||
from packaging.version import get_version_predicate
|
||||
from packaging import __version__ as packaging_version
|
||||
from packaging.pypi.base import BaseClient
|
||||
from packaging.pypi.dist import (ReleasesList, EXTENSIONS,
|
||||
get_infos_from_url, MD5_HASH)
|
||||
from packaging.pypi.errors import (PackagingPyPIError, DownloadError,
|
||||
UnableToDownload, CantParseArchiveName,
|
||||
ReleaseNotFound, ProjectNotFound)
|
||||
from packaging.pypi.mirrors import get_mirrors
|
||||
from packaging.metadata import Metadata
|
||||
|
||||
__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL']
|
||||
|
||||
# -- Constants -----------------------------------------------
|
||||
DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/"
|
||||
DEFAULT_HOSTS = ("*",)
|
||||
SOCKET_TIMEOUT = 15
|
||||
USER_AGENT = "Python-urllib/%s packaging/%s" % (
|
||||
sys.version[:3], packaging_version)
|
||||
|
||||
# -- Regexps -------------------------------------------------
|
||||
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
|
||||
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
|
||||
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
|
||||
|
||||
# This pattern matches a character entity reference (a decimal numeric
|
||||
# references, a hexadecimal numeric reference, or a named reference).
|
||||
ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
|
||||
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
|
||||
|
||||
|
||||
def socket_timeout(timeout=SOCKET_TIMEOUT):
    """Decorator to add a socket timeout when requesting pages on PyPI.

    A ``_timeout`` attribute on the instance overrides the *timeout*
    argument when present; the previous default timeout is always restored.
    """
    def _socket_timeout(func):
        from functools import wraps

        @wraps(func)  # preserve the wrapped method's name and docstring
        def wrapper(self, *args, **kwargs):
            old_timeout = socket.getdefaulttimeout()
            # per-instance override takes precedence over the decorator arg
            if hasattr(self, "_timeout"):
                effective_timeout = self._timeout
            else:
                effective_timeout = timeout
            socket.setdefaulttimeout(effective_timeout)
            try:
                return func(self, *args, **kwargs)
            finally:
                # always restore the previous global default timeout
                socket.setdefaulttimeout(old_timeout)
        return wrapper
    return _socket_timeout
|
||||
|
||||
|
||||
def with_mirror_support():
    """Decorator that makes the mirroring support easier.

    On DownloadError the call is retried: once ``_mirrors_tries`` reaches
    ``_mirrors_max_tries`` the client switches to the next mirror; when no
    mirror is left, UnableToDownload is raised.
    """
    from functools import wraps

    def wrapper(func):
        @wraps(func)  # keep the decorated method's metadata
        def wrapped(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except DownloadError:
                # if an error occurs, try with the next index_url
                if self._mirrors_tries >= self._mirrors_max_tries:
                    try:
                        self._switch_to_next_mirror()
                    except KeyError:
                        raise UnableToDownload("Tried all mirrors")
                else:
                    self._mirrors_tries += 1
                # cached project data may come from the failing mirror
                self._projects.clear()
                return wrapped(self, *args, **kwargs)
        return wrapped
    return wrapper
|
||||
|
||||
|
||||
class Crawler(BaseClient):
|
||||
"""Provides useful tools to request the Python Package Index simple API.
|
||||
|
||||
You can specify both mirrors and mirrors_url, but mirrors_url will only be
|
||||
used if mirrors is set to None.
|
||||
|
||||
:param index_url: the url of the simple index to search on.
|
||||
:param prefer_final: if the version is not mentioned, and the last
|
||||
version is not a "final" one (alpha, beta, etc.),
|
||||
pick up the last final version.
|
||||
:param prefer_source: if the distribution type is not mentioned, pick up
|
||||
the source one if available.
|
||||
:param follow_externals: tell if following external links is needed or
|
||||
not. Default is False.
|
||||
:param hosts: a list of hosts allowed to be processed while using
|
||||
follow_externals=True. Default behavior is to follow all
|
||||
hosts.
|
||||
:param follow_externals: tell if following external links is needed or
|
||||
not. Default is False.
|
||||
:param mirrors_url: the url to look on for DNS records giving mirror
|
||||
adresses.
|
||||
:param mirrors: a list of mirrors (see PEP 381).
|
||||
:param timeout: time in seconds to consider a url has timeouted.
|
||||
:param mirrors_max_tries": number of times to try requesting informations
|
||||
on mirrors before switching.
|
||||
"""
|
||||
|
||||
def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
|
||||
prefer_source=True, hosts=DEFAULT_HOSTS,
|
||||
follow_externals=False, mirrors_url=None, mirrors=None,
|
||||
timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
|
||||
super(Crawler, self).__init__(prefer_final, prefer_source)
|
||||
self.follow_externals = follow_externals
|
||||
|
||||
# mirroring attributes.
|
||||
if not index_url.endswith("/"):
|
||||
index_url += "/"
|
||||
# if no mirrors are defined, use the method described in PEP 381.
|
||||
if mirrors is None:
|
||||
mirrors = get_mirrors(mirrors_url)
|
||||
self._mirrors = set(mirrors)
|
||||
self._mirrors_used = set()
|
||||
self.index_url = index_url
|
||||
self._mirrors_max_tries = mirrors_max_tries
|
||||
self._mirrors_tries = 0
|
||||
self._timeout = timeout
|
||||
|
||||
# create a regexp to match all given hosts
|
||||
self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match
|
||||
|
||||
# we keep an index of pages we have processed, in order to avoid
|
||||
# scanning them multple time (eg. if there is multiple pages pointing
|
||||
# on one)
|
||||
self._processed_urls = []
|
||||
self._projects = {}
|
||||
|
||||
@with_mirror_support()
|
||||
def search_projects(self, name=None, **kwargs):
|
||||
"""Search the index for projects containing the given name.
|
||||
|
||||
Return a list of names.
|
||||
"""
|
||||
with self._open_url(self.index_url) as index:
|
||||
if '*' in name:
|
||||
name.replace('*', '.*')
|
||||
else:
|
||||
name = "%s%s%s" % ('*.?', name, '*.?')
|
||||
name = name.replace('*', '[^<]*') # avoid matching end tag
|
||||
projectname = re.compile('<a[^>]*>(%s)</a>' % name, re.I)
|
||||
matching_projects = []
|
||||
|
||||
index_content = index.read()
|
||||
|
||||
# FIXME should use bytes I/O and regexes instead of decoding
|
||||
index_content = index_content.decode()
|
||||
|
||||
for match in projectname.finditer(index_content):
|
||||
project_name = match.group(1)
|
||||
matching_projects.append(self._get_project(project_name))
|
||||
return matching_projects
|
||||
|
||||
def get_releases(self, requirements, prefer_final=None,
|
||||
force_update=False):
|
||||
"""Search for releases and return a ReleaseList object containing
|
||||
the results.
|
||||
"""
|
||||
predicate = get_version_predicate(requirements)
|
||||
if predicate.name.lower() in self._projects and not force_update:
|
||||
return self._projects.get(predicate.name.lower())
|
||||
prefer_final = self._get_prefer_final(prefer_final)
|
||||
logger.info('reading info on PyPI about %s', predicate.name)
|
||||
self._process_index_page(predicate.name)
|
||||
|
||||
if predicate.name.lower() not in self._projects:
|
||||
raise ProjectNotFound()
|
||||
|
||||
releases = self._projects.get(predicate.name.lower())
|
||||
releases.sort_releases(prefer_final=prefer_final)
|
||||
return releases
|
||||
|
||||
def get_release(self, requirements, prefer_final=None):
|
||||
"""Return only one release that fulfill the given requirements"""
|
||||
predicate = get_version_predicate(requirements)
|
||||
release = self.get_releases(predicate, prefer_final)\
|
||||
.get_last(predicate)
|
||||
if not release:
|
||||
raise ReleaseNotFound("No release matches the given criterias")
|
||||
return release
|
||||
|
||||
def get_distributions(self, project_name, version):
|
||||
"""Return the distributions found on the index for the specific given
|
||||
release"""
|
||||
# as the default behavior of get_release is to return a release
|
||||
# containing the distributions, just alias it.
|
||||
return self.get_release("%s (%s)" % (project_name, version))
|
||||
|
||||
def get_metadata(self, project_name, version):
|
||||
"""Return the metadatas from the simple index.
|
||||
|
||||
Currently, download one archive, extract it and use the PKG-INFO file.
|
||||
"""
|
||||
release = self.get_distributions(project_name, version)
|
||||
if not release.metadata:
|
||||
location = release.get_distribution().unpack()
|
||||
pkg_info = os.path.join(location, 'PKG-INFO')
|
||||
release.metadata = Metadata(pkg_info)
|
||||
return release
|
||||
|
||||
def _switch_to_next_mirror(self):
|
||||
"""Switch to the next mirror (eg. point self.index_url to the next
|
||||
mirror url.
|
||||
|
||||
Raise a KeyError if all mirrors have been tried.
|
||||
"""
|
||||
self._mirrors_used.add(self.index_url)
|
||||
index_url = self._mirrors.pop()
|
||||
if not ("http://" or "https://" or "file://") in index_url:
|
||||
index_url = "http://%s" % index_url
|
||||
|
||||
if not index_url.endswith("/simple"):
|
||||
index_url = "%s/simple/" % index_url
|
||||
|
||||
self.index_url = index_url
|
||||
|
||||
def _is_browsable(self, url):
|
||||
"""Tell if the given URL can be browsed or not.
|
||||
|
||||
It uses the follow_externals and the hosts list to tell if the given
|
||||
url is browsable or not.
|
||||
"""
|
||||
# if _index_url is contained in the given URL, we are browsing the
|
||||
# index, and it's always "browsable".
|
||||
# local files are always considered browable resources
|
||||
if self.index_url in url or urllib.parse.urlparse(url)[0] == "file":
|
||||
return True
|
||||
elif self.follow_externals:
|
||||
if self._allowed_hosts(urllib.parse.urlparse(url)[1]): # 1 is netloc
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
def _is_distribution(self, link):
|
||||
"""Tell if the given URL matches to a distribution name or not.
|
||||
"""
|
||||
#XXX find a better way to check that links are distributions
|
||||
# Using a regexp ?
|
||||
for ext in EXTENSIONS:
|
||||
if ext in link:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _register_release(self, release=None, release_info={}):
|
||||
"""Register a new release.
|
||||
|
||||
Both a release or a dict of release_info can be provided, the prefered
|
||||
way (eg. the quicker) is the dict one.
|
||||
|
||||
Return the list of existing releases for the given project.
|
||||
"""
|
||||
# Check if the project already has a list of releases (refering to
|
||||
# the project name). If not, create a new release list.
|
||||
# Then, add the release to the list.
|
||||
if release:
|
||||
name = release.name
|
||||
else:
|
||||
name = release_info['name']
|
||||
if not name.lower() in self._projects:
|
||||
self._projects[name.lower()] = ReleasesList(name, index=self._index)
|
||||
|
||||
if release:
|
||||
self._projects[name.lower()].add_release(release=release)
|
||||
else:
|
||||
name = release_info.pop('name')
|
||||
version = release_info.pop('version')
|
||||
dist_type = release_info.pop('dist_type')
|
||||
self._projects[name.lower()].add_release(version, dist_type,
|
||||
**release_info)
|
||||
return self._projects[name.lower()]
|
||||
|
||||
    def _process_url(self, url, project_name=None, follow_links=True):
        """Process an url and search for distributions packages.

        For each URL found, if it's a download, creates a PyPIdistribution
        object. If it's a homepage and we can follow links, process it too.

        :param url: the url to process
        :param project_name: the project name we are searching for.
        :param follow_links: Do not want to follow links more than from one
                             level. This parameter tells if we want to follow
                             the links we find (eg. run recursively this
                             method on it)
        """
        with self._open_url(url) as f:
            base_url = f.url
            # never scan the same page twice
            if url not in self._processed_urls:
                self._processed_urls.append(url)
                link_matcher = self._get_link_matcher(url)
                for link, is_download in link_matcher(f.read().decode(), base_url):
                    if link not in self._processed_urls:
                        if self._is_distribution(link) or is_download:
                            self._processed_urls.append(link)
                            # it's a distribution, so create a dist object
                            try:
                                # a link not under the index is external
                                infos = get_infos_from_url(link, project_name,
                                            is_external=not self.index_url in url)
                            except CantParseArchiveName as e:
                                # unparsable archives are skipped, not fatal
                                logger.warning(
                                    "version has not been parsed: %s", e)
                            else:
                                self._register_release(release_info=infos)
                        else:
                            # recurse only one level deep into homepages
                            if self._is_browsable(link) and follow_links:
                                self._process_url(link, project_name,
                                    follow_links=False)
|
||||
|
||||
def _get_link_matcher(self, url):
|
||||
"""Returns the right link matcher function of the given url
|
||||
"""
|
||||
if self.index_url in url:
|
||||
return self._simple_link_matcher
|
||||
else:
|
||||
return self._default_link_matcher
|
||||
|
||||
def _get_full_url(self, url, base_url):
|
||||
return urllib.parse.urljoin(base_url, self._htmldecode(url))
|
||||
|
||||
    def _simple_link_matcher(self, content, base_url):
        """Yield all links with a rel="download" or rel="homepage".

        This matches the simple index requirements for matching links.
        If follow_externals is set to False, don't yield the external
        urls.

        :param content: the content of the page we want to parse
        :param base_url: the url of this page.
        """
        # archive links carrying a #md5= fragment are direct downloads
        for match in HREF.finditer(content):
            url = self._get_full_url(match.group(1), base_url)
            if MD5_HASH.match(url):
                yield (url, True)

        for match in REL.finditer(content):
            # search for rel links.
            tag, rel = match.groups()
            rels = [s.strip() for s in rel.lower().split(',')]
            if 'homepage' in rels or 'download' in rels:
                # NOTE: the inner loop deliberately rebinds "match" to the
                # href matches inside the tag
                for match in HREF.finditer(tag):
                    url = self._get_full_url(match.group(1), base_url)
                    if 'download' in rels or self._is_browsable(url):
                        # yield a list of (url, is_download)
                        yield (url, 'download' in rels)
|
||||
|
||||
def _default_link_matcher(self, content, base_url):
|
||||
"""Yield all links found on the page.
|
||||
"""
|
||||
for match in HREF.finditer(content):
|
||||
url = self._get_full_url(match.group(1), base_url)
|
||||
if self._is_browsable(url):
|
||||
yield (url, False)
|
||||
|
||||
@with_mirror_support()
|
||||
def _process_index_page(self, name):
|
||||
"""Find and process a PyPI page for the given project name.
|
||||
|
||||
:param name: the name of the project to find the page
|
||||
"""
|
||||
# Browse and index the content of the given PyPI page.
|
||||
url = self.index_url + name + "/"
|
||||
self._process_url(url, name)
|
||||
|
||||
@socket_timeout()
def _open_url(self, url):
    """Open a urllib request, handling HTTP authentication and local
    files support.

    Supports http, https and file schemes; a "user:password@host"
    netloc on http(s) URLs is turned into a Basic Authorization
    header.  Returns the file-like object from urlopen (an HTTPError
    is returned as-is, since it is also file-like).

    :param url: the URL to open.
    :raises PackagingPyPIError: for malformed URLs.
    :raises DownloadError: for network-level failures.
    """
    import base64

    scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)

    # authentication stuff
    if scheme in ('http', 'https'):
        auth, host = urllib.parse.splituser(netloc)
    else:
        auth = None

    # add index.html automatically for filesystem paths
    if scheme == 'file':
        if url.endswith('/'):
            url += "index.html"

    # add authorization headers if auth is provided
    if auth:
        # BUGFIX: str.encode('base64') was a Python 2 codec idiom and
        # raises LookupError on Python 3; build the Basic credentials
        # with the base64 module instead.
        credentials = urllib.parse.unquote(auth).encode('utf-8')
        auth = "Basic " + \
            base64.standard_b64encode(credentials).decode('ascii')
        new_url = urllib.parse.urlunparse((
            scheme, host, path, params, query, frag))
        request = urllib.request.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib.request.Request(url)
    request.add_header('User-Agent', USER_AGENT)
    try:
        fp = urllib.request.urlopen(request)
    except (ValueError, http.client.InvalidURL) as v:
        msg = ' '.join([str(arg) for arg in v.args])
        raise PackagingPyPIError('%s %s' % (url, msg))
    except urllib.error.HTTPError as v:
        # an HTTP error page is still a readable response
        return v
    except urllib.error.URLError as v:
        raise DownloadError("Download error for %s: %s" % (url, v.reason))
    except http.client.BadStatusLine as v:
        raise DownloadError('%s returned a bad status line. '
                            'The server might be down, %s' % (url, v.line))
    except http.client.HTTPException as v:
        raise DownloadError("Download error for %s: %s" % (url, v))
    except socket.timeout:
        raise DownloadError("The server timeouted")

    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = \
            urllib.parse.urlparse(fp.url)
        if s2 == scheme and h2 == host:
            fp.url = urllib.parse.urlunparse(
                (s2, netloc, path2, param2, query2, frag2))
    return fp
|
||||
|
||||
def _decode_entity(self, match):
|
||||
what = match.group(1)
|
||||
if what.startswith('#x'):
|
||||
what = int(what[2:], 16)
|
||||
elif what.startswith('#'):
|
||||
what = int(what[1:])
|
||||
else:
|
||||
from html.entities import name2codepoint
|
||||
what = name2codepoint.get(what, match.group(0))
|
||||
return chr(what)
|
||||
|
||||
def _htmldecode(self, text):
    """Return *text* with every HTML entity replaced by its character."""
    return ENTITY_SUB(self._decode_entity, text)
|
||||
99
Lib/packaging/pypi/wrapper.py
Normal file
99
Lib/packaging/pypi/wrapper.py
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
"""Convenient client for all PyPI APIs.
|
||||
|
||||
This module provides a ClientWrapper class which will use the "simple"
|
||||
or XML-RPC API to request information or files from an index.
|
||||
"""
|
||||
|
||||
from packaging.pypi import simple, xmlrpc
|
||||
|
||||
_WRAPPER_MAPPINGS = {'get_release': 'simple',
|
||||
'get_releases': 'simple',
|
||||
'search_projects': 'simple',
|
||||
'get_metadata': 'xmlrpc',
|
||||
'get_distributions': 'simple'}
|
||||
|
||||
_WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client,
|
||||
'simple': simple.Crawler}
|
||||
|
||||
|
||||
def switch_index_if_fails(func, wrapper):
    """Decorator that switches index (for instance from xmlrpc to simple)
    when the preferred index raises an exception.
    """
    def decorator(*args, **kwargs):
        # Build the candidate list: the given bound method first, then
        # the same-named method on every other index of the wrapper.
        candidates = [func]
        for index in wrapper._indexes.values():
            if index != func.__self__ and hasattr(index, func.__name__):
                candidates.append(getattr(index, func.__name__))

        last_error = None
        for candidate in candidates:
            try:
                return candidate(*args, **kwargs)
            except Exception as e:
                last_error = e
        # every candidate failed: re-raise the last error seen
        if last_error is not None:
            raise last_error
    return decorator
|
||||
|
||||
|
||||
class ClientWrapper:
    """Wrapper around simple and xmlrpc clients.

    Choose the best implementation to use depending on the needs, using
    the given mappings.
    If one of the indexes returns an error, tries to use other indexes.

    :param default_index: tell which index to rely on by default.
    :param index_classes: a dict of name:class to use as indexes.
    :param indexes: a dict of name:index already instantiated
    :param mappings: the mappings to use for this wrapper
    """

    def __init__(self, default_index='simple', index_classes=_WRAPPER_INDEXES,
                 indexes=None, mappings=_WRAPPER_MAPPINGS):
        # BUGFIX: the old default ``indexes={}`` was a mutable default
        # argument that was mutated below (setdefault), so every
        # wrapper created without an explicit dict shared the same
        # index instances.  Use None and create a fresh dict instead.
        if indexes is None:
            indexes = {}
        self._projects = {}
        self._mappings = mappings
        self._indexes = indexes
        self._default_index = default_index

        # instantiate the classes and set their _project attribute to the one
        # of the wrapper.
        for name, cls in index_classes.items():
            obj = self._indexes.setdefault(name, cls())
            obj._projects = self._projects
            obj._index = self

    def __getattr__(self, method_name):
        """When asking for methods of the wrapper, return the implementation of
        the wrapped classes, depending on the mapping.

        Decorate the methods to switch implementation if an error occurs.
        """
        real_method = None
        # BUGFIX: use the per-instance mappings; the original consulted
        # the module-level _WRAPPER_MAPPINGS and silently ignored the
        # ``mappings`` constructor argument.
        if method_name in self._mappings:
            obj = self._indexes[self._mappings[method_name]]
            real_method = getattr(obj, method_name)
        else:
            # the method is not defined in the mappings, so we try first to get
            # it via the default index, and rely on others if needed.
            try:
                real_method = getattr(self._indexes[self._default_index],
                                      method_name)
            except AttributeError:
                other_indexes = [i for i in self._indexes
                                 if i != self._default_index]
                for index in other_indexes:
                    real_method = getattr(self._indexes[index], method_name,
                                          None)
                    if real_method:
                        break
        if real_method:
            return switch_index_if_fails(real_method, self)
        else:
            raise AttributeError("No index has attribute '%s'" % method_name)
|
||||
200
Lib/packaging/pypi/xmlrpc.py
Normal file
200
Lib/packaging/pypi/xmlrpc.py
Normal file
|
|
@ -0,0 +1,200 @@
|
|||
"""Spider using the XML-RPC PyPI API.
|
||||
|
||||
This module contains the class Client, a spider that can be used to find
|
||||
and retrieve distributions from a project index (like the Python Package
|
||||
Index), using its XML-RPC API (see documentation of the reference
|
||||
implementation at http://wiki.python.org/moin/PyPiXmlRpc).
|
||||
"""
|
||||
|
||||
import xmlrpc.client
|
||||
|
||||
from packaging import logger
|
||||
from packaging.errors import IrrationalVersionError
|
||||
from packaging.version import get_version_predicate
|
||||
from packaging.pypi.base import BaseClient
|
||||
from packaging.pypi.errors import (ProjectNotFound, InvalidSearchField,
|
||||
ReleaseNotFound)
|
||||
from packaging.pypi.dist import ReleaseInfo
|
||||
|
||||
__all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL']
|
||||
|
||||
DEFAULT_XMLRPC_INDEX_URL = 'http://python.org/pypi'
|
||||
|
||||
_SEARCH_FIELDS = ['name', 'version', 'author', 'author_email', 'maintainer',
|
||||
'maintainer_email', 'home_page', 'license', 'summary',
|
||||
'description', 'keywords', 'platform', 'download_url']
|
||||
|
||||
|
||||
class Client(BaseClient):
    """Client to query indexes using XML-RPC method calls.

    If no server_url is specified, use the default PyPI XML-RPC URL,
    defined in the DEFAULT_XMLRPC_INDEX_URL constant::

        >>> client = Client()
        >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL
        True

        >>> client = Client("http://someurl/")
        >>> client.server_url
        'http://someurl/'
    """

    def __init__(self, server_url=DEFAULT_XMLRPC_INDEX_URL, prefer_final=False,
                 prefer_source=True):
        super().__init__(prefer_final, prefer_source)
        self.server_url = server_url
        self._projects = {}

    def get_release(self, requirements, prefer_final=False):
        """Return a release with all complete metadata and distribution
        related information.
        """
        prefer_final = self._get_prefer_final(prefer_final)
        predicate = get_version_predicate(requirements)
        releases = self.get_releases(predicate.name)
        release = releases.get_last(predicate, prefer_final)
        self.get_metadata(release.name, str(release.version))
        self.get_distributions(release.name, str(release.version))
        return release

    def get_releases(self, requirements, prefer_final=None, show_hidden=True,
                     force_update=False):
        """Return the list of existing releases for a specific project.

        Cache the results from one call to another.

        If show_hidden is True, return the hidden releases too.
        If force_update is True, reprocess the index to update the
        information (eg. make a new XML-RPC call).
        ::

            >>> client = Client()
            >>> client.get_releases('Foo')
            ['1.1', '1.2', '1.3']

        If no such project exists, raise a ProjectNotFound exception::

            >>> client.get_releases('UnexistingProject')
            ProjectNotFound: UnexistingProject

        """
        def get_versions(project_name, show_hidden):
            return self.proxy.package_releases(project_name, show_hidden)

        predicate = get_version_predicate(requirements)
        prefer_final = self._get_prefer_final(prefer_final)
        project_name = predicate.name
        if not force_update and (project_name.lower() in self._projects):
            project = self._projects[project_name.lower()]
            if not project.contains_hidden and show_hidden:
                # if hidden releases are requested, and have an existing
                # list of releases that does not contains hidden ones
                all_versions = get_versions(project_name, show_hidden)
                existing_versions = project.get_versions()
                hidden_versions = set(all_versions) - set(existing_versions)
                for version in hidden_versions:
                    project.add_release(release=ReleaseInfo(project_name,
                        version, index=self._index))
        else:
            versions = get_versions(project_name, show_hidden)
            if not versions:
                raise ProjectNotFound(project_name)
            project = self._get_project(project_name)
            project.add_releases([ReleaseInfo(project_name, version,
                                              index=self._index)
                                  for version in versions])
        project = project.filter(predicate)
        if len(project) == 0:
            raise ReleaseNotFound("%s" % predicate)
        project.sort_releases(prefer_final)
        return project

    def get_distributions(self, project_name, version):
        """Grab information about distributions from XML-RPC.

        Return a ReleaseInfo object, with distribution-related
        information filled in.
        """
        url_infos = self.proxy.release_urls(project_name, version)
        project = self._get_project(project_name)
        if version not in project.get_versions():
            project.add_release(release=ReleaseInfo(project_name, version,
                                                    index=self._index))
        release = project.get_release(version)
        for info in url_infos:
            packagetype = info['packagetype']
            dist_infos = {'url': info['url'],
                          'hashval': info['md5_digest'],
                          'hashname': 'md5',
                          'is_external': False,
                          'python_version': info['python_version']}
            release.add_distribution(packagetype, **dist_infos)
        return release

    def get_metadata(self, project_name, version):
        """Retrieve project metadata.

        Return a ReleaseInfo object, with metadata information filled in.
        """
        # to be case-insensitive, get the information from the XMLRPC API.
        # BUGFIX: compare both sides lowercased; the original compared
        # the lowercased candidate against the raw project_name, which
        # broke the lookup for any mixed-case name.
        projects = [d['name'] for d in
                    self.proxy.search({'name': project_name})
                    if d['name'].lower() == project_name.lower()]
        if len(projects) > 0:
            project_name = projects[0]

        metadata = self.proxy.release_data(project_name, version)
        project = self._get_project(project_name)
        if version not in project.get_versions():
            project.add_release(release=ReleaseInfo(project_name, version,
                                                    index=self._index))
        release = project.get_release(version)
        release.set_metadata(metadata)
        return release

    def search_projects(self, name=None, operator="or", **kwargs):
        """Find using the keys provided in kwargs.

        You can set operator to "and" or "or".
        """
        for key in kwargs:
            if key not in _SEARCH_FIELDS:
                raise InvalidSearchField(key)
        if name:
            kwargs["name"] = name
        projects = self.proxy.search(kwargs, operator)
        for p in projects:
            project = self._get_project(p['name'])
            try:
                project.add_release(release=ReleaseInfo(p['name'],
                    p['version'], metadata={'summary': p['summary']},
                    index=self._index))
            except IrrationalVersionError as e:
                logger.warning("Irrational version error found: %s", e)
        return [self._projects[p['name'].lower()] for p in projects]

    def get_all_projects(self):
        """Return the list of all projects registered in the package index"""
        projects = self.proxy.list_packages()
        for name in projects:
            self.get_releases(name, show_hidden=True)

        return [self._projects[name.lower()] for name in set(projects)]

    @property
    def proxy(self):
        """Property used to return the XMLRPC server proxy.

        If no server proxy is defined yet, creates a new one::

            >>> client = Client()
            >>> client.proxy
            <ServerProxy for python.org/pypi>

        """
        if not hasattr(self, '_server_proxy'):
            self._server_proxy = xmlrpc.client.ServerProxy(self.server_url)

        return self._server_proxy
|
||||
25
Lib/packaging/resources.py
Normal file
25
Lib/packaging/resources.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
"""Data file path abstraction.
|
||||
|
||||
Functions in this module use sysconfig to find the paths to the resource
|
||||
files registered in project's setup.cfg file. See the documentation for
|
||||
more information.
|
||||
"""
|
||||
# TODO write that documentation
|
||||
|
||||
from packaging.database import get_distribution
|
||||
|
||||
__all__ = ['get_file_path', 'get_file']
|
||||
|
||||
|
||||
def get_file_path(distribution_name, relative_path):
    """Return the path to a resource file.

    :raises LookupError: when no installed distribution matches
        *distribution_name*.
    """
    dist = get_distribution(distribution_name)
    # compare to None with identity, not equality (PEP 8)
    if dist is not None:
        return dist.get_resource_path(relative_path)
    raise LookupError('no distribution named %r found' % distribution_name)
|
||||
|
||||
|
||||
def get_file(distribution_name, relative_path, *args, **kwargs):
    """Open and return a resource file."""
    path = get_file_path(distribution_name, relative_path)
    return open(path, *args, **kwargs)
|
||||
645
Lib/packaging/run.py
Normal file
645
Lib/packaging/run.py
Normal file
|
|
@ -0,0 +1,645 @@
|
|||
"""Main command line parser. Implements the pysetup script."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import getopt
|
||||
import logging
|
||||
|
||||
from packaging import logger
|
||||
from packaging.dist import Distribution
|
||||
from packaging.util import _is_archive_file
|
||||
from packaging.command import get_command_class, STANDARD_COMMANDS
|
||||
from packaging.install import install, install_local_project, remove
|
||||
from packaging.database import get_distribution, get_distributions
|
||||
from packaging.depgraph import generate_graph
|
||||
from packaging.fancy_getopt import FancyGetopt
|
||||
from packaging.errors import (PackagingArgError, PackagingError,
|
||||
PackagingModuleError, PackagingClassError,
|
||||
CCompilerError)
|
||||
|
||||
|
||||
command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
|
||||
|
||||
common_usage = """\
|
||||
Actions:
|
||||
%(actions)s
|
||||
|
||||
To get more help on an action, use:
|
||||
|
||||
pysetup action --help
|
||||
"""
|
||||
|
||||
create_usage = """\
|
||||
Usage: pysetup create
|
||||
or: pysetup create --help
|
||||
|
||||
Create a new Python package.
|
||||
"""
|
||||
|
||||
graph_usage = """\
|
||||
Usage: pysetup graph dist
|
||||
or: pysetup graph --help
|
||||
|
||||
Print dependency graph for the distribution.
|
||||
|
||||
positional arguments:
|
||||
dist installed distribution name
|
||||
"""
|
||||
|
||||
# BUGFIX: the positional-argument name was misspelled "scr_dir" while
# the usage lines above say "src_dir".
install_usage = """\
Usage: pysetup install [dist]
   or: pysetup install [archive]
   or: pysetup install [src_dir]
   or: pysetup install --help

Install a Python distribution from the indexes, source directory, or sdist.

positional arguments:
   archive  path to source distribution (zip, tar.gz)
   dist     distribution name to install from the indexes
   src_dir  path to source directory

"""
|
||||
|
||||
metadata_usage = """\
|
||||
Usage: pysetup metadata [dist] [-f field ...]
|
||||
or: pysetup metadata [dist] [--all]
|
||||
or: pysetup metadata --help
|
||||
|
||||
Print metadata for the distribution.
|
||||
|
||||
positional arguments:
|
||||
dist installed distribution name
|
||||
|
||||
optional arguments:
|
||||
-f metadata field to print
|
||||
--all print all metadata fields
|
||||
"""
|
||||
|
||||
remove_usage = """\
|
||||
Usage: pysetup remove dist [-y]
|
||||
or: pysetup remove --help
|
||||
|
||||
Uninstall a Python distribution.
|
||||
|
||||
positional arguments:
|
||||
dist installed distribution name
|
||||
|
||||
optional arguments:
|
||||
-y auto confirm package removal
|
||||
"""
|
||||
|
||||
run_usage = """\
|
||||
Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
|
||||
or: pysetup run --help
|
||||
or: pysetup run --list-commands
|
||||
or: pysetup run cmd --help
|
||||
"""
|
||||
|
||||
list_usage = """\
|
||||
Usage: pysetup list dist [dist ...]
|
||||
or: pysetup list --help
|
||||
or: pysetup list --all
|
||||
|
||||
Print name, version and location for the matching installed distributions.
|
||||
|
||||
positional arguments:
|
||||
dist installed distribution name
|
||||
|
||||
optional arguments:
|
||||
--all list all installed distributions
|
||||
"""
|
||||
|
||||
# BUGFIX: user-facing help text typos fixed ("wether" -> "whether",
# "searchs" -> "searches").
search_usage = """\
Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] --operator or|and]
   or: pysetup search --help

Search the indexes for the matching projects.

positional arguments:
    project     the project pattern to search for

optional arguments:
    --xmlrpc [url]      whether to use the xmlrpc index or not. If an url is
                        specified, it will be used rather than the default one.

    --simple [url]      whether to use the simple index or not. If an url is
                        specified, it will be used rather than the default one.

    --fieldname value   Make a search on this field. Can only be used if
                        --xmlrpc has been selected or is the default index.

    --operator or|and   Defines what is the operator to use when doing xmlrpc
                        searches with multiple fieldnames. Can only be used if
                        --xmlrpc has been selected or is the default index.
"""
|
||||
|
||||
global_options = [
|
||||
# The fourth entry for verbose means that it can be repeated.
|
||||
('verbose', 'v', "run verbosely (default)", True),
|
||||
('quiet', 'q', "run quietly (turns verbosity off)"),
|
||||
('dry-run', 'n', "don't actually do anything"),
|
||||
('help', 'h', "show detailed help message"),
|
||||
('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'),
|
||||
('version', None, 'Display the version'),
|
||||
]
|
||||
|
||||
negative_opt = {'quiet': 'verbose'}
|
||||
|
||||
display_options = [
|
||||
('help-commands', None, "list all available commands"),
|
||||
]
|
||||
|
||||
display_option_names = [x[0].replace('-', '_') for x in display_options]
|
||||
|
||||
|
||||
def _parse_args(args, options, long_options):
|
||||
"""Transform sys.argv input into a dict.
|
||||
|
||||
:param args: the args to parse (i.e sys.argv)
|
||||
:param options: the list of options to pass to getopt
|
||||
:param long_options: the list of string with the names of the long options
|
||||
to be passed to getopt.
|
||||
|
||||
The function returns a dict with options/long_options as keys and matching
|
||||
values as values.
|
||||
"""
|
||||
optlist, args = getopt.gnu_getopt(args, options, long_options)
|
||||
optdict = {}
|
||||
optdict['args'] = args
|
||||
for k, v in optlist:
|
||||
k = k.lstrip('-')
|
||||
if k not in optdict:
|
||||
optdict[k] = []
|
||||
if v:
|
||||
optdict[k].append(v)
|
||||
else:
|
||||
optdict[k].append(v)
|
||||
return optdict
|
||||
|
||||
|
||||
class action_help:
    """Decorator printing a help message when the standard help flags
    (-h and --help) appear on the command line.
    """

    def __init__(self, help_msg):
        self.help_msg = help_msg

    def __call__(self, f):
        def wrapper(*args, **kwargs):
            cmd_args = args[1]
            wants_help = '--help' in cmd_args or '-h' in cmd_args
            if wants_help:
                print(self.help_msg)
                return None
            return f(*args, **kwargs)
        return wrapper
|
||||
|
||||
|
||||
@action_help(create_usage)
def _create(distpatcher, args, **kw):
    # delegate to the interactive project creator
    from packaging.create import main as create_main
    return create_main()
|
||||
|
||||
|
||||
@action_help(graph_usage)
def _graph(dispatcher, args, **kw):
    name = args[1]
    dist = get_distribution(name, use_egg_info=True)
    if dist is None:
        print('Distribution not found.')
        return
    all_dists = get_distributions(use_egg_info=True)
    dep_graph = generate_graph(all_dists)
    print(dep_graph.repr_node(dist))
|
||||
|
||||
|
||||
@action_help(install_usage)
def _install(dispatcher, args, **kw):
    # no explicit target given: maybe we are inside a project dir
    if len(args) < 2:
        here = os.getcwd()
        listing = os.listdir(here)
        if 'setup.py' in listing or 'setup.cfg' in listing:
            args.insert(1, here)
        else:
            logger.warning('no project to install')
            return

    target = args[1]
    # installing from a source dir or archive file?
    if os.path.isdir(target) or _is_archive_file(target):
        install_local_project(target)
    else:
        # download from PyPI
        install(target)
|
||||
|
||||
|
||||
@action_help(metadata_usage)
def _metadata(dispatcher, args, **kw):
    opts = _parse_args(args[1:], 'f:', ['all'])
    if opts['args']:
        name = opts['args'][0]
        dist = get_distribution(name, use_egg_info=True)
        if dist is None:
            logger.warning('%s not installed', name)
            return
    else:
        logger.info('searching local dir for metadata')
        dist = Distribution()
        dist.parse_config_files()

    metadata = dist.metadata

    # which fields to display?
    if 'all' in opts:
        keys = metadata.keys()
    elif 'f' in opts:
        keys = (field for field in opts['f'] if field in metadata)
    else:
        keys = ()

    for key in keys:
        if key in metadata:
            print(metadata._convert_name(key) + ':')
            value = metadata[key]
            if isinstance(value, list):
                for item in value:
                    print(' ' + item)
            else:
                print(' ' + value.replace('\n', '\n '))
|
||||
|
||||
|
||||
@action_help(remove_usage)
def _remove(distpatcher, args, **kw):
    opts = _parse_args(args[1:], 'y', [])
    # -y means "don't ask for confirmation"
    auto_confirm = 'y' in opts

    for dist in set(opts['args']):
        try:
            remove(dist, auto_confirm=auto_confirm)
        except PackagingError:
            logger.warning('%s not installed', dist)
|
||||
|
||||
|
||||
@action_help(run_usage)
def _run(dispatcher, args, **kw):
    parser = dispatcher.parser
    args = args[1:]

    commands = STANDARD_COMMANDS  # + extra commands

    if args == ['--list-commands']:
        print('List of available commands:')
        for cmd in sorted(commands):
            cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd)
            desc = getattr(cls, 'description', '(no description available)')
            print(' %s: %s' % (cmd, desc))
        return

    while args:
        args = dispatcher._parse_command_opts(parser, args)
        if args is None:
            return

    # create the Distribution class
    # need to feed setup.cfg here !
    dist = Distribution()

    # Find and parse the config file(s): they will override options from
    # the setup script, but be overridden by the command line.

    # XXX still need to be extracted from Distribution
    dist.parse_config_files()

    try:
        for cmd in dispatcher.commands:
            dist.run_command(cmd, dispatcher.command_options[cmd])
    except KeyboardInterrupt:
        raise SystemExit("interrupted")
    except (IOError, os.error, PackagingError, CCompilerError) as msg:
        raise SystemExit("error: " + str(msg))

    # XXX this is crappy
    return dist
|
||||
|
||||
|
||||
@action_help(list_usage)
def _list(dispatcher, args, **kw):
    """Print name, version and path of matching installed distributions."""
    opts = _parse_args(args[1:], '', ['all'])
    dists = get_distributions(use_egg_info=True)
    if 'all' in opts:
        results = dists
    else:
        # BUGFIX: lowercase the requested names too, so the comparison
        # against the lowercased distribution names is really
        # case-insensitive ("pysetup list Foo" used to never match)
        wanted = [arg.lower() for arg in opts['args']]
        results = [d for d in dists if d.name.lower() in wanted]

    for dist in results:
        print('%s %s at %s' % (dist.name, dist.metadata['version'], dist.path))
|
||||
|
||||
|
||||
@action_help(search_usage)
def _search(dispatcher, args, **kw):
    """The search action.

    It is able to search for a specific index (specified with --index), using
    the simple or xmlrpc index types (with --type xmlrpc / --type simple)
    """
    opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
    # 1. what kind of index is requested ? (xmlrpc / simple)
    # XXX the actual search is not implemented yet; only the options
    # are parsed so far
|
||||
|
||||
|
||||
actions = [
|
||||
('run', 'Run one or several commands', _run),
|
||||
('metadata', 'Display the metadata of a project', _metadata),
|
||||
('install', 'Install a project', _install),
|
||||
('remove', 'Remove a project', _remove),
|
||||
('search', 'Search for a project in the indexes', _search),
|
||||
('list', 'Search for local projects', _list),
|
||||
('graph', 'Display a graph', _graph),
|
||||
('create', 'Create a Project', _create),
|
||||
]
|
||||
|
||||
|
||||
class Dispatcher:
|
||||
"""Reads the command-line options
|
||||
"""
|
||||
def __init__(self, args=None):
    """Parse global options and the action name from *args*.

    :param args: argument list (defaults to sys.argv handling inside
        FancyGetopt when None).
    :raises PackagingArgError: for an unknown action name.
    """
    self.verbose = 1
    self.dry_run = False
    self.help = False
    self.script_name = 'pysetup'
    self.cmdclass = {}
    self.commands = []
    self.command_options = {}

    for attr in display_option_names:
        setattr(self, attr, False)

    self.parser = FancyGetopt(global_options + display_options)
    self.parser.set_negative_aliases(negative_opt)
    # FIXME this parses everything, including command options (e.g. "run
    # build -i" errors with "option -i not recognized")
    args = self.parser.getopt(args=args, object=self)

    # if first arg is "run", we have some commands
    if len(args) == 0:
        self.action = None
    else:
        self.action = args[0]

    allowed = [action[0] for action in actions] + [None]
    if self.action not in allowed:
        msg = 'Unrecognized action "%s"' % self.action
        raise PackagingArgError(msg)

    # setting up the logging level from the command-line options
    # -q gets warning, error and critical
    if self.verbose == 0:
        level = logging.WARNING
    # default level or -v gets info too
    # XXX there's a bug somewhere: the help text says that -v is default
    # (and verbose is set to 1 above), but when the user explicitly gives
    # -v on the command line, self.verbose is incremented to 2!  Here we
    # compensate for that (I tested manually).  On a related note, I think
    # it's a good thing to use -q/nothing/-v/-vv on the command line
    # instead of logging constants; it will be easy to add support for
    # logging configuration in setup.cfg for advanced users. --merwok
    elif self.verbose in (1, 2):
        level = logging.INFO
    else:  # -vv and more for debug
        level = logging.DEBUG
    # BUGFIX: the computed level was never used; actually apply it to
    # the package logger so -q/-v/-vv have an effect
    logger.setLevel(level)

    # for display options we return immediately
    option_order = self.parser.get_option_order()

    self.args = args

    if self.help or self.action is None:
        self._show_help(self.parser, display_options_=False)
|
||||
|
||||
def _parse_command_opts(self, parser, args):
    """Parse the options of the command at the head of *args*.

    Returns the remaining arguments, or None when a help request was
    handled and processing should stop.
    """
    # Pull the current command from the head of the command line
    command = args[0]
    if not command_re.match(command):
        raise SystemExit("invalid command name %r" % (command,))
    self.commands.append(command)

    # Dig up the command class that implements this command, so we
    # 1) know that it's a valid command, and 2) know which options
    # it takes.
    try:
        cmd_class = get_command_class(command)
    except PackagingModuleError as exc:
        # chain the original error for easier debugging
        raise PackagingArgError(exc) from exc

    # XXX We want to push this in packaging.command
    #
    # Require that the command class be derived from Command -- want
    # to be sure that the basic "command" interface is implemented.
    for meth in ('initialize_options', 'finalize_options', 'run'):
        if not hasattr(cmd_class, meth):
            raise PackagingClassError(
                'command %r must implement %r' % (cmd_class, meth))

    # Also make sure that the command object provides a list of its
    # known options.
    if not (hasattr(cmd_class, 'user_options') and
            isinstance(cmd_class.user_options, list)):
        raise PackagingClassError(
            "command class %s must provide "
            "'user_options' attribute (a list of tuples)" % cmd_class)

    # If the command class has a list of negative alias options,
    # merge it in with the global negative aliases.
    _negative_opt = negative_opt.copy()
    if hasattr(cmd_class, 'negative_opt'):
        _negative_opt.update(cmd_class.negative_opt)

    # Check for help_options in command class.  They have a different
    # format (tuple of four) so we need to preprocess them here.
    if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
        help_options = cmd_class.help_options[:]
    else:
        help_options = []

    # All commands support the global options too, just by adding
    # in 'global_options'.
    parser.set_option_table(global_options +
                            cmd_class.user_options +
                            help_options)
    parser.set_negative_aliases(_negative_opt)
    args, opts = parser.getopt(args[1:])

    if hasattr(opts, 'help') and opts.help:
        self._show_command_help(cmd_class)
        return

    if (hasattr(cmd_class, 'help_options') and
            isinstance(cmd_class.help_options, list)):
        help_option_found = False
        for help_option, short, desc, func in cmd_class.help_options:
            if hasattr(opts, help_option.replace('-', '_')):
                help_option_found = True
                if callable(func):
                    func()
                else:
                    raise PackagingClassError(
                        "invalid help function %r for help option %r: "
                        "must be a callable object (function, etc.)"
                        % (func, help_option))

        if help_option_found:
            return

    # Put the options from the command line into their official
    # holding pen, the 'command_options' dictionary.
    opt_dict = self.get_option_dict(command)
    for name, value in vars(opts).items():
        opt_dict[name] = ("command line", value)

    return args
|
||||
|
||||
def get_option_dict(self, command):
    """Return the option dictionary for *command*.

    The dictionary is created lazily: the first request for a given
    command stores (and returns) a fresh empty dict in
    ``self.command_options``; later requests return that same dict.
    """
    # setdefault performs the create-on-first-access in a single step,
    # matching the original lazy-initialisation contract exactly.
    return self.command_options.setdefault(command, {})
|
||||
|
||||
def show_help(self):
    """Print the full help message: usage, global and display options."""
    self._show_help(self.parser)
|
||||
|
||||
def print_usage(self, parser):
    """Print the usage banner followed by the list of global options.

    *parser* is the FancyGetopt-style parser used for formatting.
    """
    parser.set_option_table(global_options)

    # Build the "actions" section of the usage text: one indented
    # "name: description" line per registered action.
    action_lines = [' %s: %s' % (name, desc) for name, desc, __ in actions]
    banner = common_usage % {'actions': '\n'.join(action_lines)}

    parser.print_help(banner + "\nGlobal options:")
|
||||
|
||||
def _show_help(self, parser, global_options_=True, display_options_=True,
               commands=()):
    """Print help for the global options, display options and *commands*.

    :param parser: FancyGetopt-style parser used for formatting
    :param global_options_: if true, print the global options section
    :param display_options_: if true, print the information-display options
    :param commands: iterable of command names or Command subclasses whose
        per-command options should be documented

    Fix: the default for *commands* was the mutable ``[]`` (one shared
    list instance across all calls); an empty tuple is equivalent here
    since the argument is only iterated, and avoids the pitfall.
    """
    # late import because of mutual dependence between these modules
    from packaging.command.cmd import Command

    print('Usage: pysetup [options] action [action_options]')
    print('')
    if global_options_:
        self.print_usage(self.parser)
        print('')

    if display_options_:
        parser.set_option_table(display_options)
        parser.print_help(
            "Information display options (just display " +
            "information, ignore any commands)")
        print('')

    for command in commands:
        # Accept either a Command subclass directly or a command name
        # to be resolved through the registry.
        if isinstance(command, type) and issubclass(command, Command):
            cls = command
        else:
            cls = get_command_class(command)
        if (hasattr(cls, 'help_options') and
            isinstance(cls.help_options, list)):
            parser.set_option_table(cls.user_options + cls.help_options)
        else:
            parser.set_option_table(cls.user_options)

        parser.print_help("Options for %r command:" % cls.__name__)
        print('')
|
||||
|
||||
def _show_command_help(self, command):
|
||||
if isinstance(command, str):
|
||||
command = get_command_class(command)
|
||||
|
||||
name = command.get_command_name()
|
||||
|
||||
desc = getattr(command, 'description', '(no description available)')
|
||||
print('Description: %s' % desc)
|
||||
print('')
|
||||
|
||||
if (hasattr(command, 'help_options') and
|
||||
isinstance(command.help_options, list)):
|
||||
self.parser.set_option_table(command.user_options +
|
||||
command.help_options)
|
||||
else:
|
||||
self.parser.set_option_table(command.user_options)
|
||||
|
||||
self.parser.print_help("Options:")
|
||||
print('')
|
||||
|
||||
def _get_command_groups(self):
    """Return the pair (standard_commands, extra_commands).

    Standard commands come from packaging.command.STANDARD_COMMANDS;
    extra commands are the entries of self.cmdclass that are not
    standard.
    """
    extras = [name for name in self.cmdclass
              if name not in STANDARD_COMMANDS]
    return STANDARD_COMMANDS, extras
|
||||
|
||||
def print_commands(self):
    """Print a help message listing every available command.

    The list is split into standard commands (from
    packaging.command.STANDARD_COMMANDS) and extra commands (from
    self.cmdclass); descriptions come from each command class's
    'description' attribute.
    """
    std_commands, extra_commands = self._get_command_groups()

    # Width of the longest command name, used to align descriptions.
    width = max(len(name)
                for group in (std_commands, extra_commands)
                for name in group)

    self.print_command_list(std_commands, "Standard commands", width)
    if not extra_commands:
        return
    print()
    self.print_command_list(extra_commands, "Extra commands", width)
|
||||
|
||||
def print_command_list(self, commands, header, max_length):
    """Print one group of commands -- helper for 'print_commands()'.

    *max_length* is the column width used to align descriptions.
    """
    print("%s:" % header)

    for name in commands:
        # Prefer a class registered in self.cmdclass; otherwise resolve
        # the name through the global command registry.
        cls = self.cmdclass.get(name) or get_command_class(name)
        desc = getattr(cls, 'description', '(no description available)')
        print(" %-*s %s" % (max_length, name, desc))
|
||||
|
||||
def __call__(self):
    """Run the selected action.

    Returns None when no action was parsed, the action function's
    result when the action is known, and -1 for an unknown action name.
    """
    if self.action is None:
        return
    for name, _description, handler in actions:
        if name == self.action:
            return handler(self, self.args)
    # The action name matched nothing in the registry.
    return -1
|
||||
|
||||
|
||||
def main(args=None):
    """Command-line entry point: parse *args* and run the chosen action.

    Returns None when no action was selected (e.g. plain help output),
    otherwise the dispatcher's result.
    """
    dsp = Dispatcher(args)
    if dsp.action is None:
        return
    return dsp()


if __name__ == '__main__':
    sys.exit(main())
|
||||
44
Lib/packaging/tests/LONG_DESC.txt
Normal file
44
Lib/packaging/tests/LONG_DESC.txt
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
CLVault
|
||||
=======
|
||||
|
||||
CLVault uses Keyring to provide a command-line utility to safely store
|
||||
and retrieve passwords.
|
||||
|
||||
Install it using pip or the setup.py script::
|
||||
|
||||
$ python setup.py install
|
||||
|
||||
$ pip install clvault
|
||||
|
||||
Once it's installed, you will have three scripts installed in your
|
||||
Python scripts folder, you can use to list, store and retrieve passwords::
|
||||
|
||||
$ clvault-set blog
|
||||
Set your password:
|
||||
Set the associated username (can be blank): tarek
|
||||
Set a description (can be blank): My blog password
|
||||
Password set.
|
||||
|
||||
$ clvault-get blog
|
||||
The username is "tarek"
|
||||
The password has been copied in your clipboard
|
||||
|
||||
$ clvault-list
|
||||
Registered services:
|
||||
blog My blog password
|
||||
|
||||
|
||||
*clvault-set* takes a service name then prompt you for a password, and some
|
||||
optional information about your service. The password is safely stored in
|
||||
a keyring while the description is saved in a ``.clvault`` file in your
|
||||
home directory. This file is created automatically the first time the command
|
||||
is used.
|
||||
|
||||
*clvault-get* copies the password for a given service in your clipboard, and
|
||||
displays the associated user if any.
|
||||
|
||||
*clvault-list* lists all registered services, with their description when
|
||||
given.
|
||||
|
||||
|
||||
Project page: http://bitbucket.org/tarek/clvault
|
||||
57
Lib/packaging/tests/PKG-INFO
Normal file
57
Lib/packaging/tests/PKG-INFO
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: CLVault
|
||||
Version: 0.5
|
||||
Summary: Command-Line utility to store and retrieve passwords
|
||||
Home-page: http://bitbucket.org/tarek/clvault
|
||||
Author: Tarek Ziade
|
||||
Author-email: tarek@ziade.org
|
||||
License: PSF
|
||||
Keywords: keyring,password,crypt
|
||||
Requires-Dist: foo; sys.platform == 'okook'
|
||||
Requires-Dist: bar; sys.platform == '%s'
|
||||
Platform: UNKNOWN
|
||||
Description: CLVault
|
||||
|=======
|
||||
|
|
||||
|CLVault uses Keyring to provide a command-line utility to safely store
|
||||
|and retrieve passwords.
|
||||
|
|
||||
|Install it using pip or the setup.py script::
|
||||
|
|
||||
| $ python setup.py install
|
||||
|
|
||||
| $ pip install clvault
|
||||
|
|
||||
|Once it's installed, you will have three scripts installed in your
|
||||
|Python scripts folder, you can use to list, store and retrieve passwords::
|
||||
|
|
||||
| $ clvault-set blog
|
||||
| Set your password:
|
||||
| Set the associated username (can be blank): tarek
|
||||
| Set a description (can be blank): My blog password
|
||||
| Password set.
|
||||
|
|
||||
| $ clvault-get blog
|
||||
| The username is "tarek"
|
||||
| The password has been copied in your clipboard
|
||||
|
|
||||
| $ clvault-list
|
||||
| Registered services:
|
||||
| blog My blog password
|
||||
|
|
||||
|
|
||||
|*clvault-set* takes a service name then prompt you for a password, and some
|
||||
|optional information about your service. The password is safely stored in
|
||||
|a keyring while the description is saved in a ``.clvault`` file in your
|
||||
|home directory. This file is created automatically the first time the command
|
||||
|is used.
|
||||
|
|
||||
|*clvault-get* copies the password for a given service in your clipboard, and
|
||||
|displays the associated user if any.
|
||||
|
|
||||
|*clvault-list* lists all registered services, with their description when
|
||||
|given.
|
||||
|
|
||||
|
|
||||
|Project page: http://bitbucket.org/tarek/clvault
|
||||
|
|
||||
182
Lib/packaging/tests/SETUPTOOLS-PKG-INFO
Normal file
182
Lib/packaging/tests/SETUPTOOLS-PKG-INFO
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
Metadata-Version: 1.0
|
||||
Name: setuptools
|
||||
Version: 0.6c9
|
||||
Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
|
||||
Home-page: http://pypi.python.org/pypi/setuptools
|
||||
Author: Phillip J. Eby
|
||||
Author-email: distutils-sig@python.org
|
||||
License: PSF or ZPL
|
||||
Description: ===============================
|
||||
Installing and Using Setuptools
|
||||
===============================
|
||||
|
||||
.. contents:: **Table of Contents**
|
||||
|
||||
|
||||
-------------------------
|
||||
Installation Instructions
|
||||
-------------------------
|
||||
|
||||
Windows
|
||||
=======
|
||||
|
||||
Install setuptools using the provided ``.exe`` installer. If you've previously
|
||||
installed older versions of setuptools, please delete all ``setuptools*.egg``
|
||||
and ``setuptools.pth`` files from your system's ``site-packages`` directory
|
||||
(and any other ``sys.path`` directories) FIRST.
|
||||
|
||||
If you are upgrading a previous version of setuptools that was installed using
|
||||
an ``.exe`` installer, please be sure to also *uninstall that older version*
|
||||
via your system's "Add/Remove Programs" feature, BEFORE installing the newer
|
||||
version.
|
||||
|
||||
Once installation is complete, you will find an ``easy_install.exe`` program in
|
||||
your Python ``Scripts`` subdirectory. Be sure to add this directory to your
|
||||
``PATH`` environment variable, if you haven't already done so.
|
||||
|
||||
|
||||
RPM-Based Systems
|
||||
=================
|
||||
|
||||
Install setuptools using the provided source RPM. The included ``.spec`` file
|
||||
assumes you are installing using the default ``python`` executable, and is not
|
||||
specific to a particular Python version. The ``easy_install`` executable will
|
||||
be installed to a system ``bin`` directory such as ``/usr/bin``.
|
||||
|
||||
If you wish to install to a location other than the default Python
|
||||
installation's default ``site-packages`` directory (and ``$prefix/bin`` for
|
||||
scripts), please use the ``.egg``-based installation approach described in the
|
||||
following section.
|
||||
|
||||
|
||||
Cygwin, Mac OS X, Linux, Other
|
||||
==============================
|
||||
|
||||
1. Download the appropriate egg for your version of Python (e.g.
|
||||
``setuptools-0.6c9-py2.4.egg``). Do NOT rename it.
|
||||
|
||||
2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
|
||||
Setuptools will install itself using the matching version of Python (e.g.
|
||||
``python2.4``), and will place the ``easy_install`` executable in the
|
||||
default location for installing Python scripts (as determined by the
|
||||
standard distutils configuration files, or by the Python installation).
|
||||
|
||||
If you want to install setuptools to somewhere other than ``site-packages`` or
|
||||
your default distutils installation locations for libraries and scripts, you
|
||||
may include EasyInstall command-line options such as ``--prefix``,
|
||||
``--install-dir``, and so on, following the ``.egg`` filename on the same
|
||||
command line. For example::
|
||||
|
||||
sh setuptools-0.6c9-py2.4.egg --prefix=~
|
||||
|
||||
You can use ``--help`` to get a full options list, but we recommend consulting
|
||||
the `EasyInstall manual`_ for detailed instructions, especially `the section
|
||||
on custom installation locations`_.
|
||||
|
||||
.. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
|
||||
.. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
|
||||
|
||||
|
||||
Cygwin Note
|
||||
-----------
|
||||
|
||||
If you are trying to install setuptools for the **Windows** version of Python
|
||||
(as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
|
||||
sure that an appropriate executable (``python2.3``, ``python2.4``, or
|
||||
``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For
|
||||
example, doing the following at a Cygwin bash prompt will install setuptools
|
||||
for the **Windows** Python found at ``C:\\Python24``::
|
||||
|
||||
ln -s /cygdrive/c/Python24/python.exe python2.4
|
||||
PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
|
||||
rm python2.4
|
||||
|
||||
|
||||
Downloads
|
||||
=========
|
||||
|
||||
All setuptools downloads can be found at `the project's home page in the Python
|
||||
Package Index`_. Scroll to the very bottom of the page to find the links.
|
||||
|
||||
.. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
|
||||
|
||||
In addition to the PyPI downloads, the development version of ``setuptools``
|
||||
is available from the `Python SVN sandbox`_, and in-development versions of the
|
||||
`0.6 branch`_ are available as well.
|
||||
|
||||
.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
|
||||
|
||||
.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
|
||||
|
||||
--------------------------------
|
||||
Using Setuptools and EasyInstall
|
||||
--------------------------------
|
||||
|
||||
Here are some of the available manuals, tutorials, and other resources for
|
||||
learning about Setuptools, Python Eggs, and EasyInstall:
|
||||
|
||||
* `The EasyInstall user's guide and reference manual`_
|
||||
* `The setuptools Developer's Guide`_
|
||||
* `The pkg_resources API reference`_
|
||||
* `Package Compatibility Notes`_ (user-maintained)
|
||||
* `The Internal Structure of Python Eggs`_
|
||||
|
||||
Questions, comments, and bug reports should be directed to the `distutils-sig
|
||||
mailing list`_. If you have written (or know of) any tutorials, documentation,
|
||||
plug-ins, or other resources for setuptools users, please let us know about
|
||||
them there, so this reference list can be updated. If you have working,
|
||||
*tested* patches to correct problems or add features, you may submit them to
|
||||
the `setuptools bug tracker`_.
|
||||
|
||||
.. _setuptools bug tracker: http://bugs.python.org/setuptools/
|
||||
.. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
|
||||
.. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
|
||||
.. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
|
||||
.. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
|
||||
.. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
|
||||
.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
|
||||
|
||||
|
||||
-------
|
||||
Credits
|
||||
-------
|
||||
|
||||
* The original design for the ``.egg`` format and the ``pkg_resources`` API was
|
||||
co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
|
||||
version of ``pkg_resources``, and supplied the OS X operating system version
|
||||
compatibility algorithm.
|
||||
|
||||
* Ian Bicking implemented many early "creature comfort" features of
|
||||
easy_install, including support for downloading via Sourceforge and
|
||||
Subversion repositories. Ian's comments on the Web-SIG about WSGI
|
||||
application deployment also inspired the concept of "entry points" in eggs,
|
||||
and he has given talks at PyCon and elsewhere to inform and educate the
|
||||
community about eggs and setuptools.
|
||||
|
||||
* Jim Fulton contributed time and effort to build automated tests of various
|
||||
aspects of ``easy_install``, and supplied the doctests for the command-line
|
||||
``.exe`` wrappers on Windows.
|
||||
|
||||
* Phillip J. Eby is the principal author and maintainer of setuptools, and
|
||||
first proposed the idea of an importable binary distribution format for
|
||||
Python application plug-ins.
|
||||
|
||||
* Significant parts of the implementation of setuptools were funded by the Open
|
||||
Source Applications Foundation, to provide a plug-in infrastructure for the
|
||||
Chandler PIM application. In addition, many OSAF staffers (such as Mike
|
||||
"Code Bear" Taylor) contributed their time and stress as guinea pigs for the
|
||||
use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
|
||||
|
||||
|
||||
Keywords: CPAN PyPI distutils eggs package management
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 3 - Alpha
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Python Software Foundation License
|
||||
Classifier: License :: OSI Approved :: Zope Public License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: System :: Archiving :: Packaging
|
||||
Classifier: Topic :: System :: Systems Administration
|
||||
Classifier: Topic :: Utilities
|
||||
183
Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
Normal file
183
Lib/packaging/tests/SETUPTOOLS-PKG-INFO2
Normal file
|
|
@ -0,0 +1,183 @@
|
|||
Metadata-Version: 1.1
|
||||
Name: setuptools
|
||||
Version: 0.6c9
|
||||
Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
|
||||
Home-page: http://pypi.python.org/pypi/setuptools
|
||||
Author: Phillip J. Eby
|
||||
Author-email: distutils-sig@python.org
|
||||
License: PSF or ZPL
|
||||
Description: ===============================
|
||||
Installing and Using Setuptools
|
||||
===============================
|
||||
|
||||
.. contents:: **Table of Contents**
|
||||
|
||||
|
||||
-------------------------
|
||||
Installation Instructions
|
||||
-------------------------
|
||||
|
||||
Windows
|
||||
=======
|
||||
|
||||
Install setuptools using the provided ``.exe`` installer. If you've previously
|
||||
installed older versions of setuptools, please delete all ``setuptools*.egg``
|
||||
and ``setuptools.pth`` files from your system's ``site-packages`` directory
|
||||
(and any other ``sys.path`` directories) FIRST.
|
||||
|
||||
If you are upgrading a previous version of setuptools that was installed using
|
||||
an ``.exe`` installer, please be sure to also *uninstall that older version*
|
||||
via your system's "Add/Remove Programs" feature, BEFORE installing the newer
|
||||
version.
|
||||
|
||||
Once installation is complete, you will find an ``easy_install.exe`` program in
|
||||
your Python ``Scripts`` subdirectory. Be sure to add this directory to your
|
||||
``PATH`` environment variable, if you haven't already done so.
|
||||
|
||||
|
||||
RPM-Based Systems
|
||||
=================
|
||||
|
||||
Install setuptools using the provided source RPM. The included ``.spec`` file
|
||||
assumes you are installing using the default ``python`` executable, and is not
|
||||
specific to a particular Python version. The ``easy_install`` executable will
|
||||
be installed to a system ``bin`` directory such as ``/usr/bin``.
|
||||
|
||||
If you wish to install to a location other than the default Python
|
||||
installation's default ``site-packages`` directory (and ``$prefix/bin`` for
|
||||
scripts), please use the ``.egg``-based installation approach described in the
|
||||
following section.
|
||||
|
||||
|
||||
Cygwin, Mac OS X, Linux, Other
|
||||
==============================
|
||||
|
||||
1. Download the appropriate egg for your version of Python (e.g.
|
||||
``setuptools-0.6c9-py2.4.egg``). Do NOT rename it.
|
||||
|
||||
2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``.
|
||||
Setuptools will install itself using the matching version of Python (e.g.
|
||||
``python2.4``), and will place the ``easy_install`` executable in the
|
||||
default location for installing Python scripts (as determined by the
|
||||
standard distutils configuration files, or by the Python installation).
|
||||
|
||||
If you want to install setuptools to somewhere other than ``site-packages`` or
|
||||
your default distutils installation locations for libraries and scripts, you
|
||||
may include EasyInstall command-line options such as ``--prefix``,
|
||||
``--install-dir``, and so on, following the ``.egg`` filename on the same
|
||||
command line. For example::
|
||||
|
||||
sh setuptools-0.6c9-py2.4.egg --prefix=~
|
||||
|
||||
You can use ``--help`` to get a full options list, but we recommend consulting
|
||||
the `EasyInstall manual`_ for detailed instructions, especially `the section
|
||||
on custom installation locations`_.
|
||||
|
||||
.. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall
|
||||
.. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations
|
||||
|
||||
|
||||
Cygwin Note
|
||||
-----------
|
||||
|
||||
If you are trying to install setuptools for the **Windows** version of Python
|
||||
(as opposed to the Cygwin version that lives in ``/usr/bin``), you must make
|
||||
sure that an appropriate executable (``python2.3``, ``python2.4``, or
|
||||
``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For
|
||||
example, doing the following at a Cygwin bash prompt will install setuptools
|
||||
for the **Windows** Python found at ``C:\\Python24``::
|
||||
|
||||
ln -s /cygdrive/c/Python24/python.exe python2.4
|
||||
PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg
|
||||
rm python2.4
|
||||
|
||||
|
||||
Downloads
|
||||
=========
|
||||
|
||||
All setuptools downloads can be found at `the project's home page in the Python
|
||||
Package Index`_. Scroll to the very bottom of the page to find the links.
|
||||
|
||||
.. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools
|
||||
|
||||
In addition to the PyPI downloads, the development version of ``setuptools``
|
||||
is available from the `Python SVN sandbox`_, and in-development versions of the
|
||||
`0.6 branch`_ are available as well.
|
||||
|
||||
.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
|
||||
|
||||
.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
|
||||
|
||||
--------------------------------
|
||||
Using Setuptools and EasyInstall
|
||||
--------------------------------
|
||||
|
||||
Here are some of the available manuals, tutorials, and other resources for
|
||||
learning about Setuptools, Python Eggs, and EasyInstall:
|
||||
|
||||
* `The EasyInstall user's guide and reference manual`_
|
||||
* `The setuptools Developer's Guide`_
|
||||
* `The pkg_resources API reference`_
|
||||
* `Package Compatibility Notes`_ (user-maintained)
|
||||
* `The Internal Structure of Python Eggs`_
|
||||
|
||||
Questions, comments, and bug reports should be directed to the `distutils-sig
|
||||
mailing list`_. If you have written (or know of) any tutorials, documentation,
|
||||
plug-ins, or other resources for setuptools users, please let us know about
|
||||
them there, so this reference list can be updated. If you have working,
|
||||
*tested* patches to correct problems or add features, you may submit them to
|
||||
the `setuptools bug tracker`_.
|
||||
|
||||
.. _setuptools bug tracker: http://bugs.python.org/setuptools/
|
||||
.. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes
|
||||
.. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats
|
||||
.. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools
|
||||
.. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources
|
||||
.. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall
|
||||
.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/
|
||||
|
||||
|
||||
-------
|
||||
Credits
|
||||
-------
|
||||
|
||||
* The original design for the ``.egg`` format and the ``pkg_resources`` API was
|
||||
co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first
|
||||
version of ``pkg_resources``, and supplied the OS X operating system version
|
||||
compatibility algorithm.
|
||||
|
||||
* Ian Bicking implemented many early "creature comfort" features of
|
||||
easy_install, including support for downloading via Sourceforge and
|
||||
Subversion repositories. Ian's comments on the Web-SIG about WSGI
|
||||
application deployment also inspired the concept of "entry points" in eggs,
|
||||
and he has given talks at PyCon and elsewhere to inform and educate the
|
||||
community about eggs and setuptools.
|
||||
|
||||
* Jim Fulton contributed time and effort to build automated tests of various
|
||||
aspects of ``easy_install``, and supplied the doctests for the command-line
|
||||
``.exe`` wrappers on Windows.
|
||||
|
||||
* Phillip J. Eby is the principal author and maintainer of setuptools, and
|
||||
first proposed the idea of an importable binary distribution format for
|
||||
Python application plug-ins.
|
||||
|
||||
* Significant parts of the implementation of setuptools were funded by the Open
|
||||
Source Applications Foundation, to provide a plug-in infrastructure for the
|
||||
Chandler PIM application. In addition, many OSAF staffers (such as Mike
|
||||
"Code Bear" Taylor) contributed their time and stress as guinea pigs for the
|
||||
use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!)
|
||||
|
||||
|
||||
Keywords: CPAN PyPI distutils eggs package management
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 3 - Alpha
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: Python Software Foundation License
|
||||
Classifier: License :: OSI Approved :: Zope Public License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: System :: Archiving :: Packaging
|
||||
Classifier: Topic :: System :: Systems Administration
|
||||
Classifier: Topic :: Utilities
|
||||
Requires: Foo
|
||||
133
Lib/packaging/tests/__init__.py
Normal file
133
Lib/packaging/tests/__init__.py
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
"""Test suite for packaging.
|
||||
|
||||
This test suite consists of a collection of test modules in the
|
||||
packaging.tests package. Each test module has a name starting with
|
||||
'test' and contains a function test_suite(). The function is expected
|
||||
to return an initialized unittest.TestSuite instance.
|
||||
|
||||
Utility code is included in packaging.tests.support.
|
||||
"""
|
||||
|
||||
# Put this text back for the backport
|
||||
#Always import unittest from this module, it will be the right version
|
||||
#(standard library unittest for 3.2 and higher, third-party unittest2
|
||||
#release for older versions).
|
||||
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
from test.support import TESTFN
|
||||
|
||||
# XXX move helpers to support, add tests for them, remove things that
|
||||
# duplicate test.support (or keep them for the backport; needs thinking)
|
||||
|
||||
here = os.path.dirname(__file__) or os.curdir
|
||||
verbose = 1
|
||||
|
||||
def test_suite():
    """Build a TestSuite aggregating every test module in this package.

    Modules are discovered by filename: anything in this directory named
    'test*.py' is imported and its own test_suite() is added.
    """
    suite = unittest.TestSuite()
    test_modules = (fn[:-3] for fn in os.listdir(here)
                    if fn.startswith("test") and fn.endswith(".py"))
    for short_name in test_modules:
        modname = "packaging.tests." + short_name
        __import__(modname)
        suite.addTest(sys.modules[modname].test_suite())
    return suite
|
||||
|
||||
|
||||
# Root of the small exception hierarchy used by these test helpers.
class Error(Exception):
    """Base class for regression test exceptions."""
|
||||
|
||||
|
||||
# Raised by _run_suite() when a suite reports failures or errors; the
# argument carries a traceback string or a short summary message.
class TestFailed(Error):
    """Test failed."""
|
||||
|
||||
|
||||
class BasicTestRunner:
    """Minimal test runner: collects results without printing anything."""

    def run(self, test):
        """Execute *test* and return the populated TestResult."""
        outcome = unittest.TestResult()
        test(outcome)
        return outcome
|
||||
|
||||
|
||||
def _run_suite(suite, verbose_=1):
|
||||
"""Run tests from a unittest.TestSuite-derived class."""
|
||||
global verbose
|
||||
verbose = verbose_
|
||||
if verbose_:
|
||||
runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
|
||||
else:
|
||||
runner = BasicTestRunner()
|
||||
|
||||
result = runner.run(suite)
|
||||
if not result.wasSuccessful():
|
||||
if len(result.errors) == 1 and not result.failures:
|
||||
err = result.errors[0][1]
|
||||
elif len(result.failures) == 1 and not result.errors:
|
||||
err = result.failures[0][1]
|
||||
else:
|
||||
err = "errors occurred; run in verbose mode for details"
|
||||
raise TestFailed(err)
|
||||
|
||||
|
||||
def run_unittest(classes, verbose_=1):
    """Run tests from unittest.TestCase-derived classes.

    Originally extracted from stdlib test.test_support and modified to
    support unittest2.  Each entry in *classes* may be a module name
    (must already be imported), a TestSuite/TestCase instance, or a
    TestCase subclass.
    """
    suite = unittest.TestSuite()
    for entry in classes:
        if isinstance(entry, str):
            # A string names an already-imported module to scan for tests.
            if entry not in sys.modules:
                raise ValueError("str arguments must be keys in sys.modules")
            suite.addTest(unittest.findTestCases(sys.modules[entry]))
        elif isinstance(entry, (unittest.TestSuite, unittest.TestCase)):
            suite.addTest(entry)
        else:
            suite.addTest(unittest.makeSuite(entry))
    _run_suite(suite, verbose_)
|
||||
|
||||
|
||||
def reap_children():
    """Reap dead child processes so no zombies are left behind.

    Use this function at the end of test_main() whenever sub-processes
    are started: zombies hog resources and create problems when looking
    for refleaks.  Extracted from stdlib test.support.

    Fix: the original used a bare ``except:``, which also swallows
    KeyboardInterrupt/SystemExit.  Only the expected failures are caught
    now: OSError (no children left, or waitpid(-1, ...) unsupported) and
    AttributeError (os.WNOHANG does not exist on Windows even though
    os.waitpid does).
    """
    if not hasattr(os, 'waitpid'):
        return
    any_process = -1
    while True:
        try:
            pid, status = os.waitpid(any_process, os.WNOHANG)
            if pid == 0:
                # No more finished children right now.
                break
        except (OSError, AttributeError):
            break
|
||||
|
||||
|
||||
def captured_stdout(func, *args, **kw):
    """Call func(*args, **kw) with sys.stdout captured.

    Returns a (result, captured_text) pair; sys.stdout is restored even
    if the call raises.
    """
    import io
    saved = sys.stdout
    sys.stdout = io.StringIO()
    try:
        result = func(*args, **kw)
        sys.stdout.seek(0)
        return result, sys.stdout.read()
    finally:
        sys.stdout = saved
|
||||
|
||||
|
||||
def unload(name):
    """Remove *name* from sys.modules; a missing entry is ignored."""
    sys.modules.pop(name, None)
|
||||
20
Lib/packaging/tests/__main__.py
Normal file
20
Lib/packaging/tests/__main__.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
"""Packaging test suite runner."""
|
||||
|
||||
# Ripped from importlib tests, thanks Brett!
|
||||
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
from test.support import run_unittest, reap_children
|
||||
|
||||
|
||||
def test_main():
    """Discover and run every test beneath the packaging tests directory."""
    here = os.path.dirname(__file__)
    project_root = os.path.dirname(os.path.dirname(here))
    suite = unittest.TestLoader().discover(here, top_level_dir=project_root)
    run_unittest(suite)
    reap_children()


if __name__ == '__main__':
    test_main()
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
Metadata-version: 1.2
|
||||
Name: babar
|
||||
Version: 0.1
|
||||
Author: FELD Boris
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
babar.png,babar.png
|
||||
babar.cfg,babar.cfg
|
||||
1
Lib/packaging/tests/fake_dists/babar.cfg
Normal file
1
Lib/packaging/tests/fake_dists/babar.cfg
Normal file
|
|
@ -0,0 +1 @@
|
|||
Config
|
||||
0
Lib/packaging/tests/fake_dists/babar.png
Normal file
0
Lib/packaging/tests/fake_dists/babar.png
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: bacon
|
||||
Version: 0.1
|
||||
Provides-Dist: truffles (2.0)
|
||||
Provides-Dist: bacon (0.1)
|
||||
Obsoletes-Dist: truffles (>=0.9,<=1.5)
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
Metadata-Version: 1.0
|
||||
Name: banana
|
||||
Version: 0.4
|
||||
Summary: A yellow fruit
|
||||
Home-page: http://en.wikipedia.org/wiki/Banana
|
||||
Author: Josip Djolonga
|
||||
Author-email: foo@nbar.com
|
||||
License: BSD
|
||||
Description: A fruit
|
||||
Keywords: foo bar
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 4 - Beta
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Intended Audience :: Science/Research
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Scientific/Engineering :: GIS
|
||||
|
|
@ -0,0 +1 @@
|
|||
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
|
||||
# -*- Entry points: -*-
|
||||
|
||||
|
|
@ -0,0 +1 @@
|
|||
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
# this should be ignored
|
||||
|
||||
strawberry >=0.5
|
||||
|
||||
[section ignored]
|
||||
foo ==0.5
|
||||
5
Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
Normal file
5
Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: cheese
|
||||
Version: 2.0.2
|
||||
Provides-Dist: truffles (1.0.2)
|
||||
Obsoletes-Dist: truffles (!=1.2,<=2.0)
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: choxie
|
||||
Version: 2.0.0.9
|
||||
Summary: Chocolate with a kick!
|
||||
Requires-Dist: towel-stuff (0.1)
|
||||
Requires-Dist: nut
|
||||
Provides-Dist: truffles (1.0)
|
||||
Obsoletes-Dist: truffles (<=0.8,>=0.5)
|
||||
Obsoletes-Dist: truffles (<=0.9,>=0.6)
|
||||
|
|
@ -0,0 +1 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from towel_stuff import Towel
|
||||
|
||||
class Chocolate(object):
    """A piece of chocolate."""

    def wrap_with_towel(self):
        # Wrap this chocolate in a new Towel and return the towel.
        # NOTE(review): Towel.wrap presumably records the wrapped object
        # -- confirm against towel_stuff; this module is a test fixture
        # for the packaging database tests, so its exact behavior may be
        # asserted on elsewhere.
        towel = Towel()
        towel.wrap(self)
        return towel
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from choxie.chocolate import Chocolate
|
||||
|
||||
# NOTE(review): test-fixture subclass -- presumably used to exercise
# subclass/dependency handling in the packaging tests; confirm before
# changing anything here.
class Truffle(Chocolate):
    """A truffle."""
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: coconuts-aster
|
||||
Version: 10.3
|
||||
Provides-Dist: strawberry (0.6)
|
||||
Provides-Dist: banana (0.4)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
Metadata-Version: 1.2
|
||||
Name: grammar
|
||||
Version: 1.0a4
|
||||
Requires-Dist: truffles (>=1.2)
|
||||
Author: Sherlock Holmes
|
||||
|
|
@ -0,0 +1 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue