Christina Theodoridou
6 years ago
348 changed files with 48806 additions and 0 deletions
@ -0,0 +1 @@ |
|||
hello world |
@ -0,0 +1,76 @@ |
|||
# This file must be used with "source bin/activate" *from bash* |
|||
# you cannot run it directly |
|||
|
|||
deactivate () { |
|||
# reset old environment variables |
|||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then |
|||
PATH="${_OLD_VIRTUAL_PATH:-}" |
|||
export PATH |
|||
unset _OLD_VIRTUAL_PATH |
|||
fi |
|||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then |
|||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" |
|||
export PYTHONHOME |
|||
unset _OLD_VIRTUAL_PYTHONHOME |
|||
fi |
|||
|
|||
# This should detect bash and zsh, which have a hash command that must |
|||
# be called to get it to forget past commands. Without forgetting |
|||
# past commands the $PATH changes we made may not be respected |
|||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then |
|||
hash -r |
|||
fi |
|||
|
|||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then |
|||
PS1="${_OLD_VIRTUAL_PS1:-}" |
|||
export PS1 |
|||
unset _OLD_VIRTUAL_PS1 |
|||
fi |
|||
|
|||
unset VIRTUAL_ENV |
|||
if [ ! "$1" = "nondestructive" ] ; then |
|||
# Self destruct! |
|||
unset -f deactivate |
|||
fi |
|||
} |
|||
|
|||
# unset irrelevant variables |
|||
deactivate nondestructive |
|||
|
|||
VIRTUAL_ENV="/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv" |
|||
export VIRTUAL_ENV |
|||
|
|||
_OLD_VIRTUAL_PATH="$PATH" |
|||
PATH="$VIRTUAL_ENV/bin:$PATH" |
|||
export PATH |
|||
|
|||
# unset PYTHONHOME if set |
|||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) |
|||
# could use `if (set -u; : $PYTHONHOME) ;` in bash |
|||
if [ -n "${PYTHONHOME:-}" ] ; then |
|||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" |
|||
unset PYTHONHOME |
|||
fi |
|||
|
|||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then |
|||
_OLD_VIRTUAL_PS1="${PS1:-}" |
|||
if [ "x(myenv) " != x ] ; then |
|||
PS1="(myenv) ${PS1:-}" |
|||
else |
|||
if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then |
|||
# special case for Aspen magic directories |
|||
# see http://www.zetadev.com/software/aspen/ |
|||
PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" |
|||
else |
|||
PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" |
|||
fi |
|||
fi |
|||
export PS1 |
|||
fi |
|||
|
|||
# This should detect bash and zsh, which have a hash command that must |
|||
# be called to get it to forget past commands. Without forgetting |
|||
# past commands the $PATH changes we made may not be respected |
|||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then |
|||
hash -r |
|||
fi |
@ -0,0 +1,37 @@ |
|||
# This file must be used with "source bin/activate.csh" *from csh*. |
|||
# You cannot run it directly. |
|||
# Created by Davide Di Blasi <davidedb@gmail.com>. |
|||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com> |
|||
|
|||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' |
|||
|
|||
# Unset irrelevant variables. |
|||
deactivate nondestructive |
|||
|
|||
setenv VIRTUAL_ENV "/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv" |
|||
|
|||
set _OLD_VIRTUAL_PATH="$PATH" |
|||
setenv PATH "$VIRTUAL_ENV/bin:$PATH" |
|||
|
|||
|
|||
set _OLD_VIRTUAL_PROMPT="$prompt" |
|||
|
|||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then |
|||
if ("myenv" != "") then |
|||
set env_name = "myenv" |
|||
else |
|||
if (`basename "VIRTUAL_ENV"` == "__") then |
|||
# special case for Aspen magic directories |
|||
# see http://www.zetadev.com/software/aspen/ |
|||
set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` |
|||
else |
|||
set env_name = `basename "$VIRTUAL_ENV"` |
|||
endif |
|||
endif |
|||
set prompt = "[$env_name] $prompt" |
|||
unset env_name |
|||
endif |
|||
|
|||
alias pydoc python -m pydoc |
|||
|
|||
rehash |
@ -0,0 +1,75 @@ |
|||
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) |
|||
# you cannot run it directly |
|||
|
|||
function deactivate -d "Exit virtualenv and return to normal shell environment" |
|||
# reset old environment variables |
|||
if test -n "$_OLD_VIRTUAL_PATH" |
|||
set -gx PATH $_OLD_VIRTUAL_PATH |
|||
set -e _OLD_VIRTUAL_PATH |
|||
end |
|||
if test -n "$_OLD_VIRTUAL_PYTHONHOME" |
|||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME |
|||
set -e _OLD_VIRTUAL_PYTHONHOME |
|||
end |
|||
|
|||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE" |
|||
functions -e fish_prompt |
|||
set -e _OLD_FISH_PROMPT_OVERRIDE |
|||
functions -c _old_fish_prompt fish_prompt |
|||
functions -e _old_fish_prompt |
|||
end |
|||
|
|||
set -e VIRTUAL_ENV |
|||
if test "$argv[1]" != "nondestructive" |
|||
# Self destruct! |
|||
functions -e deactivate |
|||
end |
|||
end |
|||
|
|||
# unset irrelevant variables |
|||
deactivate nondestructive |
|||
|
|||
set -gx VIRTUAL_ENV "/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv" |
|||
|
|||
set -gx _OLD_VIRTUAL_PATH $PATH |
|||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH |
|||
|
|||
# unset PYTHONHOME if set |
|||
if set -q PYTHONHOME |
|||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME |
|||
set -e PYTHONHOME |
|||
end |
|||
|
|||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" |
|||
# fish uses a function instead of an env var to generate the prompt. |
|||
|
|||
# save the current fish_prompt function as the function _old_fish_prompt |
|||
functions -c fish_prompt _old_fish_prompt |
|||
|
|||
# with the original prompt function renamed, we can override with our own. |
|||
function fish_prompt |
|||
# Save the return status of the last command |
|||
set -l old_status $status |
|||
|
|||
# Prompt override? |
|||
if test -n "(myenv) " |
|||
printf "%s%s" "(myenv) " (set_color normal) |
|||
else |
|||
# ...Otherwise, prepend env |
|||
set -l _checkbase (basename "$VIRTUAL_ENV") |
|||
if test $_checkbase = "__" |
|||
# special case for Aspen magic directories |
|||
# see http://www.zetadev.com/software/aspen/ |
|||
printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) |
|||
else |
|||
printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) |
|||
end |
|||
end |
|||
|
|||
# Restore the return status of the previous command. |
|||
echo "exit $old_status" | . |
|||
_old_fish_prompt |
|||
end |
|||
|
|||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" |
|||
end |
@ -0,0 +1,11 @@ |
|||
#!/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv/bin/python3 |
|||
|
|||
# -*- coding: utf-8 -*- |
|||
import re |
|||
import sys |
|||
|
|||
from setuptools.command.easy_install import main |
|||
|
|||
if __name__ == '__main__': |
|||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) |
|||
sys.exit(main()) |
@ -0,0 +1,11 @@ |
|||
#!/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv/bin/python3 |
|||
|
|||
# -*- coding: utf-8 -*- |
|||
import re |
|||
import sys |
|||
|
|||
from setuptools.command.easy_install import main |
|||
|
|||
if __name__ == '__main__': |
|||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) |
|||
sys.exit(main()) |
@ -0,0 +1,11 @@ |
|||
#!/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv/bin/python3 |
|||
|
|||
# -*- coding: utf-8 -*- |
|||
import re |
|||
import sys |
|||
|
|||
from pip import main |
|||
|
|||
if __name__ == '__main__': |
|||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) |
|||
sys.exit(main()) |
@ -0,0 +1,11 @@ |
|||
#!/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv/bin/python3 |
|||
|
|||
# -*- coding: utf-8 -*- |
|||
import re |
|||
import sys |
|||
|
|||
from pip import main |
|||
|
|||
if __name__ == '__main__': |
|||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) |
|||
sys.exit(main()) |
@ -0,0 +1,11 @@ |
|||
#!/home/chris/Storage/christina/Desktop/THMMY/AVT/THE-Assignment/classifier/myenv/bin/python3 |
|||
|
|||
# -*- coding: utf-8 -*- |
|||
import re |
|||
import sys |
|||
|
|||
from pip import main |
|||
|
|||
if __name__ == '__main__': |
|||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) |
|||
sys.exit(main()) |
@ -0,0 +1 @@ |
|||
python3 |
@ -0,0 +1 @@ |
|||
/usr/bin/python3 |
Binary file not shown.
@ -0,0 +1,5 @@ |
|||
"""Run the EasyInstall command""" |
|||
|
|||
if __name__ == '__main__': |
|||
from setuptools.command.easy_install import main |
|||
main() |
@ -0,0 +1,39 @@ |
|||
pip |
|||
=== |
|||
|
|||
The `PyPA recommended |
|||
<https://packaging.python.org/en/latest/current/>`_ |
|||
tool for installing Python packages. |
|||
|
|||
* `Installation <https://pip.pypa.io/en/stable/installing.html>`_ |
|||
* `Documentation <https://pip.pypa.io/>`_ |
|||
* `Changelog <https://pip.pypa.io/en/stable/news.html>`_ |
|||
* `Github Page <https://github.com/pypa/pip>`_ |
|||
* `Issue Tracking <https://github.com/pypa/pip/issues>`_ |
|||
* `User mailing list <http://groups.google.com/group/python-virtualenv>`_ |
|||
* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_ |
|||
* User IRC: #pypa on Freenode. |
|||
* Dev IRC: #pypa-dev on Freenode. |
|||
|
|||
|
|||
.. image:: https://img.shields.io/pypi/v/pip.svg |
|||
:target: https://pypi.python.org/pypi/pip |
|||
|
|||
.. image:: https://img.shields.io/travis/pypa/pip/master.svg |
|||
:target: http://travis-ci.org/pypa/pip |
|||
|
|||
.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg |
|||
:target: https://ci.appveyor.com/project/pypa/pip/history |
|||
|
|||
.. image:: https://readthedocs.org/projects/pip/badge/?version=stable |
|||
:target: https://pip.pypa.io/en/stable |
|||
|
|||
Code of Conduct |
|||
--------------- |
|||
|
|||
Everyone interacting in the pip project's codebases, issue trackers, chat |
|||
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. |
|||
|
|||
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ |
|||
|
|||
|
@ -0,0 +1 @@ |
|||
pip |
@ -0,0 +1,70 @@ |
|||
Metadata-Version: 2.0 |
|||
Name: pip |
|||
Version: 9.0.1 |
|||
Summary: The PyPA recommended tool for installing Python packages. |
|||
Home-page: https://pip.pypa.io/ |
|||
Author: The pip developers |
|||
Author-email: python-virtualenv@groups.google.com |
|||
License: MIT |
|||
Keywords: easy_install distutils setuptools egg virtualenv |
|||
Platform: UNKNOWN |
|||
Classifier: Development Status :: 5 - Production/Stable |
|||
Classifier: Intended Audience :: Developers |
|||
Classifier: License :: OSI Approved :: MIT License |
|||
Classifier: Topic :: Software Development :: Build Tools |
|||
Classifier: Programming Language :: Python :: 2 |
|||
Classifier: Programming Language :: Python :: 2.6 |
|||
Classifier: Programming Language :: Python :: 2.7 |
|||
Classifier: Programming Language :: Python :: 3 |
|||
Classifier: Programming Language :: Python :: 3.3 |
|||
Classifier: Programming Language :: Python :: 3.4 |
|||
Classifier: Programming Language :: Python :: 3.5 |
|||
Classifier: Programming Language :: Python :: Implementation :: PyPy |
|||
Requires-Python: >=2.6,!=3.0.*,!=3.1.*,!=3.2.* |
|||
Provides-Extra: testing |
|||
Provides-Extra: testing |
|||
Requires-Dist: mock; extra == 'testing' |
|||
Requires-Dist: pretend; extra == 'testing' |
|||
Requires-Dist: pytest; extra == 'testing' |
|||
Requires-Dist: scripttest (>=1.3); extra == 'testing' |
|||
Requires-Dist: virtualenv (>=1.10); extra == 'testing' |
|||
|
|||
pip |
|||
=== |
|||
|
|||
The `PyPA recommended |
|||
<https://packaging.python.org/en/latest/current/>`_ |
|||
tool for installing Python packages. |
|||
|
|||
* `Installation <https://pip.pypa.io/en/stable/installing.html>`_ |
|||
* `Documentation <https://pip.pypa.io/>`_ |
|||
* `Changelog <https://pip.pypa.io/en/stable/news.html>`_ |
|||
* `Github Page <https://github.com/pypa/pip>`_ |
|||
* `Issue Tracking <https://github.com/pypa/pip/issues>`_ |
|||
* `User mailing list <http://groups.google.com/group/python-virtualenv>`_ |
|||
* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_ |
|||
* User IRC: #pypa on Freenode. |
|||
* Dev IRC: #pypa-dev on Freenode. |
|||
|
|||
|
|||
.. image:: https://img.shields.io/pypi/v/pip.svg |
|||
:target: https://pypi.python.org/pypi/pip |
|||
|
|||
.. image:: https://img.shields.io/travis/pypa/pip/master.svg |
|||
:target: http://travis-ci.org/pypa/pip |
|||
|
|||
.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg |
|||
:target: https://ci.appveyor.com/project/pypa/pip/history |
|||
|
|||
.. image:: https://readthedocs.org/projects/pip/badge/?version=stable |
|||
:target: https://pip.pypa.io/en/stable |
|||
|
|||
Code of Conduct |
|||
--------------- |
|||
|
|||
Everyone interacting in the pip project's codebases, issue trackers, chat |
|||
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. |
|||
|
|||
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ |
|||
|
|||
|
@ -0,0 +1,123 @@ |
|||
pip/__init__.py,sha256=ds3YAAeZwhX6e8yfxjLCxPlOb37Bh42ew09XEPJjQGE,11537 |
|||
pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 |
|||
pip/basecommand.py,sha256=TTlmZesQ4Vuxcto2KqwZGmgmN5ioHEl_DeFev9ie_SA,11910 |
|||
pip/baseparser.py,sha256=AKMOeF3fTrRroiv0DmTQbdiLW0DQux2KqGC_dJJB9d0,10465 |
|||
pip/cmdoptions.py,sha256=pRptFz05iFEfSW4Flg3x1_P92sYlFvq7elhnwujikNY,16473 |
|||
pip/download.py,sha256=rA0wbmqC2n9ejX481YJSidmKgQqQDjdaxkHkHlAN68k,32171 |
|||
pip/exceptions.py,sha256=BvqH-Jw3tP2b-2IJ2kjrQemOAPMqKrQMLRIZHZQpJXk,8121 |
|||
pip/index.py,sha256=L6UhtAEZc2qw7BqfQrkPQcw2gCgEw3GukLRSA95BNyI,39950 |
|||
pip/locations.py,sha256=9rJRlgonC6QC2zGDIn_7mXaoZ9_tF_IHM2BQhWVRgbo,5626 |
|||
pip/pep425tags.py,sha256=q3kec4f6NHszuGYIhGIbVvs896D06uJAnKFgJ_wce44,10980 |
|||
pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 |
|||
pip/wheel.py,sha256=QSWmGs2ui-n4UMWm0JUY6aMCcwNKungVzbWsxI9KlJQ,32010 |
|||
pip/_vendor/__init__.py,sha256=L-0x9jj0HSZen1Fm2U0GUbxfjfwQPIXc4XJ4IAxy8D8,4804 |
|||
pip/commands/__init__.py,sha256=2Uq3HCdjchJD9FL1LB7rd5v6UySVAVizX0W3EX3hIoE,2244 |
|||
pip/commands/check.py,sha256=-A7GI1-WZBh9a4P6UoH_aR-J7I8Lz8ly7m3wnCjmevs,1382 |
|||
pip/commands/completion.py,sha256=kkPgVX7SUcJ_8Juw5GkgWaxHN9_45wmAr9mGs1zXEEs,2453 |
|||
pip/commands/download.py,sha256=8RuuPmSYgAq3iEDTqZY_1PDXRqREdUULHNjWJeAv7Mo,7810 |
|||
pip/commands/freeze.py,sha256=h6-yFMpjCjbNj8-gOm5UuoF6cg14N5rPV4TCi3_CeuI,2835 |
|||
pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 |
|||
pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 |
|||
pip/commands/install.py,sha256=o-CR1TKf-b1qaFv47nNlawqsIfDjXyIzv_iJUw1Trag,18069 |
|||
pip/commands/list.py,sha256=93bCiFyt2Qut_YHkYHJMZHpXladmxsjS-yOtZeb3uqI,11369 |
|||
pip/commands/search.py,sha256=oTs9QNdefnrmCV_JeftG0PGiMuYVmiEDF1OUaYsmDao,4502 |
|||
pip/commands/show.py,sha256=ZYM57_7U8KP9MQIIyHKQdZxmiEZByy-DRzB697VFoTY,5891 |
|||
pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 |
|||
pip/commands/wheel.py,sha256=z5SEhws2YRMb0Ml1IEkg6jFZMLRpLl86bHCrQbYt5zo,7729 |
|||
pip/compat/__init__.py,sha256=2Xs_IpsmdRgHbQgQO0c8_lPvHJnQXHyGWxPbLbYJL4c,4672 |
|||
pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 |
|||
pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 |
|||
pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 |
|||
pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 |
|||
pip/operations/check.py,sha256=uwUN9cs1sPo7c0Sj6pRrSv7b22Pk29SXUImTelVchMQ,1590 |
|||
pip/operations/freeze.py,sha256=k-7w7LsM-RpPv7ERBzHiPpYkH-GuYfHLyR-Cp_1VPL0,5194 |
|||
pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 |
|||
pip/req/req_file.py,sha256=fG9MDsXUNPhmGwxUiwrIXEynyD8Q7s3L47-hLZPDXq0,11926 |
|||
pip/req/req_install.py,sha256=gYrH-lwQMmt55VVbav_EtRIPu94cQbHFHm_Kq6AeHbg,46487 |
|||
pip/req/req_set.py,sha256=jHspXqcA2FxcF05dgUIAZ5huYPv6bn0wRUX0Z7PKmaA,34462 |
|||
pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 |
|||
pip/utils/__init__.py,sha256=zk1vF2EzHZX1ZKPwgeC9I6yKvs8IJ6NZEfXgp2IP8hI,27912 |
|||
pip/utils/appdirs.py,sha256=kj2LK-I2fC5QnEh_A_v-ev_IQMcXaWWF5DE39sNvCLQ,8811 |
|||
pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 |
|||
pip/utils/deprecation.py,sha256=X_FMjtDbMJqfqEkdRrki-mYyIdPB6I6DHUTCA_ChY6M,2232 |
|||
pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971 |
|||
pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 |
|||
pip/utils/glibc.py,sha256=jcQYjt_oJLPKVZB28Kauy4Sw70zS-wawxoU1HHX36_0,2939 |
|||
pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 |
|||
pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 |
|||
pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 |
|||
pip/utils/packaging.py,sha256=qhmli14odw6DIhWJgQYS2Q0RrSbr8nXNcG48f5yTRms,2080 |
|||
pip/utils/setuptools_build.py,sha256=0blfscmNJW_iZ5DcswJeDB_PbtTEjfK9RL1R1WEDW2E,278 |
|||
pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597 |
|||
pip/vcs/__init__.py,sha256=WafFliUTHMmsSISV8PHp1M5EXDNSWyJr78zKaQmPLdY,12374 |
|||
pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 |
|||
pip/vcs/git.py,sha256=5LfWryi78A-2ULjEZJvCTarJ_3l8venwXASlwm8hiug,11197 |
|||
pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 |
|||
pip/vcs/subversion.py,sha256=GAuX2Sk7IZvJyEzENKcVld_wGBrQ3fpXDlXjapZEYdI,9350 |
|||
pip-9.0.1.dist-info/DESCRIPTION.rst,sha256=Va8Wj1XBpTbVQ2Z41mZRJdALEeziiS_ZewWn1H2ecY4,1287 |
|||
pip-9.0.1.dist-info/METADATA,sha256=LZLdUBpPmFB4Of_9wIHegCXhbmiByzOv6WCGs3rixt0,2553 |
|||
pip-9.0.1.dist-info/RECORD,, |
|||
pip-9.0.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 |
|||
pip-9.0.1.dist-info/entry_points.txt,sha256=Q-fR2tcp9DRdeXoGn1wR67Xecy32o5EyQEnzDghwqqk,68 |
|||
pip-9.0.1.dist-info/metadata.json,sha256=eAfMY0s5HjwtLLjIZ9LYDxWocl2her-knzH7qTJ38CU,1565 |
|||
pip-9.0.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 |
|||
../../../bin/pip,sha256=BZ5XUh9I1BRLnSOOS1QrrMyNrU3dEhtqh0JBYY6LCxs,281 |
|||
../../../bin/pip3,sha256=BZ5XUh9I1BRLnSOOS1QrrMyNrU3dEhtqh0JBYY6LCxs,281 |
|||
../../../bin/pip3.6,sha256=BZ5XUh9I1BRLnSOOS1QrrMyNrU3dEhtqh0JBYY6LCxs,281 |
|||
pip-9.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 |
|||
pip/vcs/__pycache__/git.cpython-36.pyc,, |
|||
pip/vcs/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/vcs/__pycache__/mercurial.cpython-36.pyc,, |
|||
pip/vcs/__pycache__/subversion.cpython-36.pyc,, |
|||
pip/vcs/__pycache__/bazaar.cpython-36.pyc,, |
|||
pip/compat/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/compat/__pycache__/dictconfig.cpython-36.pyc,, |
|||
pip/commands/__pycache__/wheel.cpython-36.pyc,, |
|||
pip/commands/__pycache__/freeze.cpython-36.pyc,, |
|||
pip/commands/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/commands/__pycache__/search.cpython-36.pyc,, |
|||
pip/commands/__pycache__/completion.cpython-36.pyc,, |
|||
pip/commands/__pycache__/hash.cpython-36.pyc,, |
|||
pip/commands/__pycache__/download.cpython-36.pyc,, |
|||
pip/commands/__pycache__/help.cpython-36.pyc,, |
|||
pip/commands/__pycache__/uninstall.cpython-36.pyc,, |
|||
pip/commands/__pycache__/show.cpython-36.pyc,, |
|||
pip/commands/__pycache__/install.cpython-36.pyc,, |
|||
pip/commands/__pycache__/list.cpython-36.pyc,, |
|||
pip/commands/__pycache__/check.cpython-36.pyc,, |
|||
pip/_vendor/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/utils/__pycache__/encoding.cpython-36.pyc,, |
|||
pip/utils/__pycache__/appdirs.cpython-36.pyc,, |
|||
pip/utils/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/utils/__pycache__/hashes.cpython-36.pyc,, |
|||
pip/utils/__pycache__/filesystem.cpython-36.pyc,, |
|||
pip/utils/__pycache__/ui.cpython-36.pyc,, |
|||
pip/utils/__pycache__/setuptools_build.cpython-36.pyc,, |
|||
pip/utils/__pycache__/packaging.cpython-36.pyc,, |
|||
pip/utils/__pycache__/build.cpython-36.pyc,, |
|||
pip/utils/__pycache__/outdated.cpython-36.pyc,, |
|||
pip/utils/__pycache__/glibc.cpython-36.pyc,, |
|||
pip/utils/__pycache__/deprecation.cpython-36.pyc,, |
|||
pip/utils/__pycache__/logging.cpython-36.pyc,, |
|||
pip/models/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/models/__pycache__/index.cpython-36.pyc,, |
|||
pip/operations/__pycache__/freeze.cpython-36.pyc,, |
|||
pip/operations/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/operations/__pycache__/check.cpython-36.pyc,, |
|||
pip/__pycache__/wheel.cpython-36.pyc,, |
|||
pip/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/__pycache__/index.cpython-36.pyc,, |
|||
pip/__pycache__/download.cpython-36.pyc,, |
|||
pip/__pycache__/status_codes.cpython-36.pyc,, |
|||
pip/__pycache__/basecommand.cpython-36.pyc,, |
|||
pip/__pycache__/cmdoptions.cpython-36.pyc,, |
|||
pip/__pycache__/locations.cpython-36.pyc,, |
|||
pip/__pycache__/baseparser.cpython-36.pyc,, |
|||
pip/__pycache__/exceptions.cpython-36.pyc,, |
|||
pip/__pycache__/pep425tags.cpython-36.pyc,, |
|||
pip/__pycache__/__main__.cpython-36.pyc,, |
|||
pip/req/__pycache__/__init__.cpython-36.pyc,, |
|||
pip/req/__pycache__/req_install.cpython-36.pyc,, |
|||
pip/req/__pycache__/req_uninstall.cpython-36.pyc,, |
|||
pip/req/__pycache__/req_set.cpython-36.pyc,, |
|||
pip/req/__pycache__/req_file.cpython-36.pyc,, |
@ -0,0 +1,6 @@ |
|||
Wheel-Version: 1.0 |
|||
Generator: bdist_wheel (0.30.0) |
|||
Root-Is-Purelib: true |
|||
Tag: py2-none-any |
|||
Tag: py3-none-any |
|||
|
@ -0,0 +1,5 @@ |
|||
[console_scripts] |
|||
pip = pip:main |
|||
pip3 = pip:main |
|||
pip3.6 = pip:main |
|||
|
@ -0,0 +1 @@ |
|||
{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.6": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.6": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.30.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "requires_python": ">=2.6,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "9.0.1"} |
@ -0,0 +1 @@ |
|||
pip |
@ -0,0 +1,338 @@ |
|||
#!/usr/bin/env python |
|||
from __future__ import absolute_import |
|||
|
|||
import locale |
|||
import logging |
|||
import os |
|||
import optparse |
|||
import warnings |
|||
|
|||
import sys |
|||
import re |
|||
|
|||
# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, |
|||
# but if invoked (i.e. imported), it will issue a warning to stderr if socks |
|||
# isn't available. requests unconditionally imports urllib3's socks contrib |
|||
# module, triggering this warning. The warning breaks DEP-8 tests (because of |
|||
# the stderr output) and is just plain annoying in normal usage. I don't want |
|||
# to add socks as yet another dependency for pip, nor do I want to allow-stder |
|||
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to |
|||
# be done before the import of pip.vcs. |
|||
try: |
|||
from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning |
|||
except ImportError: |
|||
from urllib3.exceptions import DependencyWarning |
|||
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa |
|||
|
|||
|
|||
from pip.exceptions import InstallationError, CommandError, PipError |
|||
from pip.utils import get_installed_distributions, get_prog |
|||
from pip.utils import deprecation, dist_is_editable |
|||
from pip.vcs import git, mercurial, subversion, bazaar # noqa |
|||
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter |
|||
from pip.commands import get_summaries, get_similar_commands |
|||
from pip.commands import commands_dict |
|||
try: |
|||
from pip._vendor.requests.packages.urllib3.exceptions import ( |
|||
InsecureRequestWarning, |
|||
) |
|||
except ImportError: |
|||
from urllib3.exceptions import ( |
|||
InsecureRequestWarning, |
|||
) |
|||
|
|||
# assignment for flake8 to be happy |
|||
|
|||
# This fixes a peculiarity when importing via __import__ - as we are |
|||
# initialising the pip module, "from pip import cmdoptions" is recursive |
|||
# and appears not to work properly in that situation. |
|||
import pip.cmdoptions |
|||
cmdoptions = pip.cmdoptions |
|||
|
|||
# The version as used in the setup.py and the docs conf.py |
|||
__version__ = "9.0.1" |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
# Hide the InsecureRequestWarning from urllib3 |
|||
warnings.filterwarnings("ignore", category=InsecureRequestWarning) |
|||
|
|||
|
|||
def autocomplete(): |
|||
"""Command and option completion for the main option parser (and options) |
|||
and its subcommands (and options). |
|||
|
|||
Enable by sourcing one of the completion shell scripts (bash, zsh or fish). |
|||
""" |
|||
# Don't complete if user hasn't sourced bash_completion file. |
|||
if 'PIP_AUTO_COMPLETE' not in os.environ: |
|||
return |
|||
cwords = os.environ['COMP_WORDS'].split()[1:] |
|||
cword = int(os.environ['COMP_CWORD']) |
|||
try: |
|||
current = cwords[cword - 1] |
|||
except IndexError: |
|||
current = '' |
|||
|
|||
subcommands = [cmd for cmd, summary in get_summaries()] |
|||
options = [] |
|||
# subcommand |
|||
try: |
|||
subcommand_name = [w for w in cwords if w in subcommands][0] |
|||
except IndexError: |
|||
subcommand_name = None |
|||
|
|||
parser = create_main_parser() |
|||
# subcommand options |
|||
if subcommand_name: |
|||
# special case: 'help' subcommand has no options |
|||
if subcommand_name == 'help': |
|||
sys.exit(1) |
|||
# special case: list locally installed dists for uninstall command |
|||
if subcommand_name == 'uninstall' and not current.startswith('-'): |
|||
installed = [] |
|||
lc = current.lower() |
|||
for dist in get_installed_distributions(local_only=True): |
|||
if dist.key.startswith(lc) and dist.key not in cwords[1:]: |
|||
installed.append(dist.key) |
|||
# if there are no dists installed, fall back to option completion |
|||
if installed: |
|||
for dist in installed: |
|||
print(dist) |
|||
sys.exit(1) |
|||
|
|||
subcommand = commands_dict[subcommand_name]() |
|||
options += [(opt.get_opt_string(), opt.nargs) |
|||
for opt in subcommand.parser.option_list_all |
|||
if opt.help != optparse.SUPPRESS_HELP] |
|||
|
|||
# filter out previously specified options from available options |
|||
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] |
|||
options = [(x, v) for (x, v) in options if x not in prev_opts] |
|||
# filter options by current input |
|||
options = [(k, v) for k, v in options if k.startswith(current)] |
|||
for option in options: |
|||
opt_label = option[0] |
|||
# append '=' to options which require args |
|||
if option[1]: |
|||
opt_label += '=' |
|||
print(opt_label) |
|||
else: |
|||
# show main parser options only when necessary |
|||
if current.startswith('-') or current.startswith('--'): |
|||
opts = [i.option_list for i in parser.option_groups] |
|||
opts.append(parser.option_list) |
|||
opts = (o for it in opts for o in it) |
|||
|
|||
subcommands += [i.get_opt_string() for i in opts |
|||
if i.help != optparse.SUPPRESS_HELP] |
|||
|
|||
print(' '.join([x for x in subcommands if x.startswith(current)])) |
|||
sys.exit(1) |
|||
|
|||
|
|||
def create_main_parser(): |
|||
parser_kw = { |
|||
'usage': '\n%prog <command> [options]', |
|||
'add_help_option': False, |
|||
'formatter': UpdatingDefaultsHelpFormatter(), |
|||
'name': 'global', |
|||
'prog': get_prog(), |
|||
} |
|||
|
|||
parser = ConfigOptionParser(**parser_kw) |
|||
parser.disable_interspersed_args() |
|||
|
|||
pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
|||
parser.version = 'pip %s from %s (python %s)' % ( |
|||
__version__, pip_pkg_dir, sys.version[:3]) |
|||
|
|||
# add the general options |
|||
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) |
|||
parser.add_option_group(gen_opts) |
|||
|
|||
parser.main = True # so the help formatter knows |
|||
|
|||
# create command listing for description |
|||
command_summaries = get_summaries() |
|||
description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] |
|||
parser.description = '\n'.join(description) |
|||
|
|||
return parser |
|||
|
|||
|
|||
def parseopts(args): |
|||
parser = create_main_parser() |
|||
|
|||
# Note: parser calls disable_interspersed_args(), so the result of this |
|||
# call is to split the initial args into the general options before the |
|||
# subcommand and everything else. |
|||
# For example: |
|||
# args: ['--timeout=5', 'install', '--user', 'INITools'] |
|||
# general_options: ['--timeout==5'] |
|||
# args_else: ['install', '--user', 'INITools'] |
|||
general_options, args_else = parser.parse_args(args) |
|||
|
|||
# --version |
|||
if general_options.version: |
|||
sys.stdout.write(parser.version) |
|||
sys.stdout.write(os.linesep) |
|||
sys.exit() |
|||
|
|||
# pip || pip help -> print_help() |
|||
if not args_else or (args_else[0] == 'help' and len(args_else) == 1): |
|||
parser.print_help() |
|||
sys.exit() |
|||
|
|||
# the subcommand name |
|||
cmd_name = args_else[0] |
|||
|
|||
if cmd_name not in commands_dict: |
|||
guess = get_similar_commands(cmd_name) |
|||
|
|||
msg = ['unknown command "%s"' % cmd_name] |
|||
if guess: |
|||
msg.append('maybe you meant "%s"' % guess) |
|||
|
|||
raise CommandError(' - '.join(msg)) |
|||
|
|||
# all the args without the subcommand |
|||
cmd_args = args[:] |
|||
cmd_args.remove(cmd_name) |
|||
|
|||
return cmd_name, cmd_args |
|||
|
|||
|
|||
def check_isolated(args): |
|||
isolated = False |
|||
|
|||
if "--isolated" in args: |
|||
isolated = True |
|||
|
|||
return isolated |
|||
|
|||
|
|||
def main(args=None): |
|||
if args is None: |
|||
args = sys.argv[1:] |
|||
|
|||
# Configure our deprecation warnings to be sent through loggers |
|||
deprecation.install_warning_logger() |
|||
|
|||
autocomplete() |
|||
|
|||
try: |
|||
cmd_name, cmd_args = parseopts(args) |
|||
except PipError as exc: |
|||
sys.stderr.write("ERROR: %s" % exc) |
|||
sys.stderr.write(os.linesep) |
|||
sys.exit(1) |
|||
|
|||
# Needed for locale.getpreferredencoding(False) to work |
|||
# in pip.utils.encoding.auto_decode |
|||
try: |
|||
locale.setlocale(locale.LC_ALL, '') |
|||
except locale.Error as e: |
|||
# setlocale can apparently crash if locale are uninitialized |
|||
logger.debug("Ignoring error %s when setting locale", e) |
|||
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) |
|||
return command.main(cmd_args) |
|||
|
|||
|
|||
# ########################################################### |
|||
# # Writing freeze files |
|||
|
|||
class FrozenRequirement(object):
    """One line of ``pip freeze`` output.

    Wraps a project name, its requirement string (or an editable VCS
    URL of the form ``url@rev#egg=name``), an ``editable`` flag, and
    any explanatory ``#`` comment lines to emit before the requirement.
    """

    def __init__(self, name, req, editable, comments=()):
        # name: project name of the distribution
        # req: requirement string, or VCS URL for editable installs
        # editable: True -> rendered as "-e <req>" by __str__
        # comments: iterable of comment lines printed before the req
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # svn-style suffixes sometimes embedded in a version string,
    # e.g. "1.0-r1234" (revision) or "1.0-20130101" (date).
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts are turned into VCS requirements when the
        repository location can be determined; otherwise falls back to a
        pinned ``name==version`` requirement (with a warning comment).
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        # Imported locally — NOTE(review): presumably to avoid a
        # circular import with pip.vcs; confirm.
        from pip.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                # VCS backend could not produce a requirement: degrade to
                # a non-editable pinned requirement and leave a marker.
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            # An installed (non-editable) dist must carry exactly one
            # "==" / "===" pin.
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                # Version looks like an svn revision/date snapshot: try to
                # reconstruct an editable svn requirement for it.
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                # NOTE(review): if svn_backend were ever falsy,
                # svn_location would be unbound here (NameError); in
                # practice the svn backend appears to always be
                # registered — confirm.
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req)
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        """Return the dist's egg name with any trailing "-pyX.Y" stripped."""
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        """Render the comment lines plus the requirement ("-e " prefixed
        when editable), terminated by a newline."""
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
|||
|
|||
|
|||
if __name__ == '__main__':
    # Run the pip CLI and use its return value as the process exit status.
    sys.exit(main())
@ -0,0 +1,19 @@ |
|||
"""Entry module so pip can be executed directly, including straight out
of a wheel file (``python pip-*.whl/pip install ...``)."""
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips of '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    # Prepend so the wheel's pip wins over any installed pip.
    sys.path.insert(0, path)

import pip  # noqa

if __name__ == '__main__':
    sys.exit(pip.main())
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,111 @@ |
|||
""" |
|||
pip._vendor is for vendoring dependencies of pip to prevent needing pip to |
|||
depend on something external. |
|||
|
|||
Files inside of pip._vendor should be considered immutable and should only be |
|||
updated to versions from upstream. |
|||
""" |
|||
from __future__ import absolute_import |
|||
|
|||
import glob |
|||
import os.path |
|||
import sys |
|||
|
|||
# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip.
# NOTE(review): True here suggests this copy was patched by a
# redistributor (per the comment above, the unpatched value would not
# be True) — confirm against the packaging source.
DEBUNDLED = True

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels'))
|||
|
|||
|
|||
# Define a small helper function to alias our vendored modules to the real ones |
|||
# if the vendored ones do not exist. This idea of this was taken from |
|||
# https://github.com/kennethreitz/requests/pull/2567. |
|||
def vendored(modulename):
    """Alias ``<this package>.<modulename>`` to the top-level module of
    the same name when the vendored copy cannot be imported.

    Both import failures are deliberately silent: if neither the
    vendored nor the debundled module exists, the eventual real import
    at the point of use will raise a clearer ImportError.
    """
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(vendored_name, globals(), locals(), level=0)
        return  # vendored copy exists; nothing to alias
    except ImportError:
        pass

    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Neither candidate imports — let the failure surface later,
        # when pip actually tries to use the module.
        return

    # The debundled module imported fine: register it under the vendored
    # name in sys.modules and hang it off the parent package as well.
    sys.modules[vendored_name] = sys.modules[modulename]
    parent, leaf = vendored_name.rsplit(".", 1)
    setattr(sys.modules[parent], leaf, sys.modules[modulename])
|||
|
|||
|
|||
# If we're operating in a debundled setup, then we want to go ahead and trigger |
|||
# the aliasing of our vendored libraries as well as looking for wheels to add |
|||
# to our sys.path. This will cause all of this code to be a no-op typically |
|||
# however downstream redistributors can enable it in a consistent way across |
|||
# all platforms. |
|||
# Runs at import time: when debundled, extend sys.path with downstream
# dependency wheels and alias every vendored module to its top-level
# counterpart via vendored().
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    try:
        vendored("requests.packages.urllib3.packages.ordered_dict")
        vendored("requests.packages.urllib3.packages.six")
    except ImportError:
        # Debian already unbundles these from requests.
        pass
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
Binary file not shown.
@ -0,0 +1,337 @@ |
|||
"""Base Command class, and related routines""" |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import os |
|||
import sys |
|||
import optparse |
|||
import warnings |
|||
|
|||
from pip import cmdoptions |
|||
from pip.index import PackageFinder |
|||
from pip.locations import running_under_virtualenv |
|||
from pip.download import PipSession |
|||
from pip.exceptions import (BadCommand, InstallationError, UninstallationError, |
|||
CommandError, PreviousBuildDirError) |
|||
|
|||
from pip.compat import logging_dictConfig |
|||
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter |
|||
from pip.req import InstallRequirement, parse_requirements |
|||
from pip.status_codes import ( |
|||
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, |
|||
PREVIOUS_BUILD_DIR_ERROR, |
|||
) |
|||
from pip.utils import deprecation, get_prog, normalize_path |
|||
from pip.utils.logging import IndentingFormatter |
|||
from pip.utils.outdated import pip_version_check |
|||
|
|||
|
|||
__all__ = ['Command'] |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class Command(object):
    """Base class for all pip commands.

    Subclasses set ``name``/``usage`` and implement ``run(options,
    args)``; :meth:`main` handles option parsing, logging setup, the
    Python 2.6 deprecation warning, environment plumbing, and mapping of
    results/exceptions to pip exit statuses.
    """

    name = None      # subcommand name, e.g. "install"
    usage = None     # usage string shown in --help output
    hidden = False   # hidden commands are omitted from the help listing
    # (stdout, stderr) targets for the logging config below.
    log_streams = ("ext://sys.stdout", "ext://sys.stderr")

    def __init__(self, isolated=False):
        """Construct the command's option parser.

        :param isolated: when True the parser ignores environment
            variables and per-user configuration.
        """
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.parser = ConfigOptionParser(**parser_kw)

        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def _build_session(self, options, retries=None, timeout=None):
        """Create a :class:`PipSession` configured from *options*.

        :param retries: overrides ``options.retries`` when not None.
        :param timeout: overrides ``options.timeout`` when not None.
        :return: a configured PipSession (usable as a context manager).
        """
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session

    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        """Parse args, configure logging, run the command, and translate
        the outcome into a pip exit status constant."""
        options, args = self.parse_args(args)

        # Map -q/-v counts to a console log level:
        # -q -> WARNING, -qq -> ERROR, -qqq+ -> CRITICAL, -v -> DEBUG.
        # BUGFIX: the second test must be "elif" — with a plain "if",
        # a single -q set WARNING and then fell through to the else
        # branch, silencing everything at CRITICAL.
        if options.quiet:
            if options.quiet == 1:
                level = "WARNING"
            elif options.quiet == 2:
                level = "ERROR"
            else:
                level = "CRITICAL"
        elif options.verbose:
            level = "DEBUG"
        else:
            level = "INFO"

        # The root logger should match the "console" level *unless* we
        # specified "--log" to send debug logs to a file.
        root_level = level
        if options.log:
            root_level = "DEBUG"

        logging_dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                # Keeps WARNING+ off stdout; they go to console_errors.
                "exclude_warnings": {
                    "()": "pip.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": "pip.utils.logging.ColorizedStreamHandler",
                    "stream": self.log_streams[0],
                    "filters": ["exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": "pip.utils.logging.ColorizedStreamHandler",
                    "stream": self.log_streams[1],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": "pip.utils.logging.BetterRotatingFileHandler",
                    # delay=True avoids creating /dev/null-backed files
                    # until something is actually logged.
                    "filename": options.log or "/dev/null",
                    "delay": True,
                    "formatter": "indent",
                },
            },
            "root": {
                "level": root_level,
                "handlers": list(filter(None, [
                    "console",
                    "console_errors",
                    "user_log" if options.log else None,
                ])),
            },
            # Disable any logging besides WARNING unless we have DEBUG level
            # logging enabled. These use both pip._vendor and the bare names
            # for the case where someone unbundles our libraries.
            "loggers": dict(
                (
                    name,
                    {
                        "level": (
                            "WARNING"
                            if level in ["INFO", "ERROR"]
                            else "DEBUG"
                        ),
                    },
                )
                for name in ["pip._vendor", "distlib", "requests", "urllib3"]
            ),
        })

        if sys.version_info[:2] == (2, 6):
            warnings.warn(
                "Python 2.6 is no longer supported by the Python core team, "
                "please upgrade your Python. A future version of pip will "
                "drop support for Python 2.6",
                deprecation.Python26DeprecationWarning
            )

        # TODO: try to get these passing down from the command?
        # without resorting to os.environ to hold these.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except:  # noqa: E722 -- deliberate catch-all so *any* failure maps
            # to UNKNOWN_ERROR instead of a raw traceback/exit.
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            # Check if we're using the latest version of pip available
            if (not options.disable_pip_version_check and not
                    getattr(options, "no_index", False)):
                with self._build_session(
                        options,
                        retries=0,
                        timeout=min(5, options.timeout)) as session:
                    pip_version_check(session)

        return SUCCESS
|||
|
|||
|
|||
class RequirementCommand(Command):
    """Base class for commands that operate on a set of requirements
    (collecting them from positional args, -e options, and -r/-c files)."""

    @staticmethod
    def populate_requirement_set(requirement_set, args, options, finder,
                                 session, name, wheel_cache):
        """
        Marshal cmd line args into a requirement set.

        Sources, in order: constraint files (-c), positional
        requirement specifiers, editables (-e), and requirement files
        (-r).  When nothing at all was supplied, only a warning is
        logged — the caller decides whether that is fatal.
        """
        # Constraint files are added first, flagged constraint=True.
        for filename in options.constraints:
            for req in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                requirement_set.add_requirement(req)

        # Positional requirement specifiers, e.g. "requests==2.0".
        for req in args:
            requirement_set.add_requirement(
                InstallRequirement.from_line(
                    req, None, isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
            )

        # Editable requirements from -e.
        for req in options.editables:
            requirement_set.add_requirement(
                InstallRequirement.from_editable(
                    req,
                    default_vcs=options.default_vcs,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache
                )
            )

        # Requirement files from -r; remember whether any file actually
        # produced a requirement for the emptiness check below.
        found_req_in_file = False
        for filename in options.requirements:
            for req in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache):
                found_req_in_file = True
                requirement_set.add_requirement(req)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or found_req_in_file):
            opts = {'name': name}
            if options.find_links:
                msg = ('You must give at least one requirement to '
                       '%(name)s (maybe you meant "pip %(name)s '
                       '%(links)s"?)' %
                       dict(opts, links=' '.join(options.find_links)))
            else:
                msg = ('You must give at least one requirement '
                       'to %(name)s (see "pip help %(name)s")' % opts)
            logger.warning(msg)

    def _build_package_finder(self, options, session,
                              platform=None, python_versions=None,
                              abi=None, implementation=None):
        """
        Create a package finder appropriate to this requirement command.

        :param session: the PipSession used for index requests.
        :param platform/python_versions/abi/implementation: optional
            target-environment overrides forwarded to PackageFinder.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            # --no-index: drop every index URL (find-links still apply).
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
        )
@ -0,0 +1,293 @@ |
|||
"""Base option parser setup""" |
|||
from __future__ import absolute_import |
|||
|
|||
import sys |
|||
import optparse |
|||
import os |
|||
import re |
|||
import textwrap |
|||
from distutils.util import strtobool |
|||
|
|||
from pip._vendor.six import string_types |
|||
from pip._vendor.six.moves import configparser |
|||
from pip.locations import ( |
|||
legacy_config_file, config_basename, running_under_virtualenv, |
|||
site_config_files |
|||
) |
|||
from pip.utils import appdirs, get_terminal_size |
|||
|
|||
|
|||
_environ_prefix_re = re.compile(r"^PIP_", re.I) |
|||
|
|||
|
|||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        # Wrap to the terminal width, minus a 2-column margin.
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        # optparse hook; delegate with our preferred metavar/sep format.
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        opts = []

        # At most one short and one long form, e.g. "-f, --format".
        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        # Append " <metavar>" only for options that take a value.
        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        # Suppress optparse's default "Options" heading entirely.
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            # Top-level parser (has .main) labels this section "Commands";
            # subcommand parsers label it "Description".
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        # Prefix every line of *text* with *indent*.
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)
|||
|
|||
|
|||
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Help formatter for ConfigOptionParser.

    Refreshes the parser's defaults (from configuration files and the
    environment) before optparse expands them, so the help listing
    shows the effective default values.
    """

    def expand_default(self, option):
        parser = self.parser
        if parser is not None:
            parser._update_defaults(parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
|||
|
|||
|
|||
class CustomOptionParser(optparse.OptionParser):
    """OptionParser extended with positional group insertion and a flat
    view over every option, including those inside option groups."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position.

        optparse's ``add_option_group`` always appends, so append first
        and then relocate the new group to *idx*.
        """
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.pop()
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        all_options = list(self.option_list)
        for group in self.option_groups:
            all_options.extend(group.option_list)
        return all_options
|||
|
|||
|
|||
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    # Class-level fallback; instances override via the "isolated" kwarg.
    isolated = False

    def __init__(self, *args, **kwargs):
        # RawConfigParser: config values are read verbatim, with no
        # "%"-interpolation applied.
        self.config = configparser.RawConfigParser()
        # "name" is required: it selects the [<name>] config section.
        self.name = kwargs.pop('name')
        self.isolated = kwargs.pop("isolated", False)
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        """Return the config file paths to read, lowest priority first."""
        # the files returned by this method will be parsed in order with the
        # first files listed being overridden by later files in standard
        # ConfigParser fashion
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        # PIP_CONFIG_FILE=/dev/null disables all configuration files.
        if config_file == os.devnull:
            return []

        # at the base we have any site-wide configuration
        files = list(site_config_files)

        # per-user configuration next
        if not self.isolated:
            if config_file and os.path.exists(config_file):
                # Explicit PIP_CONFIG_FILE replaces the per-user files.
                files.append(config_file)
            else:
                # This is the legacy config file, we consider it to be a lower
                # priority than the new file location.
                files.append(legacy_config_file)

                # This is the new config file, we consider it to be a higher
                # priority than the legacy file.
                files.append(
                    os.path.join(
                        appdirs.user_config_dir("pip"),
                        config_basename,
                    )
                )

        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            venv_config_file = os.path.join(
                sys.prefix,
                config_basename,
            )
            if os.path.exists(venv_config_file):
                files.append(venv_config_file)

        return files

    def check_default(self, option, key, val):
        """Validate a config-sourced value; exit(3) on a bad value."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(
                self.normalize_keys(self.get_config_section(section))
            )
        # 2. environmental variables
        if not self.isolated:
            config.update(self.normalize_keys(self.get_environ_vars()))
        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in config.items():
            # ignore empty values
            if not val:
                continue

            option = self.get_option(key)
            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                # Boolean-ish actions: accept "true"/"1"/"yes" etc.
                val = strtobool(val)
            elif option.action == 'append':
                # Multi-valued options: whitespace-separated list, each
                # element validated individually.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callback options run now against self.values; their final
                # value is read back after the loop (late_eval).
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if _environ_prefix_re.search(key):
                # Strip the PIP_ prefix and lowercase the remainder.
                yield (_environ_prefix_re.sub("", key).lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        # Print usage plus the message to stderr and exit with status 2.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
@ -0,0 +1,633 @@ |
|||
""" |
|||
shared options and groups |
|||
|
|||
The principle here is to define options once, but *not* instantiate them |
|||
globally. One reason being that options with action='append' can carry state |
|||
between parses. pip parses general options twice internally, and shouldn't |
|||
pass on state. To be consistent, all options will follow this design. |
|||
|
|||
""" |
|||
from __future__ import absolute_import |
|||
|
|||
from functools import partial |
|||
from optparse import OptionGroup, SUPPRESS_HELP, Option |
|||
import warnings |
|||
|
|||
from pip.index import ( |
|||
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, |
|||
fmt_ctl_no_use_wheel) |
|||
from pip.models import PyPI |
|||
from pip.locations import USER_CACHE_DIR, src_prefix |
|||
from pip.utils.hashes import STRONG_HASHES |
|||
|
|||
|
|||
def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- dict with a 'name' key (group title) and an 'options' key
              (list of zero-argument callables, each producing an Option)
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    for make_option in group['options']:
        option_group.add_option(make_option())
    return option_group
|||
|
|||
|
|||
def resolve_wheel_no_use_binary(options):
    """If wheel use has been disabled on *options*, apply the matching
    format-control restriction; otherwise do nothing."""
    if options.use_wheel:
        return
    fmt_ctl_no_use_wheel(options.format_control)
|||
|
|||
|
|||
def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    relevant = ("build_options", "global_options", "install_options")
    if any(getattr(check_options, attr, None) for attr in relevant):
        # Any of these implies a source build, so binaries must be off.
        fmt_ctl_no_binary(options.format_control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2)
|||
|
|||
|
|||
###########
# options #
###########

# Each name below is a zero-argument factory (functools.partial around
# optparse.Option), so every parse gets a fresh Option instance; see the
# module docstring for why they are not instantiated globally.

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=('Give less output. Option is additive, and can be used up to 3'
          ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
          ' levels).')
)

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).")

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')

default_vcs = partial(
    Option,
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)
|||
|
|||
|
|||
def exists_action():
    """Factory for ``--exists-action``: what to do when a target path
    already exists (fresh Option per parse, as action='append' is stateful).
    """
    opt = Option(
        '--exists-action',
        action='append',
        dest='exists_action',
        metavar='action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.")
    return opt
|||
|
|||
|
|||
# TLS-related and primary-index option factories.

cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.")

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.")

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    # Defaults to PyPI's PEP 503 "simple" endpoint.
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.")
|||
|
|||
|
|||
def extra_index_url():
    """Factory for ``--extra-index-url`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        '--extra-index-url',
        metavar='URL',
        dest='extra_index_urls',
        default=[],
        action='append',
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url."
    )
|||
|
|||
|
|||
# Factory disabling all index lookups (--find-links only).
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')
|||
|
|||
|
|||
def find_links():
    """Factory for ``-f/--find-links`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        '-f', '--find-links',
        metavar='url',
        dest='find_links',
        default=[],
        action='append',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.")
|||
|
|||
|
|||
def allow_external():
    """Factory for the hidden, deprecated ``--allow-external`` option."""
    return Option(
        "--allow-external",
        metavar="PACKAGE",
        dest="allow_external",
        default=[],
        action="append",
        help=SUPPRESS_HELP,
    )
|||
|
|||
|
|||
# Hidden, deprecated flag kept only for command-line compatibility.
allow_all_external = partial(
    Option,
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help=SUPPRESS_HELP,
)
|||
|
|||
|
|||
def trusted_host():
    """Factory for ``--trusted-host`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        "--trusted-host",
        metavar="HOSTNAME",
        dest="trusted_hosts",
        default=[],
        action="append",
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )
|||
|
|||
|
|||
# Remove after 7.0
# Hidden negation of --allow-all-external (note: writes the same
# 'allow_all_external' dest as the positive flag).
no_allow_external = partial(
    Option,
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)
|||
|
|||
|
|||
# Remove --allow-insecure after 7.0
def allow_unsafe():
    """Factory for the hidden ``--allow-unverified``/``--allow-insecure``
    option (fresh Option per parse, since action='append' carries state)."""
    return Option(
        "--allow-unverified", "--allow-insecure",
        metavar="PACKAGE",
        dest="allow_unverified",
        default=[],
        action="append",
        help=SUPPRESS_HELP,
    )
|||
|
|||
# Remove after 7.0
# Hidden negation flag; note its dest ('allow_all_insecure') is distinct
# from allow_unsafe's dest ('allow_unverified').
no_allow_unsafe = partial(
    Option,
    "--no-allow-insecure",
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)
|||
|
|||
# Remove after 1.5
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)
|||
|
|||
|
|||
def constraints():
    """Factory for ``-c/--constraint`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        '-c', '--constraint',
        metavar='file',
        dest='constraints',
        default=[],
        action='append',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.')
|||
|
|||
|
|||
def requirements():
    """Factory for ``-r/--requirement`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        '-r', '--requirement',
        metavar='file',
        dest='requirements',
        default=[],
        action='append',
        help='Install from the given requirements file. '
             'This option can be used multiple times.')
|||
|
|||
|
|||
def editable():
    """Factory for ``-e/--editable`` (fresh Option per parse, since
    action='append' carries state)."""
    return Option(
        '-e', '--editable',
        metavar='path/url',
        dest='editables',
        default=[],
        action='append',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )
|||
|
|||
# Checkout directory for editable installs.
src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)

# XXX: deprecated, remove in 9.0
use_wheel = partial(
    Option,
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    default=True,
    help=SUPPRESS_HELP,
)

# XXX: deprecated, remove in 9.0
# Negation of --use-wheel; superseded by --no-binary.
no_use_wheel = partial(
    Option,
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not Find and prefer wheel archives when searching indexes and '
          'find-links locations. DEPRECATED in favour of --no-binary.'),
)
|||
|
|||
|
|||
def _get_format_control(values, option): |
|||
"""Get a format_control object.""" |
|||
return getattr(values, option.dest) |
|||
|
|||
|
|||
def _handle_no_binary(option, opt_str, value, parser):
    """optparse callback for --no-binary: fold *value* into the shared
    FormatControl, removing conflicting entries from the only_binary set."""
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.no_binary, existing.only_binary)
|||
|
|||
|
|||
def _handle_only_binary(option, opt_str, value, parser):
    """optparse callback for --only-binary: fold *value* into the shared
    FormatControl, removing conflicting entries from the no_binary set."""
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.only_binary, existing.no_binary)
|||
|
|||
|
|||
def no_binary():
    """Factory for ``--no-binary``.

    Must build a new Option per parse: the FormatControl default is
    mutable and is updated in place by the callback.
    """
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.")
|||
|
|||
|
|||
def only_binary():
    """Factory for ``--only-binary``.

    Must build a new Option per parse: the FormatControl default is
    mutable and is updated in place by the callback.
    """
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.")
|||
|
|||
|
|||
cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)

# Shares cache_dir's dest: store_false turns the cache off entirely.
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in.'
)

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.')

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.")

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.")

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.")

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    # NOTE(review): default=True disables the self-version check by
    # default; upstream pip ships default=False — confirm intentional.
    default=True,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.")

# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)
|||
|
|||
|
|||
def _merge_hash(option, opt_str, value, parser): |
|||
"""Given a value spelled "algo:digest", append the digest to a list |
|||
pointed to in a dict by the algo name.""" |
|||
if not parser.values.hashes: |
|||
parser.values.hashes = {} |
|||
try: |
|||
algo, digest = value.split(':', 1) |
|||
except ValueError: |
|||
parser.error('Arguments to %s must be a hash name ' |
|||
'followed by a value, like --hash=sha256:abcde...' % |
|||
opt_str) |
|||
if algo not in STRONG_HASHES: |
|||
parser.error('Allowed hash algorithms for %s are %s.' % |
|||
(opt_str, ', '.join(STRONG_HASHES))) |
|||
parser.values.hashes.setdefault(algo, []).append(digest) |
|||
|
|||
|
|||
# NOTE: the name 'hash' shadows the builtin at module level; kept as-is
# because it is part of this module's public interface.
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...')

require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.')
|||
|
|||
|
|||
##########
# groups #
##########

# Group descriptions consumed by make_option_group(); each 'options' list
# holds the zero-argument factories defined above, not Option instances.

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
    ]
}

non_deprecated_index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}

# Superset of the above that also exposes the deprecated external/unsafe
# flags for commands that still accept them.
index_group = {
    'name': 'Package Index Options (including deprecated options)',
    'options': non_deprecated_index_group['options'] + [
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
    ]
}
@ -0,0 +1,86 @@ |
|||
""" |
|||
Package containing all pip commands |
|||
""" |
|||
from __future__ import absolute_import |
|||
|
|||
from pip.commands.completion import CompletionCommand |
|||
from pip.commands.download import DownloadCommand |
|||
from pip.commands.freeze import FreezeCommand |
|||
from pip.commands.hash import HashCommand |
|||
from pip.commands.help import HelpCommand |
|||
from pip.commands.list import ListCommand |
|||
from pip.commands.check import CheckCommand |
|||
from pip.commands.search import SearchCommand |
|||
from pip.commands.show import ShowCommand |
|||
from pip.commands.install import InstallCommand |
|||
from pip.commands.uninstall import UninstallCommand |
|||
from pip.commands.wheel import WheelCommand |
|||
|
|||
|
|||
# Registry mapping command name -> command class.
commands_dict = {
    CompletionCommand.name: CompletionCommand,
    FreezeCommand.name: FreezeCommand,
    HashCommand.name: HashCommand,
    HelpCommand.name: HelpCommand,
    SearchCommand.name: SearchCommand,
    ShowCommand.name: ShowCommand,
    InstallCommand.name: InstallCommand,
    UninstallCommand.name: UninstallCommand,
    DownloadCommand.name: DownloadCommand,
    ListCommand.name: ListCommand,
    CheckCommand.name: CheckCommand,
    WheelCommand.name: WheelCommand,
}

# Display order used by `pip help`; classes absent from this list sort
# last (see _sort_commands).
commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]
|||
|
|||
|
|||
def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""
    items = (_sort_commands(commands_dict, commands_order)
             if ordered else commands_dict.items())
    for cmd_name, cmd_class in items:
        yield (cmd_name, cmd_class.summary)
|||
|
|||
|
|||
def get_similar_commands(name):
    """Command name auto-correct: return the closest known command name
    to *name*, or False when nothing is close enough."""
    from difflib import get_close_matches

    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else False
|||
|
|||
|
|||
def _sort_commands(cmddict, order): |
|||
def keyfn(key): |
|||
try: |
|||
return order.index(key[1]) |
|||
except ValueError: |
|||
# unordered items should come last |
|||
return 0xff |
|||
|
|||
return sorted(cmddict.items(), key=keyfn) |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,39 @@ |
|||
import logging |
|||
|
|||
from pip.basecommand import Command |
|||
from pip.operations.check import check_requirements |
|||
from pip.utils import get_installed_distributions |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        # Examine every installed distribution (not just local ones, and
        # skipping nothing) and report missing or incompatible requirements.
        dists = get_installed_distributions(local_only=False, skip=())
        missing_reqs_dict, incompatible_reqs_dict = check_requirements(dists)

        for dist in dists:
            key = '%s==%s' % (dist.project_name, dist.version)

            for requirement in missing_reqs_dict.get(key, []):
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    dist.project_name, dist.version, requirement.project_name)

            for requirement, actual in incompatible_reqs_dict.get(key, []):
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    dist.project_name, dist.version, requirement,
                    actual.project_name, actual.version)

        if missing_reqs_dict or incompatible_reqs_dict:
            # Non-zero return signals broken requirements to the caller.
            return 1
        else:
            logger.info("No broken requirements found.")
@ -0,0 +1,81 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import sys |
|||
from pip.basecommand import Command |
|||
|
|||
# Wrapper template: marks the emitted script with start/end comments so a
# later run can locate and replace it in the user's shell rc file.
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Shell-specific completion snippets keyed by shell name; each drives pip
# itself via the PIP_AUTO_COMPLETE=1 environment variable.
COMPLETION_SCRIPTS = {
    'bash': """
_pip_completion()
{
    COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
  local words cword
  read -Ac words
  read -cn cword
  reply=( $( COMP_WORDS="$words[*]" \\
             COMP_CWORD=$(( cword-1 )) \\
             PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
""", 'fish': """
function __fish_complete_pip
    set -lx COMP_WORDS (commandline -o) ""
    set -lx COMP_CWORD (math (contains -i -- (commandline -t) $COMP_WORDS)-1)
    set -lx PIP_AUTO_COMPLETE 1
    string split \  -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c pip
"""}
|||
|
|||
|
|||
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # One store_const option per supported shell; all three share the
        # 'shell' dest, so the last flag passed wins.
        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = COMPLETION_SCRIPTS.get(options.shell, '')
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            # No (or unknown) shell flag given: list the valid flags.
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
@ -0,0 +1,212 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import os |
|||
|
|||
from pip.exceptions import CommandError |
|||
from pip.index import FormatControl |
|||
from pip.req import RequirementSet |
|||
from pip.basecommand import RequirementCommand |
|||
from pip import cmdoptions |
|||
from pip.utils import ensure_dir, normalize_path |
|||
from pip.utils.build import BuildDirectory |
|||
from pip.utils.filesystem import check_path_owner |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Shared option factories from pip.cmdoptions.
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        # The four options below restrict which wheels are acceptable; they
        # are only valid together with --only-binary=:all: (checked in run).
        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version. A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  " or 'ip'. If not specified, then the current "
                  "interpreter implementation is used. Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
                  "current interpreter abi tag is used. Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.non_deprecated_index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        # Downloading always re-fetches, regardless of what is installed.
        options.ignore_installed = True

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        # Cross-platform/interpreter restrictions are only safe when pip is
        # forbidden from building from source (--only-binary=:all:).
        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        binary_only = FormatControl(set(), set([':all:']))
        if dist_restriction_set and options.format_control != binary_only:
            raise CommandError(
                "--only-binary=:all: must be set and --no-binary must not "
                "be set (or must be set to :none:) when restricting platform "
                "and interpreter constraints using --python-version, "
                "--platform, --abi, or --implementation."
            )

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            # Keep the build dir when the user supplied one or asked for
            # --no-clean; otherwise it is temporary and deleted afterwards.
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:

                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    ignore_installed=True,
                    ignore_dependencies=options.ignore_dependencies,
                    session=session,
                    isolated=options.isolated_mode,
                    require_hashes=options.require_hashes
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                if not requirement_set.has_requirements:
                    return

                requirement_set.prepare_files(finder)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info(
                        'Successfully downloaded %s', downloaded
                    )

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
@ -0,0 +1,87 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import sys |
|||
|
|||
import pip |
|||
from pip.compat import stdlib_pkgs |
|||
from pip.basecommand import Command |
|||
from pip.operations.freeze import freeze |
|||
from pip.wheel import WheelCache |
|||
|
|||
|
|||
DEV_PKGS = ('pip', 'setuptools', 'distribute', 'wheel') |
|||
|
|||
|
|||
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Log to stderr so the requirements output on stdout stays clean.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        # Wheel format preferences are irrelevant when freezing; pass an
        # empty FormatControl to the wheel cache.
        format_control = pip.index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        # Always hide stdlib packages; hide packaging tools too unless --all.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip)

        for line in freeze(**freeze_kwargs):
            sys.stdout.write(line + '\n')
@ -0,0 +1,57 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import hashlib |
|||
import logging |
|||
import sys |
|||
|
|||
from pip.basecommand import Command |
|||
from pip.status_codes import ERROR |
|||
from pip.utils import read_chunks |
|||
from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        # -a accepts any of pip's "strong" algorithms; the project
        # favourite is used when the flag is omitted.
        supported = ', '.join(STRONG_HASHES)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' % supported)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Print a ``--hash=<algo>:<digest>`` line for every path given."""
        # No files to hash: show usage and signal failure.
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        for archive_path in args:
            digest = _hash_of_file(archive_path, options.algorithm)
            logger.info('%s:\n--hash=%s:%s',
                        archive_path, options.algorithm, digest)
|||
|
|||
|
|||
def _hash_of_file(path, algorithm):
    """Return the hex digest of the file at *path*.

    :param path: filesystem path of the archive to hash
    :param algorithm: any algorithm name accepted by ``hashlib.new``
    """
    with open(path, 'rb') as archive:
        # 'digest' rather than 'hash': the original name shadowed the
        # builtin hash() inside this function.
        digest = hashlib.new(algorithm)
        # read_chunks keeps memory bounded for arbitrarily large archives.
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
@ -0,0 +1,35 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
from pip.basecommand import Command, SUCCESS |
|||
from pip.exceptions import CommandError |
|||
|
|||
|
|||
class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'

    def run(self, options, args):
        # Imported lazily to avoid a circular import with pip.commands.
        from pip.commands import commands_dict, get_similar_commands

        if not args:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name in commands_dict:
            commands_dict[cmd_name]().parser.print_help()
            return SUCCESS

        # Unknown command: suggest the closest match, if any.
        parts = ['unknown command "%s"' % cmd_name]
        guess = get_similar_commands(cmd_name)
        if guess:
            parts.append('maybe you meant "%s"' % guess)
        raise CommandError(' - '.join(parts))
@ -0,0 +1,455 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import operator |
|||
import os |
|||
import tempfile |
|||
import shutil |
|||
import warnings |
|||
try: |
|||
import wheel |
|||
except ImportError: |
|||
wheel = None |
|||
|
|||
from pip.req import RequirementSet |
|||
from pip.basecommand import RequirementCommand |
|||
from pip.locations import virtualenv_no_global, distutils_scheme |
|||
from pip.exceptions import ( |
|||
InstallationError, CommandError, PreviousBuildDirError, |
|||
) |
|||
from pip import cmdoptions |
|||
from pip.utils import ensure_dir, get_installed_version |
|||
from pip.utils.build import BuildDirectory |
|||
from pip.utils.deprecation import RemovedInPip10Warning |
|||
from pip.utils.filesystem import check_path_owner |
|||
from pip.wheel import WheelCache, WheelBuilder |
|||
|
|||
from pip.locations import running_under_virtualenv |
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        """Register install options; compute the --user default."""
        super(InstallCommand, self).__init__(*args, **kw)

        # Default to a --user install except inside a virtualenv or when
        # running as root (this looks like the Debian-patched behaviour,
        # not upstream pip).
        # NOTE(review): os.geteuid() exists only on POSIX — confirm this
        # copy never runs on Windows.
        default_user = True
        if running_under_virtualenv():
            default_user = False
        if os.geteuid() == 0:
            default_user = False

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help=("Download packages into <dir> instead of installing them, "
                  "regardless of what's already installed."),
        )

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='eager',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" -  are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
                 'already up-to-date.')

        # NOTE(review): -I defaulting to the same value as --user looks
        # Debian-specific; upstream pip has no default here.
        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            default=default_user,
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            default=default_user,
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)  On Debian systems, this is the "
                 "default when running outside of a virtual environment "
                 "and not as root.")

        # Debian companion flag: turns the --user default back off.
        cmd_opts.add_option(
            '--system',
            dest='use_user_site',
            action='store_false',
            help="Install using the system scheme (overrides --user on "
                 "Debian systems)")

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally "
                 "does. This option is not about installing *from* eggs. "
                 "(WARNING: Because this option overrides pip's normal install"
                 " logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")

        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        # --compile / --no-compile share one dest; compile defaults on.
        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve, optionally wheel-build, and install the requirements.

        Returns the populated RequirementSet (or None when there was
        nothing to install).
        """
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        # Legacy flags: still accepted, but warn until removal in pip 10.
        if options.as_egg:
            warnings.warn(
                "--egg has been deprecated and will be removed in the future. "
                "This flag is mutually exclusive with large parts of pip, and "
                "actually using it invalidates pip's ability to manage the "
                "installation process.",
                RemovedInPip10Warning,
            )

        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.download_dir:
            warnings.warn(
                "pip install --download has been deprecated and will be "
                "removed in the future. Pip now has a download command that "
                "should be used instead.",
                RemovedInPip10Warning,
            )
            # Download-only mode never consults what is already installed.
            options.ignore_installed = True

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            # --user is incompatible with --prefix and with virtualenvs
            # created without global site access.
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        # --target: install into a scratch "home" first; the results are
        # moved into the real target directory at the end of run().
        temp_target_dir = None
        if options.target_dir:
            options.ignore_installed = True
            temp_target_dir = tempfile.mkdtemp()
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )
            install_options.append('--home=' + temp_target_dir)

        global_options = options.global_options or []

        with self._build_session(options) as session:

            finder = self._build_package_finder(options, session)
            # Keep the build dir only when the user chose it or asked for
            # --no-clean.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    upgrade=options.upgrade,
                    upgrade_strategy=options.upgrade_strategy,
                    as_egg=options.as_egg,
                    ignore_installed=options.ignore_installed,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=options.ignore_requires_python,
                    force_reinstall=options.force_reinstall,
                    use_user_site=options.use_user_site,
                    target_dir=temp_target_dir,
                    session=session,
                    pycompile=options.compile,
                    isolated=options.isolated_mode,
                    wheel_cache=wheel_cache,
                    require_hashes=options.require_hashes,
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    if (options.download_dir or not wheel or not
                            options.cache_dir):
                        # on -d don't do complex things like building
                        # wheels, and don't try to build wheels when wheel is
                        # not installed.
                        requirement_set.prepare_files(finder)
                    else:
                        # build wheels before install.
                        wb = WheelBuilder(
                            requirement_set,
                            finder,
                            build_options=[],
                            global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(autobuilding=True)

                    if not options.download_dir:
                        requirement_set.install(
                            install_options,
                            global_options,
                            root=options.root_path,
                            prefix=options.prefix_path,
                        )

                        possible_lib_locations = get_lib_location_guesses(
                            user=options.use_user_site,
                            home=temp_target_dir,
                            root=options.root_path,
                            prefix=options.prefix_path,
                            isolated=options.isolated_mode,
                        )
                        reqs = sorted(
                            requirement_set.successfully_installed,
                            key=operator.attrgetter('name'))
                        items = []
                        for req in reqs:
                            item = req.name
                            try:
                                # Version lookup is best-effort; the
                                # summary line still lists the name alone
                                # when it fails.
                                installed_version = get_installed_version(
                                    req.name, possible_lib_locations
                                )
                                if installed_version:
                                    item += '-' + installed_version
                            except Exception:
                                pass
                            items.append(item)
                        installed = ' '.join(items)
                        if installed:
                            logger.info('Successfully installed %s', installed)
                    else:
                        downloaded = ' '.join([
                            req.name
                            for req in requirement_set.successfully_downloaded
                        ])
                        if downloaded:
                            logger.info(
                                'Successfully downloaded %s', downloaded
                            )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()

        if options.target_dir:
            ensure_dir(options.target_dir)

            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            lib_dir_list = []

            purelib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
            platlib_dir = distutils_scheme('', home=temp_target_dir)['platlib']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    target_item_dir = os.path.join(options.target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not options.upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )
            shutil.rmtree(temp_target_dir)
        return requirement_set
|||
|
|||
|
|||
def get_lib_location_guesses(*args, **kwargs):
    """Return the candidate purelib/platlib install directories.

    All arguments are forwarded to ``distutils_scheme``.
    """
    install_scheme = distutils_scheme('', *args, **kwargs)
    return [install_scheme[key] for key in ('purelib', 'platlib')]
@ -0,0 +1,337 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import json |
|||
import logging |
|||
import warnings |
|||
try: |
|||
from itertools import zip_longest |
|||
except ImportError: |
|||
from itertools import izip_longest as zip_longest |
|||
|
|||
from pip._vendor import six |
|||
|
|||
from pip.basecommand import Command |
|||
from pip.exceptions import CommandError |
|||
from pip.index import PackageFinder |
|||
from pip.utils import ( |
|||
get_installed_distributions, dist_is_editable) |
|||
from pip.utils.deprecation import RemovedInPip10Warning |
|||
from pip.cmdoptions import make_option_group, index_group |
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        """Register list-specific options plus the shared index options."""
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        # No default: run() uses list_format is None to emit the
        # legacy-format deprecation warning.
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: legacy (default), columns, "
                 "freeze or json.",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        """Gather installed distributions, filter them, and print them."""
        # Legacy index flags: accepted but deprecated until pip 10.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.list_format is None:
            warnings.warn(
                "The default format will switch to columns in the future. "
                "You can use --format=(legacy|columns) (or define a "
                "format=(legacy|columns) in your pip.conf under the [list] "
                "section) to disable this warning.",
                RemovedInPip10Warning,
            )

        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
        )

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        # Relies on iter_packages_latest_infos annotating latest_version.
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        # A package is "not required" when no other installed package
        # declares it as a dependency.
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return set(pkg for pkg in packages if pkg.key not in dep_keys)

    def iter_packages_latest_infos(self, packages, options):
        """Yield each dist annotated with latest_version/latest_filetype.

        Dists with no candidate on the index are silently skipped.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist):
        # Editables additionally show their checkout location.
        if dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist):
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        """Dispatch to the output style chosen via --format."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        else:  # legacy
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist))
                else:
                    logger.info(self.output_legacy(dist))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)
|||
|
|||
|
|||
def tabulate(vals):
    """Left-justify every cell to the widest entry in its column.

    Returns ``(rows, sizes)``: the formatted row strings and the computed
    column widths.

    From pfmoore on GitHub:
    https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    """
    assert len(vals) > 0

    # Grow the per-column widths across all rows. Rows may be ragged;
    # zip_longest pads missing cells with None (which str()s to 'None').
    sizes = [0] * max(len(row) for row in vals)
    for row in vals:
        sizes = [max(width, len(str(cell)))
                 for width, cell in zip_longest(sizes, row)]

    rendered = [
        " ".join(str(cell).ljust(width) if cell is not None else ''
                 for width, cell in zip_longest(sizes, row))
        for row in vals
    ]
    return rendered, sizes
|||
|
|||
|
|||
def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated

    # Adjust the header for the `pip list --outdated` case.
    header = ["Package", "Version"]
    if running_outdated:
        header += ["Latest", "Type"]
    # A Location column appears as soon as any listed package is editable.
    if any(dist_is_editable(pkg) for pkg in pkgs):
        header.append("Location")

    data = []
    for proj in pkgs:
        row = [proj.project_name, proj.version]

        if running_outdated:
            # separate out the latest_version and type
            row += [proj.latest_version, proj.latest_filetype]

        if dist_is_editable(proj):
            row.append(proj.location)

        data.append(row)

    return data, header
|||
|
|||
|
|||
def format_for_json(packages, options):
    """Serialize *packages* to a JSON array for `pip list --format=json`."""
    rows = []
    for dist in packages:
        row = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.outdated:
            # The latest-version columns only exist on the outdated listing.
            row['latest_version'] = six.text_type(dist.latest_version)
            row['latest_filetype'] = dist.latest_filetype
        rows.append(row)
    return json.dumps(rows)
@ -0,0 +1,133 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import sys |
|||
import textwrap |
|||
|
|||
from pip.basecommand import Command, SUCCESS |
|||
from pip.compat import OrderedDict |
|||
from pip.download import PipXmlrpcTransport |
|||
from pip.models import PyPI |
|||
from pip.utils import get_terminal_size |
|||
from pip.utils.logging import indent_log |
|||
from pip.exceptions import CommandError |
|||
from pip.status_codes import NO_MATCHES_FOUND |
|||
from pip._vendor.packaging.version import parse as parse_version |
|||
from pip._vendor import pkg_resources |
|||
from pip._vendor.six.moves import xmlrpc_client |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Query the index and print grouped results."""
        # A query is mandatory; bail out early when it is missing.
        if not args:
            raise CommandError('Missing required argument (search query).')

        raw_hits = self.search(args, options)
        grouped_hits = transform_hits(raw_hits)

        # Only measure the terminal when actually attached to one.
        width = get_terminal_size()[0] if sys.stdout.isatty() else None
        print_results(grouped_hits, terminal_width=width)

        return SUCCESS if raw_hits else NO_MATCHES_FOUND

    def search(self, query, options):
        """Run an XML-RPC 'search' call against the configured index."""
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(options.index, session)
            proxy = xmlrpc_client.ServerProxy(options.index, transport)
            return proxy.search({'name': query, 'summary': query}, 'or')
|||
|
|||
|
|||
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    :param hits: iterable of dicts with 'name', 'summary' and 'version' keys
    :return: list of dicts, one per package, with a 'versions' list inline
    """
    # OrderedDict preserves the index's hit ordering in the output list.
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        # Membership test directly on the dict: `packages.keys()`
        # materializes a list on Python 2, making this loop quadratic.
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
|||
|
|||
|
|||
def print_results(hits, name_column_width=None, terminal_width=None):
    """Log one line per search hit, annotating installed packages."""
    if not hits:
        return

    if name_column_width is None:
        # Wide enough for the longest "name (version)" pair plus padding.
        name_column_width = 4 + max(
            len(hit['name']) + len(hit.get('versions', ['-'])[-1])
            for hit in hits
        )

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        version = hit.get('versions', ['-'])[-1]

        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                wrapped = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(wrapped)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, version), summary)
        try:
            logger.info(line)
            if name not in installed_packages:
                continue
            dist = pkg_resources.get_distribution(name)
            with indent_log():
                latest = highest_version(hit['versions'])
                if dist.version == latest:
                    logger.info('INSTALLED: %s (latest)', dist.version)
                else:
                    logger.info('INSTALLED: %s', dist.version)
                    logger.info('LATEST:    %s', latest)
        except UnicodeEncodeError:
            # Terminals that cannot encode the package metadata simply
            # skip the line rather than crash the whole search.
            pass
|||
|
|||
|
|||
def highest_version(versions):
    """Return the highest version string in *versions* (PEP 440 ordering).

    On ties (versions that parse equal), the earliest occurrence wins,
    matching ``max(versions, key=parse_version)``.
    """
    remaining = iter(versions)
    try:
        best = next(remaining)
    except StopIteration:
        raise ValueError('max() arg is an empty sequence')
    for candidate in remaining:
        if parse_version(candidate) > parse_version(best):
            best = candidate
    return best
@ -0,0 +1,154 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
from email.parser import FeedParser |
|||
import logging |
|||
import os |
|||
|
|||
from pip.basecommand import Command |
|||
from pip.status_codes import SUCCESS, ERROR |
|||
from pip._vendor import pkg_resources |
|||
from pip._vendor.packaging.utils import canonicalize_name |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'

    def __init__(self, *args, **kw):
        # Register the single show-specific flag (-f/--files).
        super(ShowCommand, self).__init__(*args, **kw)
        opts = self.cmd_opts
        opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        # Nothing to do without at least one package name.
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR

        found_any = print_results(
            search_packages_info(args),
            list_files=options.files,
            verbose=options.verbose,
        )
        return SUCCESS if found_any else ERROR
|||
|
|||
|
|||
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.

    Yields one dict per queried package that is actually installed; names
    are matched case-insensitively via canonicalize_name.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            # Only the first non-blank line identifies the installer.
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # BUGFIX: a distribution may ship neither METADATA nor PKG-INFO;
        # previously `metadata` stayed None and FeedParser.feed(None)
        # raised TypeError. Fall back to an empty document so the package
        # is still reported (with empty summary/author/... fields).
        if metadata is None:
            metadata = ''

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package
|||
|
|||
|
|||
def print_results(distributions, list_files=False, verbose=False):
    """Log the details of each distribution dict found.

    Returns True if at least one distribution was printed.
    """
    printed_any = False
    for dist in distributions:
        if printed_any:
            # Separator between consecutive package reports.
            logger.info("---")
        printed_any = True

        # Fixed header fields, in display order; format strings unchanged.
        for fmt, key in (
            ("Name: %s", 'name'),
            ("Version: %s", 'version'),
            ("Summary: %s", 'summary'),
            ("Home-page: %s", 'home-page'),
            ("Author: %s", 'author'),
            ("Author-email: %s", 'author-email'),
            ("License: %s", 'license'),
            ("Location: %s", 'location'),
        ):
            logger.info(fmt, dist.get(key, ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for installed_file in dist.get('files', []):
                logger.info("  %s", installed_file.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return printed_any
@ -0,0 +1,76 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import pip |
|||
from pip.wheel import WheelCache |
|||
from pip.req import InstallRequirement, RequirementSet, parse_requirements |
|||
from pip.basecommand import Command |
|||
from pip.exceptions import InstallationError |
|||
|
|||
|
|||
class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        """Register the uninstall-specific options (-r and -y)."""
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Build a RequirementSet from positional args plus any -r files,
        then uninstall everything in it (confirming unless --yes)."""
        with self._build_session(options) as session:
            # Uninstall never fetches anything, so an empty FormatControl
            # (no :all:/:none: restrictions) is sufficient for the cache.
            format_control = pip.index.FormatControl(set(), set())
            wheel_cache = WheelCache(options.cache_dir, format_control)
            # No build/src/download dirs: nothing is built or downloaded.
            requirement_set = RequirementSet(
                build_dir=None,
                src_dir=None,
                download_dir=None,
                isolated=options.isolated_mode,
                session=session,
                wheel_cache=wheel_cache,
            )
            for name in args:
                requirement_set.add_requirement(
                    InstallRequirement.from_line(
                        name, isolated=options.isolated_mode,
                        wheel_cache=wheel_cache
                    )
                )
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session,
                        wheel_cache=wheel_cache):
                    requirement_set.add_requirement(req)
            if not requirement_set.has_requirements:
                # Refuse to run with nothing to uninstall.
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )
            requirement_set.uninstall(auto_confirm=options.yes)
@ -0,0 +1,208 @@ |
|||
# -*- coding: utf-8 -*- |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import os |
|||
import warnings |
|||
|
|||
from pip.basecommand import RequirementCommand |
|||
from pip.exceptions import CommandError, PreviousBuildDirError |
|||
from pip.req import RequirementSet |
|||
from pip.utils import import_or_raise |
|||
from pip.utils.build import BuildDirectory |
|||
from pip.utils.deprecation import RemovedInPip10Warning |
|||
from pip.wheel import WheelCache, WheelBuilder |
|||
from pip import cmdoptions |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        """Register wheel-specific options plus the shared requirement
        and index option groups."""
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.use_wheel())
        cmd_opts.add_option(cmdoptions.no_use_wheel())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def check_required_packages(self):
        """Fail early with an actionable CommandError when the external
        'wheel' package or a dist-info-capable setuptools is missing."""
        import_or_raise(
            'wheel.bdist_wheel',
            CommandError,
            "'pip wheel' requires the 'wheel' package. To fix this, run: "
            "pip install wheel"
        )
        pkg_resources = import_or_raise(
            'pkg_resources',
            CommandError,
            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
            " To fix this, run: pip install --upgrade setuptools"
        )
        # setuptools < 0.8 has pkg_resources but no DistInfoDistribution.
        if not hasattr(pkg_resources, 'DistInfoDistribution'):
            raise CommandError(
                "'pip wheel' requires setuptools >= 0.8 for dist-info "
                "support. To fix this, run: pip install --upgrade "
                "setuptools"
            )

    def run(self, options, args):
        """Resolve the requested requirements and build a wheel for each."""
        self.check_required_packages()
        cmdoptions.resolve_wheel_no_use_binary(options)
        cmdoptions.check_install_build_global(options)

        # The --allow-* flags are kept only so old command lines still
        # parse; they no longer have any effect.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Auto-delete the build dir only when the user neither asked to
            # keep it (--no-clean) nor supplied an explicit --build dir.
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with BuildDirectory(options.build_dir,
                                delete=build_delete) as build_dir:
                requirement_set = RequirementSet(
                    build_dir=build_dir,
                    src_dir=options.src_dir,
                    download_dir=None,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_installed=True,
                    ignore_requires_python=options.ignore_requires_python,
                    isolated=options.isolated_mode,
                    session=session,
                    wheel_cache=wheel_cache,
                    wheel_download_dir=options.wheel_dir,
                    require_hashes=options.require_hashes
                )

                self.populate_requirement_set(
                    requirement_set, args, options, finder, session, self.name,
                    wheel_cache
                )

                if not requirement_set.has_requirements:
                    return

                try:
                    # build wheels
                    wb = WheelBuilder(
                        requirement_set,
                        finder,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                    )
                    if not wb.build():
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    # Keep the stale build dir around so the user can
                    # inspect/remove it; re-raise for normal error handling.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
@ -0,0 +1,164 @@ |
|||
"""Stuff that differs in different Python versions and platform |
|||
distributions.""" |
|||
from __future__ import absolute_import, division |
|||
|
|||
import os |
|||
import sys |
|||
|
|||
from pip._vendor.six import text_type |
|||
|
|||
try: |
|||
from logging.config import dictConfig as logging_dictConfig |
|||
except ImportError: |
|||
from pip.compat.dictconfig import dictConfig as logging_dictConfig |
|||
|
|||
try: |
|||
from collections import OrderedDict |
|||
except ImportError: |
|||
from pip._vendor.ordereddict import OrderedDict |
|||
|
|||
try: |
|||
import ipaddress |
|||
except ImportError: |
|||
try: |
|||
from pip._vendor import ipaddress |
|||
except ImportError: |
|||
import ipaddr as ipaddress |
|||
ipaddress.ip_address = ipaddress.IPAddress |
|||
ipaddress.ip_network = ipaddress.IPNetwork |
|||
|
|||
|
|||
try:
    import sysconfig

    def get_stdlib():
        """Return the set of directories containing the standard library
        (platform-independent and platform-specific locations)."""
        paths = [
            sysconfig.get_path("stdlib"),
            sysconfig.get_path("platstdlib"),
        ]
        # filter(bool, ...) drops paths sysconfig could not determine (None).
        return set(filter(bool, paths))
except ImportError:
    # Python 2.6 has no top-level sysconfig module; distutils exposes the
    # same information through get_python_lib().
    from distutils import sysconfig

    def get_stdlib():
        """Return the set of directories containing the standard library
        (distutils fallback for older interpreters)."""
        paths = [
            sysconfig.get_python_lib(standard_lib=True),
            sysconfig.get_python_lib(standard_lib=True, plat_specific=True),
        ]
        return set(filter(bool, paths))
|||
|
|||
|
|||
# Public names re-exported by pip.compat (the stable compatibility API).
__all__ = [
    "logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str",
    "native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile",
    "OrderedDict",
]
|||
|
|||
|
|||
if sys.version_info >= (3, 4):
    # Python 3.4+: __pycache__ is always used and importlib exposes the
    # bytecode-path computation directly.
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp
    # PEP 3147 (__pycache__) support is detected via imp.cache_from_source;
    # older interpreters lack it entirely.
    uses_pycache = hasattr(imp, 'cache_from_source')
    if uses_pycache:
        cache_from_source = imp.cache_from_source
    else:
        # No __pycache__ layout on this interpreter; callers must check.
        cache_from_source = None
|||
|
|||
|
|||
if sys.version_info >= (3,):
    def console_to_str(s):
        """Decode subprocess output bytes using the console's stdout
        encoding, falling back to UTF-8 when that fails."""
        try:
            return s.decode(sys.__stdout__.encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def native_str(s, replace=False):
        """Coerce *s* to the native ``str`` type (text on Python 3).

        With ``replace=True`` undecodable bytes become U+FFFD instead of
        raising.
        """
        if isinstance(s, bytes):
            return s.decode('utf-8', 'replace' if replace else 'strict')
        return s

else:
    def console_to_str(s):
        """Python 2: subprocess output is already a byte ``str``."""
        return s

    def native_str(s, replace=False):
        """Coerce *s* to the native ``str`` type (bytes on Python 2)."""
        # Replace is ignored -- unicode to UTF-8 can't fail
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s
|||
|
|||
|
|||
def total_seconds(td):
    """Return the duration of timedelta *td* in seconds, as a float.

    Compatibility shim: Python < 2.7 timedeltas lack ``total_seconds()``,
    so fall back to computing it from days/seconds/microseconds.
    """
    try:
        return td.total_seconds()
    except AttributeError:
        microseconds = (td.days * 24 * 3600 + td.seconds) * 10 ** 6
        microseconds += td.microseconds
        return microseconds / 10 ** 6
|||
|
|||
|
|||
def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        try:
            file_uid = os.fstat(fd).st_uid
        finally:
            # BUGFIX: always release the descriptor, even if fstat raises,
            # so the error path cannot leak an open fd.
            os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
    return file_uid
|||
|
|||
|
|||
def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    result = os.path.expanduser(path)
    # When $HOME is "/", expanduser('~/x') produces '//x'; collapse the
    # doubled leading slash (CPython issue 14768).
    doubled_slash = path.startswith('~/') and result.startswith('//')
    return result[1:] if doubled_slash else result
|||
|
|||
|
|||
# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = ('python', 'wsgiref')
if sys.version_info >= (2, 7):
    # argparse joined the stdlib in 2.7 but may still carry metadata from
    # an earlier standalone install.
    stdlib_pkgs += ('argparse',)


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))
|||
|
|||
|
|||
def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)

    # os.path.samefile is missing (Windows on Python 2): compare the
    # normalized absolute paths instead.
    def _canonical(name):
        return os.path.normcase(os.path.abspath(name))

    return _canonical(file1) == _canonical(file2)
Binary file not shown.
Binary file not shown.
@ -0,0 +1,565 @@ |
|||
# This is a copy of the Python logging.config.dictconfig module, |
|||
# reproduced with permission. It is provided here for backwards |
|||
# compatibility for Python versions prior to 2.7. |
|||
# |
|||
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved. |
|||
# |
|||
# Permission to use, copy, modify, and distribute this software and its |
|||
# documentation for any purpose and without fee is hereby granted, |
|||
# provided that the above copyright notice appear in all copies and that |
|||
# both that copyright notice and this permission notice appear in |
|||
# supporting documentation, and that the name of Vinay Sajip |
|||
# not be used in advertising or publicity pertaining to distribution |
|||
# of the software without specific, written prior permission. |
|||
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING |
|||
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL |
|||
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR |
|||
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER |
|||
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT |
|||
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
|||
from __future__ import absolute_import |
|||
|
|||
import logging.handlers |
|||
import re |
|||
import sys |
|||
import types |
|||
|
|||
from pip._vendor import six |
|||
|
|||
# flake8: noqa |
|||
|
|||
# Case-insensitive identifier: a letter/underscore then letters/digits/underscores.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if *s* is a valid identifier; raise ValueError otherwise."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
|||
|
|||
# |
|||
# This function is defined in logging only in recent versions of Python |
|||
# |
|||
try:
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        """Validate *level* and return it as an int.

        Accepts an int directly, or a level-name string defined in
        logging._levelNames (Python 2's level registry); anything else
        raises TypeError/ValueError.
        """
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            # NOTE: _levelNames exists only on Python 2; on Python 3 the
            # stdlib _checkLevel above is imported instead.
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv
|||
|
|||
# The ConvertingXXX classes are wrappers around standard Python containers, |
|||
# and they serve to convert any suitable values in the container. The |
|||
# conversion converts base dicts, lists and tuples to their wrapped |
|||
# equivalents, whereas strings which match a conversion format are converted |
|||
# appropriately. |
|||
# |
|||
# Each wrapper should have a configurator attribute holding the actual |
|||
# configurator to use for conversion. |
|||
|
|||
|
|||
class ConvertingDict(dict):
    """A converting dictionary wrapper."""
    # The `configurator` attribute (set externally) performs the actual
    # value conversion; converted containers get back-references via
    # `parent`/`key` so cfg:// lookups can navigate the tree.

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        # Unlike __getitem__/get, the key has been removed, so the
        # converted result is intentionally NOT written back.
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
|||
|
|||
|
|||
class ConvertingList(list):
    """A converting list wrapper."""
    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        # The element is gone from the list, so nothing is written back;
        # only the parent back-reference is set (no stable key/index).
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result
|||
|
|||
|
|||
class ConvertingTuple(tuple):
    """A converting tuple wrapper."""
    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        # Tuples are immutable, so the converted value cannot be cached;
        # only the navigation back-references are attached.
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
|||
|
|||
|
|||
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    # Matches "prefix://suffix" strings that request a value conversion.
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Sub-patterns used by cfg_convert to walk "key.sub[3].other" paths.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion-protocol prefixes to handler method names.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        # Wrap the raw dict so nested values get converted lazily on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # The attribute may live in a not-yet-imported
                    # submodule: import it, then retry the lookup once.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving cause and traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            # Start from the top-level config entry named by the first word.
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    # ".name" -> mapping lookup
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            # Non-numeric index: always a mapping key.
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container rejected an int key; fall back
                                # to using the string form.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        # Resolve dotted-name factories; old-style classes (py2 ClassType)
        # are callable-checked specially.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            # '.' entries become attributes set on the constructed object.
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
|||
|
|||
|
|||
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.
    """

    def configure(self):
        """Do the configuration.

        Validates the schema version, then -- while holding the logging
        module lock -- either applies an incremental update (handler
        levels, logger levels/propagation only) or rebuilds formatters,
        filters, handlers and loggers from scratch, disabling any
        previously-existing loggers not named in the new configuration.

        NOTE(review): the original code caught ``StandardError``, which
        exists only on Python 2; on Python 3 every such except clause
        would itself raise NameError.  ``Exception`` is used instead,
        matching CPython 3's logging.config.
        """

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except Exception as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                                (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                # (e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except Exception as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except Exception as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            # See configure_custom: callable() replaces a Python-2-only
            # old-style-class test that never resolved string factories
            # on Python 3.
            if not callable(c):
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                    'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except Exception as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                    'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                    'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            # (e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except Exception as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
|||
|
|||
# The configurator class actually used by dictConfig(); callers may
# rebind this to a subclass to customise configuration behaviour.
dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
@ -0,0 +1,906 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import cgi |
|||
import email.utils |
|||
import getpass |
|||
import json |
|||
import logging |
|||
import mimetypes |
|||
import os |
|||
import platform |
|||
import re |
|||
import shutil |
|||
import sys |
|||
import tempfile |
|||
|
|||
try: |
|||
import ssl # noqa |
|||
HAS_TLS = True |
|||
except ImportError: |
|||
HAS_TLS = False |
|||
|
|||
from pip._vendor.six.moves.urllib import parse as urllib_parse |
|||
from pip._vendor.six.moves.urllib import request as urllib_request |
|||
|
|||
import pip |
|||
|
|||
from pip.exceptions import InstallationError, HashMismatch |
|||
from pip.models import PyPI |
|||
from pip.utils import (splitext, rmtree, format_size, display_path, |
|||
backup_dir, ask_path_exists, unpack_file, |
|||
ARCHIVE_EXTENSIONS, consume, call_subprocess) |
|||
from pip.utils.encoding import auto_decode |
|||
from pip.utils.filesystem import check_path_owner |
|||
from pip.utils.logging import indent_log |
|||
from pip.utils.setuptools_build import SETUPTOOLS_SHIM |
|||
from pip.utils.glibc import libc_ver |
|||
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner |
|||
from pip.locations import write_delete_marker_file |
|||
from pip.vcs import vcs |
|||
from pip._vendor import requests, six |
|||
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter |
|||
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth |
|||
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response |
|||
from pip._vendor.requests.utils import get_netrc_auth |
|||
from pip._vendor.requests.structures import CaseInsensitiveDict |
|||
from pip._vendor.requests.packages import urllib3 |
|||
from pip._vendor.cachecontrol import CacheControlAdapter |
|||
from pip._vendor.cachecontrol.caches import FileCache |
|||
from pip._vendor.lockfile import LockError |
|||
from pip._vendor.six.moves import xmlrpc_client |
|||
|
|||
|
|||
# Names that form this module's public API.
__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url']


# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
|||
|
|||
|
|||
def user_agent():
    """
    Return a string representing the user agent.

    The result has the form "pip/<version> <json>", where <json> is a
    compact, key-sorted JSON blob describing the interpreter
    implementation, OS/distro, CPU and (when TLS is available) the
    OpenSSL version.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        # PyPy's own release number differs from the Python version it
        # implements; report the PyPy release here.
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        # Keep only truthy fields from the distro and libc probes.
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    # Python 2.6 doesn't have ssl.OPENSSL_VERSION.
    if HAS_TLS and sys.version_info[:2] > (2, 6):
        data["openssl_version"] = ssl.OPENSSL_VERSION

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
|||
|
|||
|
|||
class MultiDomainBasicAuth(AuthBase):
    """requests auth helper that remembers one (username, password) pair
    per netloc, sourcing credentials from the URL itself, netrc, or (on
    a 401 response) an interactive prompt."""

    def __init__(self, prompting=True):
        # prompting: whether a 401 response may trigger an interactive
        # username/password prompt on the terminal.
        self.prompting = prompting
        # Maps netloc -> (username, password) for reuse across requests.
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        # Keep the original 401 response in the retry's history.
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        # Returns (user, password) for "user:pass@host" netlocs,
        # (user, None) when only a username is present, and
        # (None, None) when there is no userinfo part at all.
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                return userinfo.split(":", 1)
            return userinfo, None
        return None, None
|||
|
|||
|
|||
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that serves file:// URLs from the local
    filesystem as HTTP-style Response objects."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # All keyword arguments are accepted for adapter-interface
        # compatibility but ignored for local files.
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # A missing/unstat-able file becomes a 404; the exception
            # object stands in for the raw body.
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            # Expose the open file as the raw stream; closing the
            # response closes the file.
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        # Nothing to clean up for local-file access.
        pass
|||
|
|||
|
|||
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        """Initialise the cache, disabling it (directory = None) when the
        cache directory is not owned by the current user."""
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user current executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        """Best-effort cache read; returns None when disabled or on error."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        """Best-effort cache write; a no-op when disabled or on error."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        """Best-effort cache delete; a no-op when disabled or on error."""
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass
|||
|
|||
|
|||
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTPAdapter that disables TLS certificate verification entirely."""

    def cert_verify(self, conn, url, verify, cert):
        # Ignore the caller's verify/cert settings: never require or
        # check certificates on this connection.
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
|||
|
|||
|
|||
class PipSession(requests.Session):
    """requests.Session preconfigured for pip: pip User-Agent,
    multi-domain basic auth, retry policy, optional caching of securely
    fetched responses, plus file:// and insecure-host adapters."""

    # Default timeout applied to every request unless overridden per call.
    timeout = None

    def __init__(self, *args, **kwargs):
        # Session-specific options are popped before delegating to
        # requests.Session.
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            status_forcelist=[503],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{0}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
|||
|
|||
|
|||
def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.

    :param url: filename, file: URL, or http(s): URL to read
    :param comes_from: origin of the reference; a remote (http) origin
        referencing a local file: URL is rejected
    :param session: PipSession used for http(s) fetches; required
    :raises TypeError: when no session is supplied
    :raises InstallationError: for disallowed local references or files
        that cannot be opened
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # A remote requirements file must not pull in local files.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                # Windows drive letter in "c|" (pipe) notation -> "c:".
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                # Collapse any run of leading slashes (file:///x -> /x).
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    # At this point url is a local filesystem path.
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
|||
|
|||
|
|||
# Matches the scheme prefix of http/https/file URLs (case-insensitive).
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches Windows drive letters written in "/c|/..." pipe notation.
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
|||
|
|||
|
|||
def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme, _ = name.split(':', 1)
    recognized = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme.lower() in recognized
|||
|
|||
|
|||
def url_to_path(url):
    """
    Convert a file: URL to a path.

    A non-empty netloc is treated as a Windows UNC share and re-attached
    in ``\\\\host`` notation before conversion.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # Prepend UNC share notation when a host component is present.
    unc_prefix = '\\\\' + netloc if netloc else netloc

    return urllib_request.url2pathname(unc_prefix + path)
|||
|
|||
|
|||
def path_to_url(path):
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    absolute = os.path.abspath(path)
    normalized = os.path.normpath(absolute)
    return urllib_parse.urljoin('file:', urllib_request.pathname2url(normalized))
|||
|
|||
|
|||
def is_archive_file(name):
    """Return True if `name` is a considered as an archive file."""
    extension = splitext(name)[1].lower()
    return extension in ARCHIVE_EXTENSIONS
|||
|
|||
|
|||
def unpack_vcs_link(link, location):
    """Fetch a VCS link into *location* via the backend matching its scheme."""
    backend = _get_used_vcs_backend(link)
    backend.unpack(location)
|||
|
|||
|
|||
def _get_used_vcs_backend(link):
    """Return a VCS backend instance handling *link*'s scheme, else None."""
    for candidate in vcs.backends:
        if link.scheme in candidate.schemes:
            return candidate(link.url)
|||
|
|||
|
|||
def is_vcs_url(link):
    """True when a registered VCS backend claims *link*'s scheme."""
    backend = _get_used_vcs_backend(link)
    return bool(backend)
|||
|
|||
|
|||
def is_file_url(link):
    """True when *link* uses the file: scheme (case-insensitive)."""
    lowered = link.url.lower()
    return lowered.startswith('file:')
|||
|
|||
|
|||
def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    return os.path.isdir(url_to_path(link.url_without_fragment))
|||
|
|||
|
|||
def _progress_indicator(iterable, *args, **kwargs): |
|||
return iterable |
|||
|
|||
|
|||
def _download_url(resp, link, content_file, hashes):
    """Stream *resp*'s body into *content_file*, with optional progress
    display and hash verification.

    :param resp: the Response being downloaded
    :param link: the Link being fetched (used for log/progress text)
    :param content_file: open binary file object receiving the chunks
    :param hashes: hash collection to verify against, or falsy to skip
    """
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        # Missing or malformed header: treat length as unknown.
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)

    # Show progress only for non-quiet logging, non-cached responses,
    # and downloads that are large or of unknown size.
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Yield raw (undecoded) chunks of the response body.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Tee: write each chunk to content_file, then pass it through.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
            progress_indicator = DownloadProgressBar(max=total_length).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        # check_against_chunks consumes the stream (driving the write
        # tee) and raises on hash mismatch.
        hashes.check_against_chunks(downloaded_chunks)
    else:
        # Nothing to verify: just drain the generator to drive the copy.
        consume(downloaded_chunks)
|||
|
|||
|
|||
def _copy_file(filename, location, link):
    """Copy *filename* into *location* under the link's filename.

    If the destination already exists the user is prompted to (i)gnore,
    (w)ipe, (b)ackup, or (a)bort; abort exits the process.
    """
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            # User chose to abort: terminate pip entirely.
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
|||
|
|||
|
|||
def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None):
    """Fetch `link` over HTTP (or reuse a previously downloaded copy) and
    unpack the archive into `location`.

    :param download_dir: if set, look there for an already-downloaded file
        first, and copy a fresh download back into it afterwards.
    :param hashes: optional Hashes object to verify the archive against.
    :raises TypeError: if no session is provided.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
    try:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir,
                                                         hashes)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)
    finally:
        # Always remove the temp dir. Previously it leaked whenever a cached
        # download was reused (cleanup only ran when the file was freshly
        # downloaded) and also on any exception. A freshly downloaded archive
        # lives inside temp_dir, so rmtree removes it too; an
        # already-downloaded file lives in download_dir and is untouched.
        rmtree(temp_dir)
|||
|
|||
|
|||
def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # A file:// URL naming a local directory: mirror it into `location`
    # and ignore any download_dir.
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # Prefer a previously downloaded (and hash-verified) copy, if any.
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    from_path = already_downloaded_path or link_path
    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
|||
|
|||
|
|||
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """
    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195
    if os.path.isdir(location):
        rmtree(location)

    # Build an sdist out of the source directory via the setuptools shim,
    # placing the result directly in `location`.
    setup_py = 'setup.py'
    sdist_args = [
        sys.executable,
        '-c', SETUPTOOLS_SHIM % setup_py,
        'sdist',
        '--dist-dir', location,
    ]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # `location` now holds exactly one entry — the sdist we just built;
    # unpack it in place.
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
|||
|
|||
|
|||
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        self._scheme = urllib_parse.urlparse(index_url).scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        """POST `request_body` to the index and parse the XML-RPC reply.

        Logs and re-raises any HTTP error from the index.
        """
        url = urllib_parse.urlunparse(
            (self._scheme, host, handler, None, None, None)
        )
        try:
            response = self._session.post(
                url,
                data=request_body,
                headers={'Content-Type': 'text/xml'},
                stream=True,
            )
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
|||
|
|||
|
|||
def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None):
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    if is_vcs_url(link):
        # non-editable vcs urls
        unpack_vcs_link(link, location)
    elif is_file_url(link):
        # local file / directory urls
        unpack_file_url(link, location, download_dir, hashes=hashes)
    else:
        # everything else goes over HTTP
        if session is None:
            session = PipSession()
        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes
        )

    if only_download:
        write_delete_marker_file(location)
|||
|
|||
|
|||
def _download_http_url(link, session, temp_dir, hashes):
    """Download link url into temp_dir using provided session.

    :return: a ``(file_path, content_type)`` tuple for the downloaded file.
    :raises requests.HTTPError: (re-raised after logging) on HTTP failure.
    """
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        # Renamed from `type`, which shadowed the builtin; the disposition
        # type itself is not used, only the filename parameter.
        _disposition_type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        # No extension on the name: try deriving one from the content type.
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        # Still nothing, but we were redirected — the final URL may carry
        # an extension the original link lacked.
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes)
    return file_path, content_type
|||
|
|||
|
|||
def _check_download_dir(link, download_dir, hashes): |
|||
""" Check download_dir for previously downloaded file with correct hash |
|||
If a correct file is found return its path else None |
|||
""" |
|||
download_path = os.path.join(download_dir, link.filename) |
|||
if os.path.exists(download_path): |
|||
# If already downloaded, does its hash match? |
|||
logger.info('File was already downloaded %s', download_path) |
|||
if hashes: |
|||
try: |
|||
hashes.check_against_path(download_path) |
|||
except HashMismatch: |
|||
logger.warning( |
|||
'Previously-downloaded file %s has bad hash. ' |
|||
'Re-downloading.', |
|||
download_path |
|||
) |
|||
os.unlink(download_path) |
|||
return None |
|||
return download_path |
|||
return None |
@ -0,0 +1,244 @@ |
|||
"""Exceptions used throughout package""" |
|||
from __future__ import absolute_import |
|||
|
|||
from itertools import chain, groupby, repeat |
|||
|
|||
from pip._vendor.six import iteritems |
|||
|
|||
|
|||
class PipError(Exception):
    """Base pip exception; all pip-raised errors derive from this."""
|||
|
|||
|
|||
class InstallationError(PipError):
    """Raised for general errors encountered during installation."""
|||
|
|||
|
|||
class UninstallationError(PipError):
    """Raised for general errors encountered during uninstallation."""
|||
|
|||
|
|||
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement."""
|||
|
|||
|
|||
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
|||
|
|||
|
|||
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
|||
|
|||
|
|||
class BadCommand(PipError):
    """Raised when virtualenv or a command is not found."""
|||
|
|||
|
|||
class CommandError(PipError):
    """Raised when there is an error in command-line arguments."""
|||
|
|||
|
|||
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory."""
|||
|
|||
|
|||
class InvalidWheelFilename(InstallationError):
    """Raised when a wheel has an invalid filename."""
|||
|
|||
|
|||
class UnsupportedWheel(InstallationError):
    """Raised when a wheel is not supported in this environment."""
|||
|
|||
|
|||
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        # Collected HashError instances; sorted by `order` at render time.
        self.errors = []

    def append(self, error):
        """Add one HashError to the collection."""
        self.errors.append(error)

    def __str__(self):
        """Render all collected errors, grouped under their class headings.

        Returns an empty string when nothing has been collected; previously
        the empty case implicitly returned None, which made ``str()`` raise
        TypeError.
        """
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return '\n'.join(lines)

    def __nonzero__(self):
        # Python 2 truthiness: truthy iff any errors were collected.
        return bool(self.errors)

    def __bool__(self):
        # Python 3 truthiness delegates to the Python 2 implementation.
        return self.__nonzero__()
|||
|
|||
|
|||
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        # Heading first, then the per-requirement summary.
        return '\n'.join((self.head, self.body()))

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        if self.req:
            return str(self.req)
        return 'unknown package'
|||
|
|||
|
|||
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    # Hardest class of hash error to recover from, so it sorts first.
    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")
|||
|
|||
|
|||
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, but we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")
|||
|
|||
|
|||
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        """Render a ready-to-paste requirements line with the observed hash."""
        from pip.utils.hashes import FAVORITE_HASH  # Dodge circular import.

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            if self.req.original_link:
                package = self.req.original_link
            else:
                # In case someone feeds something downright stupid
                # to InstallRequirement's constructor.
                package = getattr(self.req, 'req', None)
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)
|||
|
|||
|
|||
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')
|||
|
|||
|
|||
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        """Return the per-requirement report: name plus hash comparison."""
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('        Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            # NOTE: a dead `prefix = '    or'` statement used to sit here; it
            # was overwritten at the top of every iteration and never read,
            # so it has been removed.
        return '\n'.join(lines)
|||
|
|||
|
|||
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""
File diff suppressed because it is too large
@ -0,0 +1,182 @@ |
|||
"""Locations where we look for configs, install stuff, etc""" |
|||
from __future__ import absolute_import |
|||
|
|||
import os |
|||
import os.path |
|||
import site |
|||
import sys |
|||
|
|||
from distutils import sysconfig |
|||
from distutils.command.install import install, SCHEME_KEYS # noqa |
|||
|
|||
from pip.compat import WINDOWS, expanduser |
|||
from pip.utils import appdirs |
|||
|
|||
|
|||
# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


# Text written into the marker file dropped next to unpacked sources (see
# write_delete_marker_file below); its presence tells pip the directory was
# created by pip and may be deleted after a successful install.
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
# Name of the marker file itself.
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
|||
|
|||
|
|||
def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    marker_path = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(marker_path, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)
|||
|
|||
|
|||
def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.

    """
    # Classic virtualenv sets sys.real_prefix; PEP 405 venvs are detected
    # by sys.prefix differing from sys.base_prefix.
    if hasattr(sys, 'real_prefix'):
        return True
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
|||
|
|||
|
|||
def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.

    Now always returns a bool; previously the negative case fell off the
    end of the function and returned None (falsy, so backward-compatible).
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    return running_under_virtualenv() and os.path.isfile(no_global_file)
|||
|
|||
|
|||
# ---------------------------------------------------------------------------
# Module-level path configuration: computed once at import time.
# ---------------------------------------------------------------------------

if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_python_lib()
user_site = site.USER_SITE
user_dir = expanduser('~')
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    # Windows configuration file name.
    config_basename = 'pip.ini'

    # Pre-1.5 location of the per-user config, kept for backward compat.
    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    # POSIX configuration file name.
    config_basename = 'pip.conf'

    # Pre-1.5 location of the per-user config, kept for backward compat.
    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )

# Forcing to use /usr/local/bin for standard macOS framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
    bin_py = '/usr/local/bin'

# System-wide config file candidates, one per platform config directory.
site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]
|||
|
|||
|
|||
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme

    :param dist_name: distribution name, used for the per-dist headers dir.
    :param user: install into the per-user scheme.
    :param home: distutils "home" scheme base directory.
    :param root: path prepended to every generated path (e.g. for staging).
    :param isolated: ignore the user's setup.cfg (--no-user-cfg).
    :param prefix: alternate installation prefix; mutually exclusive with
        `user`.
    :return: dict mapping each distutils SCHEME_KEYS entry to a directory.
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # Use sys.version_info, not sys.version[:3]: the slice truncates
        # "3.10.1" to "3.1" on Python >= 3.10 and yields a wrong headers dir.
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python{0}.{1}'.format(*sys.version_info[:2]),
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
@ -0,0 +1,4 @@ |
|||
from pip.models.index import Index, PyPI |
|||
|
|||
|
|||
__all__ = ["Index", "PyPI"] |
Binary file not shown.
Binary file not shown.
@ -0,0 +1,16 @@ |
|||
from pip._vendor.six.moves.urllib import parse as urllib_parse |
|||
|
|||
|
|||
class Index(object):
    """A package index, with convenience URLs derived from its base URL."""

    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        # Derive the well-known sub-URLs once, up front.
        for attr, path in (('simple_url', 'simple'),
                           ('pypi_url', 'pypi'),
                           ('pip_json_url', 'pypi/pip/json')):
            setattr(self, attr, self.url_to_path(path))

    def url_to_path(self, path):
        """Join `path` onto this index's base URL."""
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.python.org/')
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,49 @@ |
|||
|
|||
|
|||
def check_requirements(installed_dists):
    """Scan `installed_dists` for dependency problems.

    Returns a ``(missing, incompatible)`` pair of dicts, each keyed by
    '<project_name>==<version>' of the distribution with the problem.
    """
    missing_reqs_dict = {}
    incompatible_reqs_dict = {}

    for dist in installed_dists:
        key = '%s==%s' % (dist.project_name, dist.version)

        missing = list(get_missing_reqs(dist, installed_dists))
        if missing:
            missing_reqs_dict[key] = missing

        incompatible = list(get_incompatible_reqs(dist, installed_dists))
        if incompatible:
            incompatible_reqs_dict[key] = incompatible

    return (missing_reqs_dict, incompatible_reqs_dict)
|||
|
|||
|
|||
def get_missing_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    Generator yielding each missing requirement. Name comparison is
    case-insensitive. (A previous version also accumulated results into a
    write-only set that was never read — not even for deduplication — so
    that dead local has been removed; duplicate requirements still yield
    duplicates, exactly as before.)
    """
    installed_names = set(d.project_name.lower() for d in installed_dists)
    for requirement in dist.requires():
        if requirement.project_name.lower() not in installed_names:
            yield requirement
|||
|
|||
|
|||
def get_incompatible_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    Generator yielding ``(requirement, installed_distribution)`` pairs.
    """
    installed_by_name = {d.project_name: d for d in installed_dists}

    for requirement in dist.requires():
        present_dist = installed_by_name.get(requirement.project_name)
        # `in` delegates to the requirement's version-specifier check.
        if present_dist and present_dist not in requirement:
            yield (requirement, present_dist)
@ -0,0 +1,132 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import re |
|||
|
|||
import pip |
|||
from pip.req import InstallRequirement |
|||
from pip.req.req_file import COMMENT_RE |
|||
from pip.utils import get_installed_distributions |
|||
from pip._vendor import pkg_resources |
|||
from pip._vendor.packaging.utils import canonicalize_name |
|||
from pip._vendor.pkg_resources import RequirementParseError |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    """Yield the lines of a requirements-file rendering of the environment.

    :param requirement: optional list of requirements-file paths; when given,
        output mirrors those files' option lines and ordering, then appends
        anything else installed under a '## ... added by pip freeze:' banner.
    :param find_links: extra --find-links URLs to emit and to scan for
        '#egg=' dependency links.
    :param skip_regex: lines of the input requirements files matching this
        regex are passed through without being treated as requirements.
    :param skip: canonicalized project names to omit from the output.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        # Pre-compile once; only the bound `search` method is needed.
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    # Collect dependency links declared by installed distributions and by
    # any '#egg=' find-links, for FrozenRequirement to resolve against.
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            # Unparseable installed metadata: warn and leave it out rather
            # than aborting the whole freeze.
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Pass through blanks, comments, skip-regex matches and
                    # option lines verbatim (deduplicated).
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Anything else is a requirement; parse it so it can be
                    # matched against what is actually installed.
                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        # Emit the frozen (pinned) form and mark it done so
                        # it doesn't appear again in the trailer below.
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    # Whatever was not claimed by a requirements file, alphabetically.
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
@ -0,0 +1,324 @@ |
|||
"""Generate and work with PEP 425 Compatibility Tags.""" |
|||
from __future__ import absolute_import |
|||
|
|||
import re |
|||
import sys |
|||
import warnings |
|||
import platform |
|||
import logging |
|||
|
|||
try: |
|||
import sysconfig |
|||
except ImportError: # pragma nocover |
|||
# Python < 2.7 |
|||
import distutils.sysconfig as sysconfig |
|||
import distutils.util |
|||
|
|||
from pip.compat import OrderedDict |
|||
import pip.utils.glibc |
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') |
|||
|
|||
|
|||
def get_config_var(var):
    """Return ``sysconfig.get_config_var(var)``, downgrading any IOError to
    a RuntimeWarning and returning None instead (see issue #1074)."""
    try:
        value = sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        return None
    return value
|||
|
|||
|
|||
def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        return 'pp'
    if sys.platform.startswith('java'):
        return 'jy'
    if sys.platform == 'cli':
        return 'ip'
    # Default: CPython.
    return 'cp'
|||
|
|||
|
|||
def get_impl_ver():
    """Return the implementation version string, e.g. '27' or '36'."""
    version = get_config_var("py_version_nodot")
    if version and get_abbr_impl() != 'pp':
        return version
    # Fall back to the version-info tuple; always used on PyPy, whose
    # py_version_nodot reflects the CPython compatibility version.
    return ''.join(map(str, get_impl_version_info()))
|||
|
|||
|
|||
def get_impl_version_info():
    """Return a sys.version_info-like tuple for use in decrementing the
    minor version."""
    if get_abbr_impl() != 'pp':
        return sys.version_info[0], sys.version_info[1]
    # PyPy: report the PyPy version rather than the CPython
    # compatibility version (https://github.com/pypa/pip/issues/2882).
    return (sys.version_info[0], sys.pypy_version_info.major,
            sys.pypy_version_info.minor)
|||
|
|||
|
|||
def get_impl_tag():
    """Return the PEP 425 implementation tag, e.g. 'cp27'."""
    return get_abbr_impl() + get_impl_ver()
|||
|
|||
|
|||
def get_flag(var, fallback, expected=True, warn=True):
    """Return whether config variable *var* equals *expected*.

    When the variable is unavailable, *fallback* (a zero-argument
    callable) supplies the answer; optionally log that the computed
    ABI tag may then be wrong.
    """
    val = get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        logger.debug("Config variable '%s' is unset, Python ABI tag may "
                     "be incorrect", var)
    return fallback()
|||
|
|||
|
|||
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy).

    Returns None when no ABI tag can be determined.
    """
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI available (CPython 2 / PyPy): reconstruct the tag
        # from the debug / pymalloc / wide-unicode build flags.
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        # The 'u' (wide unicode) flag only exists before Python 3.3,
        # where PEP 393 made the distinction moot.
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        # e.g. 'cpython-36m-x86_64-linux-gnu' -> 'cp36m'
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        # Other implementations: sanitise the SOABI string into tag form.
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
|||
|
|||
|
|||
def _is_running_32bit():
    """Return True when this interpreter is a 32-bit build."""
    return sys.maxsize == 2 ** 31 - 1
|||
|
|||
|
|||
def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils reports the deployment target Python was *built*
        # against (MACOSX_DEPLOYMENT_TARGET), which can be far older than
        # the running machine -- ask platform.mac_ver() instead.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        # A 32-bit interpreter on 64-bit hardware can only load 32-bit
        # extensions, so report the 32-bit architecture.
        if _is_running_32bit():
            if machine == "x86_64":
                machine = "i386"
            elif machine == "ppc64":
                machine = "ppc"

        return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result
|||
|
|||
|
|||
def is_manylinux1_compatible():
    """Whether the current platform may use manylinux1 wheels."""
    # manylinux1 is defined only for x86 Linux.
    if get_platform() not in ("linux_x86_64", "linux_i686"):
        return False

    # A distribution may declare (in)compatibility explicitly by
    # shipping a `_manylinux` module.
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # No explicit declaration; fall through to the heuristic.
        pass

    # Heuristic: glibc at least 2.5 (the CentOS 5 baseline).
    return pip.utils.glibc.have_compatible_glibc(2, 5)
|||
|
|||
|
|||
def get_darwin_arches(major, minor, machine):
    """Return the supported arches (individual plus group arches and
    'universal') for a macOS system of the given version and machine
    architecture.
    """
    # Group ("fat") arches and their member arches, in preference order.
    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    def _supports_arch(major, minor, arch):
        # Based on the *application* support timeline for macOS versions
        # (https://en.wikipedia.org/wiki/OS_X#Versions):
        #   10.0 introduces ppc; 10.4 introduces i386 (and CLI-only
        #   ppc64/x86_64); 10.5 extends ppc64/x86_64 to GUI apps;
        #   10.6 drops ppc64; 10.7 drops ppc.
        # Since we cannot know whether a package is CLI or GUI, be
        # conservative and treat ppc64/x86_64 as starting at 10.5.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            # A group arch works if any member arch works.
            return any(_supports_arch(major, minor, garch)
                       for garch in groups[arch])
        return False

    arches = []
    if _supports_arch(major, minor, machine):
        arches.append(machine)
    for garch, members in groups.items():
        if machine in members and _supports_arch(major, minor, garch):
            arches.append(garch)
    # 'universal' binaries are accepted everywhere.
    arches.append('universal')

    return arches
|||
|
|||
|
|||
def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param noarch: if True, emit only platform-independent ('none'/'any')
        tags.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    :return: list of (python tag, abi tag, platform tag) triples,
        most-preferred first.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        # The concrete interpreter ABI is the most-preferred entry.
        abis[0:0] = [abi]

    # Collect PEP 384 stable-ABI tags ('abi3', ...) from the extension
    # module suffixes this interpreter accepts.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                # Accept every minor release up to ours, newest first,
                # with every arch/group-arch that release supports.
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            # Prefer manylinux1 wheels over plain linux ones.
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in ('31', '30'):
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
|||
|
|||
# Tag lists for the running interpreter, computed once at import time.
supported_tags = get_supported()
# Same, but restricted to platform-independent ('none'/'any') tags.
supported_tags_noarch = get_supported(noarch=True)

# e.g. 'cp27' -- the interpreter's implementation tag.
implementation_tag = get_impl_tag()
@ -0,0 +1,10 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
from .req_install import InstallRequirement |
|||
from .req_set import RequirementSet, Requirements |
|||
from .req_file import parse_requirements |
|||
|
|||
__all__ = [ |
|||
"RequirementSet", "Requirements", "InstallRequirement", |
|||
"parse_requirements", |
|||
] |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,342 @@ |
|||
""" |
|||
Requirements file parsing |
|||
""" |
|||
|
|||
from __future__ import absolute_import |
|||
|
|||
import os |
|||
import re |
|||
import shlex |
|||
import sys |
|||
import optparse |
|||
import warnings |
|||
|
|||
from pip._vendor.six.moves.urllib import parse as urllib_parse |
|||
from pip._vendor.six.moves import filterfalse |
|||
|
|||
import pip |
|||
from pip.download import get_file_content |
|||
from pip.req.req_install import InstallRequirement |
|||
from pip.exceptions import (RequirementsFileParseError) |
|||
from pip.utils.deprecation import RemovedInPip10Warning |
|||
from pip import cmdoptions |
|||
|
|||
__all__ = ['parse_requirements']

# Matches URL-style requirements-file locations (http/https/file).
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Matches a '#'-comment (at line start or after whitespace) through EOL.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Options that may appear on their own line in a requirements file and
# configure the finder / parsing behaviour as a whole.
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.allow_external,
    cmdoptions.allow_all_external,
    cmdoptions.no_allow_external,
    cmdoptions.allow_unsafe,
    cmdoptions.no_allow_unsafe,
    cmdoptions.use_wheel,
    cmdoptions.no_use_wheel,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
|||
|
|||
|
|||
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    # A session is mandatory; fail loudly rather than at first download.
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    # Preprocess into logical lines, then expand each into requirements.
    for line_number, line in preprocess(content, options):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint):
            yield req
|||
|
|||
|
|||
def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    numbered = enumerate(content.splitlines(), start=1)
    joined = join_lines(numbered)
    uncommented = ignore_comments(joined)
    return skip_regex(uncommented, options)
|||
|
|||
|
|||
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param line: one logical requirements-file line (already joined/stripped).
    :param filename: path or URL of the requirements file (for messages and
        resolving nested/relative paths).
    :param line_number: the line's 1-based index in the file.
    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    # Split the line so only option tokens pass through shlex/optparse.
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)
|||
|
|||
|
|||
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: one (logical) requirements-file line.
    :return: ``(args, options)`` -- both space-joined strings. ``args`` is
        everything before the first option-looking token, ``options`` is
        that token and everything after it.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # Fix: the original also tested startswith('--'), which is
        # redundant -- every '--x' token already starts with '-'.
        if token.startswith('-'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
|||
|
|||
|
|||
def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    # Register every option a requirements file may carry.
    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser
|||
|
|||
|
|||
def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.

    Yields (line_number, line) pairs, like its input.
    """
    primary_line_number = None
    # Accumulates the pieces of the current continuation group.
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                # Close out an in-progress continuation group.
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                # Remember the first line's number for the joined result.
                primary_line_number = line_number
            # NOTE(review): strip('\\') removes *all* leading and trailing
            # backslashes, not only the final continuation marker; looks
            # risky for content that legitimately starts with '\' -- confirm.
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.
|||
|
|||
|
|||
def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for line_number, line in lines_enum:
        stripped = COMMENT_RE.sub('', line).strip()
        # Drop lines that were blank or comment-only.
        if stripped:
            yield line_number, stripped
|||
|
|||
|
|||
def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    pattern_text = options.skip_requirements_regex if options else None
    if pattern_text:
        matcher = re.compile(pattern_text)
        # Keep only the (line_number, line) pairs whose line does NOT match.
        lines_enum = filterfalse(
            lambda numbered_line: matcher.search(numbered_line[1]),
            lines_enum)
    return lines_enum
File diff suppressed because it is too large
@ -0,0 +1,798 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
from collections import defaultdict |
|||
from itertools import chain |
|||
import logging |
|||
import os |
|||
|
|||
from pip._vendor import pkg_resources |
|||
from pip._vendor import requests |
|||
|
|||
from pip.compat import expanduser |
|||
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path, |
|||
unpack_url) |
|||
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled, |
|||
DistributionNotFound, PreviousBuildDirError, |
|||
HashError, HashErrors, HashUnpinned, |
|||
DirectoryUrlHashUnsupported, VcsHashUnsupported, |
|||
UnsupportedPythonVersion) |
|||
from pip.req.req_install import InstallRequirement |
|||
from pip.utils import ( |
|||
display_path, dist_in_usersite, ensure_dir, normalize_path) |
|||
from pip.utils.hashes import MissingHashes |
|||
from pip.utils.logging import indent_log |
|||
from pip.utils.packaging import check_dist_requires_python |
|||
from pip.vcs import vcs |
|||
from pip.wheel import Wheel |
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class Requirements(object):
    """A mapping of requirement name -> requirement that remembers the
    order in which keys were first inserted.

    Re-assigning an existing key replaces the value but keeps the key's
    original position.
    """

    def __init__(self):
        self._order = []
        self._data = {}

    def keys(self):
        """Keys in first-insertion order."""
        return self._order

    def values(self):
        """Values in first-insertion order of their keys."""
        return [self._data[key] for key in self._order]

    def __contains__(self, item):
        return item in self._order

    def __setitem__(self, key, value):
        if key not in self._order:
            self._order.append(key)
        self._data[key] = value

    def __getitem__(self, key):
        return self._data[key]

    def __repr__(self):
        pairs = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
        return 'Requirements({%s})' % ', '.join(pairs)
|||
|
|||
|
|||
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        # The InstallRequirement this abstraction wraps.
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        # Bug fix: previously raised NotImplementedError(self.dist),
        # misreporting which abstract method was missing.
        raise NotImplementedError(self.prep_for_dist)
|||
|
|||
|
|||
def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Only a non-editable requirement pointing at a wheel gets the wheel
    # path; editables and everything else are treated as sdists.
    link = req_to_install.link
    if not req_to_install.editable and link and link.is_wheel:
        return IsWheel(req_to_install)
    return IsSDist(req_to_install)
|||
|
|||
|
|||
class IsWheel(DistAbstraction):
    """Distribution abstraction for an already-built wheel."""

    def dist(self, finder):
        # The unpacked wheel's metadata is discoverable directly from
        # its source directory.
        found = list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))
        return found[0]

    def prep_for_dist(self):
        # Nothing to build for a wheel.
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
|||
|
|||
|
|||
class IsSDist(DistAbstraction):
    """Distribution abstraction for a source distribution (or editable)."""

    def dist(self, finder):
        sdist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if sdist.has_metadata('dependency_links.txt'):
            links = sdist.get_metadata_lines('dependency_links.txt')
            finder.add_dependency_links(links)
        return sdist

    def prep_for_dist(self):
        # Running egg_info produces the metadata dist() will read.
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()
|||
|
|||
|
|||
class Installed(DistAbstraction):
    """Distribution abstraction for an already-installed requirement."""

    def dist(self, finder):
        # The satisfying installed distribution was recorded during
        # resolution; hand it back unchanged.
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        # Already installed -- nothing to prepare.
        pass
|||
|
|||
|
|||
class RequirementSet(object): |
|||
|
|||
    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 upgrade_strategy=None, ignore_installed=False, as_egg=False,
                 target_dir=None, ignore_dependencies=False,
                 force_reinstall=False, use_user_site=False, session=None,
                 pycompile=True, isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False,
                 ignore_requires_python=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        :raises TypeError: when no ``session`` is supplied.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.upgrade_strategy = upgrade_strategy
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        # Ordered mapping of requirement name -> InstallRequirement.
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements given without a name (bare URL/path, no #egg=).
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.ignore_requires_python = ignore_requires_python
        self.successfully_downloaded = []
        self.successfully_installed = []
        # Requirements whose temporary build state must be cleaned up later.
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)
|||
|
|||
def __str__(self): |
|||
reqs = [req for req in self.requirements.values() |
|||
if not req.comes_from] |
|||
reqs.sort(key=lambda req: req.name.lower()) |
|||
return ' '.join([str(req.req) for req in reqs]) |
|||
|
|||
def __repr__(self): |
|||
reqs = [req for req in self.requirements.values()] |
|||
reqs.sort(key=lambda req: req.name.lower()) |
|||
reqs_str = ', '.join([str(req.req) for req in reqs]) |
|||
return ('<%s object; %d requirement(s): %s>' |
|||
% (self.__class__.__name__, len(reqs), reqs_str)) |
|||
|
|||
    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.warning("Ignoring %s: markers '%s' don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # Propagate set-wide installation settings onto the requirement.
        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        install_req.is_direct = (parent_req_name is None)

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # Two *user-supplied* requirements for the same name with the
            # same extras but different specifiers is a hard error.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                            install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result
|||
|
|||
def has_requirement(self, project_name): |
|||
name = project_name.lower() |
|||
if (name in self.requirements and |
|||
not self.requirements[name].constraint or |
|||
name in self.requirement_aliases and |
|||
not self.requirements[self.requirement_aliases[name]].constraint): |
|||
return True |
|||
return False |
|||
|
|||
@property |
|||
def has_requirements(self): |
|||
return list(req for req in self.requirements.values() if not |
|||
req.constraint) or self.unnamed_requirements |
|||
|
|||
@property |
|||
def is_download(self): |
|||
if self.download_dir: |
|||
self.download_dir = expanduser(self.download_dir) |
|||
if os.path.exists(self.download_dir): |
|||
return True |
|||
else: |
|||
logger.critical('Could not find download directory') |
|||
raise InstallationError( |
|||
"Could not find or access download directory '%s'" |
|||
% display_path(self.download_dir)) |
|||
return False |
|||
|
|||
def get_requirement(self, project_name): |
|||
for name in project_name, project_name.lower(): |
|||
if name in self.requirements: |
|||
return self.requirements[name] |
|||
if name in self.requirement_aliases: |
|||
return self.requirements[self.requirement_aliases[name]] |
|||
raise KeyError("No project with the name %r" % project_name) |
|||
|
|||
def uninstall(self, auto_confirm=False): |
|||
for req in self.requirements.values(): |
|||
if req.constraint: |
|||
continue |
|||
req.uninstall(auto_confirm=auto_confirm) |
|||
req.commit_uninstall() |
|||
|
|||
def prepare_files(self, finder): |
|||
""" |
|||
Prepare process. Create temp directories, download and/or unpack files. |
|||
""" |
|||
# make the wheelhouse |
|||
if self.wheel_download_dir: |
|||
ensure_dir(self.wheel_download_dir) |
|||
|
|||
# If any top-level requirement has a hash specified, enter |
|||
# hash-checking mode, which requires hashes from all. |
|||
root_reqs = self.unnamed_requirements + self.requirements.values() |
|||
require_hashes = (self.require_hashes or |
|||
any(req.has_hash_options for req in root_reqs)) |
|||
if require_hashes and self.as_egg: |
|||
raise InstallationError( |
|||
'--egg is not allowed with --require-hashes mode, since it ' |
|||
'delegates dependency resolution to setuptools and could thus ' |
|||
'result in installation of unhashed packages.') |
|||
|
|||
# Actually prepare the files, and collect any exceptions. Most hash |
|||
# exceptions cannot be checked ahead of time, because |
|||
# req.populate_link() needs to be called before we can make decisions |
|||
# based on link type. |
|||
discovered_reqs = [] |
|||
hash_errors = HashErrors() |
|||
for req in chain(root_reqs, discovered_reqs): |
|||
try: |
|||
discovered_reqs.extend(self._prepare_file( |
|||
finder, |
|||
req, |
|||
require_hashes=require_hashes, |
|||
ignore_dependencies=self.ignore_dependencies)) |
|||
except HashError as exc: |
|||
exc.req = req |
|||
hash_errors.append(exc) |
|||
|
|||
if hash_errors: |
|||
raise hash_errors |
|||
|
|||
def _is_upgrade_allowed(self, req): |
|||
return self.upgrade and ( |
|||
self.upgrade_strategy == "eager" or ( |
|||
self.upgrade_strategy == "only-if-needed" and req.is_direct |
|||
) |
|||
) |
|||
|
|||
    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            upgrade_allowed = self._is_upgrade_allowed(req_to_install)

            # Whether the best available version is already installed.
            best_installed = False

            if upgrade_allowed:
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(
                            req_to_install, upgrade_allowed)
                    except BestVersionAlreadyInstalled:
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

            if not best_installed:
                # don't uninstall conflict if user install and
                # conflict is not user install
                if not (self.use_user_site and not
                        dist_in_usersite(req_to_install.satisfied_by)):
                    req_to_install.conflicts_with = \
                        req_to_install.satisfied_by
                req_to_install.satisfied_by = None

            # Figure out a nice message to say why we're skipping this.
            if best_installed:
                skip_reason = 'already up-to-date'
            elif self.upgrade_strategy == "only-if-needed":
                skip_reason = 'not upgraded as not directly required'
            else:
                skip_reason = 'already satisfied'

            return skip_reason
        else:
            # Not installed at all: nothing to skip.
            return None
|||
|
|||
    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :param finder: PackageFinder used to resolve links.
        :param req_to_install: the InstallRequirement being prepared.
        :param require_hashes: when True, enforce hash-checking mode.
        :param ignore_dependencies: when True, do not collect sub-requirements.
        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
                req_to_install.check_if_exists()
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder,
                    self._is_upgrade_allowed(req_to_install),
                    require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            try:
                check_dist_requires_python(dist)
            except UnsupportedPythonVersion as e:
                if self.ignore_requires_python:
                    logger.warning(e.args[0])
                else:
                    req_to_install.remove_temporary_source()
                    raise
            more_reqs = []

            def add_req(subreq, extras_requested):
                # Register one discovered dependency of req_to_install.
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name,
                    extras_requested=extras_requested))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs
|||
|
|||
def cleanup_files(self): |
|||
"""Clean up files, remove builds.""" |
|||
logger.debug('Cleaning up...') |
|||
with indent_log(): |
|||
for req in self.reqs_to_cleanup: |
|||
req.remove_temporary_source() |
|||
|
|||
def _to_install(self): |
|||
"""Create the installation order. |
|||
|
|||
The installation order is topological - requirements are installed |
|||
before the requiring thing. We break cycles at an arbitrary point, |
|||
and make no other guarantees. |
|||
""" |
|||
# The current implementation, which we may change at any point |
|||
# installs the user specified things in the order given, except when |
|||
# dependencies must come earlier to achieve topological order. |
|||
order = [] |
|||
ordered_reqs = set() |
|||
|
|||
def schedule(req): |
|||
if req.satisfied_by or req in ordered_reqs: |
|||
return |
|||
if req.constraint: |
|||
return |
|||
ordered_reqs.add(req) |
|||
for dep in self._dependencies[req]: |
|||
schedule(dep) |
|||
order.append(req) |
|||
for install_req in self.requirements.values(): |
|||
schedule(install_req) |
|||
return order |
|||
|
|||
def install(self, install_options, global_options=(), *args, **kwargs): |
|||
""" |
|||
Install everything in this set (after having downloaded and unpacked |
|||
the packages) |
|||
""" |
|||
to_install = self._to_install() |
|||
|
|||
if to_install: |
|||
logger.info( |
|||
'Installing collected packages: %s', |
|||
', '.join([req.name for req in to_install]), |
|||
) |
|||
|
|||
with indent_log(): |
|||
for requirement in to_install: |
|||
if requirement.conflicts_with: |
|||
logger.info( |
|||
'Found existing installation: %s', |
|||
requirement.conflicts_with, |
|||
) |
|||
with indent_log(): |
|||
requirement.uninstall(auto_confirm=True) |
|||
try: |
|||
requirement.install( |
|||
install_options, |
|||
global_options, |
|||
*args, |
|||
**kwargs |
|||
) |
|||
except: |
|||
# if install did not succeed, rollback previous uninstall |
|||
if (requirement.conflicts_with and not |
|||
requirement.install_succeeded): |
|||
requirement.rollback_uninstall() |
|||
raise |
|||
else: |
|||
if (requirement.conflicts_with and |
|||
requirement.install_succeeded): |
|||
requirement.commit_uninstall() |
|||
requirement.remove_temporary_source() |
|||
|
|||
self.successfully_installed = to_install |
@ -0,0 +1,195 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
import logging |
|||
import os |
|||
import tempfile |
|||
|
|||
from pip.compat import uses_pycache, WINDOWS, cache_from_source |
|||
from pip.exceptions import UninstallationError |
|||
from pip.utils import rmtree, ask, is_local, renames, normalize_path |
|||
from pip.utils.logging import indent_log |
|||
|
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # Paths pip is allowed to remove.
        self.paths = set()
        # Paths outside our prefix that we refuse to touch.
        self._refuse = set()
        # Mapping of .pth file path -> UninstallPthEntries for that file.
        self.pth = {}
        # The Distribution being uninstalled.
        self.dist = dist
        # Temp dir where removed files are stashed until commit()/rollback().
        self.save_dir = None
        # Original locations of files moved into save_dir.
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        """Record *path* for removal (or refusal), if it exists on disk."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Record *entry* for removal from the .pth file *pth_file*."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        # Shortest-first so ancestors are admitted before their children.
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        # Mirror *path* (drive stripped) under the temporary save_dir.
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                # Files are moved (not deleted) into save_dir so the
                # operation can be rolled back until commit() is called.
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove().

        Returns False when there is nothing to roll back; otherwise restores
        the stashed files and .pth entries (and returns None).
        """
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []
|||
|
|||
|
|||
class UninstallPthEntries(object):
    """Tracks entries to be removed from a single .pth file, keeping the
    original contents so the edit can be rolled back."""
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        # Path of the .pth file being edited.
        self.file = pth_file
        # Normalized entries scheduled for removal.
        self.entries = set()
        # Original file lines, captured by remove() for rollback().
        self._saved_lines = None

    def add(self, entry):
        """Schedule *entry* for removal from the .pth file."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the .pth file without the scheduled entries, saving the
        original lines for a possible rollback."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        # Match the file's existing line-ending convention when looking
        # for entries to drop.
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the .pth file to its pre-remove() contents.

        Returns False when remove() was never called; True on success.
        """
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
@ -0,0 +1,8 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
# Process exit codes returned by pip's command-line entry points.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
@ -0,0 +1,870 @@ |
|||
from __future__ import absolute_import |
|||
|
|||
from collections import deque |
|||
import contextlib |
|||
import errno |
|||
import io |
|||
import locale |
|||
# we have a submodule named 'logging' which would shadow this if we used the |
|||
# regular name: |
|||
import logging as std_logging |
|||
import re |
|||
import os |
|||
import posixpath |
|||
import shutil |
|||
import stat |
|||
import subprocess |
|||
import sys |
|||
import tarfile |
|||
import zipfile |
|||
|
|||
from pip.exceptions import InstallationError |
|||
from pip.compat import console_to_str, expanduser, stdlib_pkgs |
|||
from pip.locations import ( |
|||
site_packages, user_site, running_under_virtualenv, virtualenv_no_global, |
|||
write_delete_marker_file, |
|||
) |
|||
from pip._vendor import pkg_resources |
|||
from pip._vendor.six.moves import input |
|||
from pip._vendor.six import PY2 |
|||
from pip._vendor.retrying import retry |
|||
|
|||
if PY2: |
|||
from io import BytesIO as StringIO |
|||
else: |
|||
from io import StringIO |
|||
|
|||
# Explicit public API of this utilities module.
__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_terminal_size', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']
|||
|
|||
|
|||
logger = std_logging.getLogger(__name__)

# Recognized archive filename extensions, grouped by compression scheme.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# Extensions we can actually unpack; extended below if the optional
# compression modules are importable in this interpreter.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')

try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')
|||
|
|||
|
|||
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import and return the named package/module; on ImportError raise
    ``ExceptionType(*args, **kwargs)`` instead."""
    try:
        imported = __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
    return imported
|||
|
|||
|
|||
def ensure_dir(path):
    """os.path.makedirs without EEXIST.

    Creates *path* (and any missing parents); an already-existing directory
    is not an error, any other OSError propagates.
    """
    try:
        os.makedirs(path)
    except OSError as err:
        # Only swallow "already exists"; everything else is a real failure.
        if err.errno != errno.EEXIST:
            raise
|||
|
|||
|
|||
def get_prog():
    """Return the program name to show in usage text: '<python> -m pip' when
    pip was invoked as a module, plain 'pip' otherwise."""
    try:
        invoked_as = os.path.basename(sys.argv[0])
    except (AttributeError, TypeError, IndexError):
        # sys.argv may be missing or empty in embedded interpreters.
        return 'pip'
    if invoked_as in ('__main__.py', '-c'):
        return "%s -m pip" % sys.executable
    return 'pip'
|||
|
|||
|
|||
# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    """Robust shutil.rmtree: retries transient failures (via @retry) and
    clears read-only bits through rmtree_errorhandler."""
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)
|||
|
|||
|
|||
def rmtree_errorhandler(func, path, exc_info):
    """onerror callback for shutil.rmtree.

    On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems.
    """
    if not (os.stat(path).st_mode & stat.S_IREAD):
        # Not a read-only problem: re-raise the exception being handled.
        raise
    # Make the path writable, then retry the operation that failed.
    os.chmod(path, stat.S_IWRITE)
    func(path)
|||
|
|||
|
|||
def display_path(path):
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    shown = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # On Python 2 round-trip through the filesystem/default encodings.
        shown = shown.decode(sys.getfilesystemencoding(), 'replace')
        shown = shown.encode(sys.getdefaultencoding(), 'replace')
    cwd_prefix = os.getcwd() + os.path.sep
    if shown.startswith(cwd_prefix):
        shown = '.' + shown[len(os.getcwd()):]
    return shown
|||
|
|||
|
|||
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    attempt = 1
    suffix = ext
    while os.path.exists(dir + suffix):
        attempt += 1
        suffix = ext + str(attempt)
    return dir + suffix
|||
|
|||
|
|||
def ask_path_exists(message, options):
    """Like ask(), but $PIP_EXISTS_ACTION can pre-answer the question."""
    preset = os.environ.get('PIP_EXISTS_ACTION', '')
    for action in preset.split():
        if action in options:
            return action
    return ask(message, options)
|||
|
|||
|
|||
def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while True:
        # Refuse to prompt in non-interactive mode.
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        answer = input(message).strip().lower()
        if answer in options:
            return answer
        print(
            'Your response (%r) was not one of the expected responses: '
            '%s' % (answer, ', '.join(options))
        )
|||
|
|||
|
|||
def format_size(bytes):
    """Render a byte count as a short human-readable string (decimal units)."""
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes
|||
|
|||
|
|||
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return (os.path.isdir(path) and
            os.path.isfile(os.path.join(path, 'setup.py')))
|||
|
|||
|
|||
def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    revision_title = re.search(r'<title>[^<]*Revision \d+:', html)
    # Both markers must appear; value is the second match (or a falsy None).
    return (revision_title and
            re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
|||
|
|||
|
|||
def file_contents(filename):
    """Read *filename* as raw bytes and decode it as UTF-8 text."""
    with open(filename, 'rb') as fp:
        raw = fp.read()
    return raw.decode('utf-8')
|||
|
|||
|
|||
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    # Stop on any falsy chunk so both bytes ('' -> b'') and text streams work.
    while True:
        piece = file.read(size)
        if not piece:
            return
        yield piece
|||
|
|||
|
|||
def split_leading_dir(path):
    """Split off the first path component, handling both separator styles.

    Returns a two-item list when a separator is found, otherwise the tuple
    ``(path, '')`` (matching the historical return types).
    """
    path = path.lstrip('/').lstrip('\\')
    slash = path.find('/')
    backslash = path.find('\\')
    if slash != -1 and (backslash == -1 or slash < backslash):
        return path.split('/', 1)
    if backslash != -1:
        return path.split('\\', 1)
    return path, ''
|||
|
|||
|
|||
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    shared_prefix = None
    for item in paths:
        prefix, _rest = split_leading_dir(item)
        if not prefix:
            return False
        if shared_prefix is None:
            shared_prefix = prefix
        elif prefix != shared_prefix:
            return False
    return True
|||
|
|||
|
|||
def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.

    """
    expanded = expanduser(path)
    if resolve_symlinks:
        canonical = os.path.realpath(expanded)
    else:
        canonical = os.path.abspath(expanded)
    return os.path.normcase(canonical)
|||
|
|||
|
|||
def splitext(path):
    """Like os.path.splitext, but treats '.tar.<comp>' style suffixes as
    one extension (i.e. takes off '.tar' too)."""
    root, extension = posixpath.splitext(path)
    if root.lower().endswith('.tar'):
        # Fold '.tar' back into the extension: 'a.tar.gz' -> ('a', '.tar.gz')
        extension = root[-4:] + extension
        root = root[:-4]
    return root, extension
|||
|
|||
|
|||
def renames(old, new):
    """Move *old* to *new* like os.renames(): create any missing parent
    directories of *new*, then prune directories left empty around *old*.
    Unlike a plain os.rename(), this also works across devices because it
    delegates the move itself to shutil.move().
    """
    new_dir, new_name = os.path.split(new)
    if new_dir and new_name and not os.path.exists(new_dir):
        os.makedirs(new_dir)

    shutil.move(old, new)

    old_dir, old_name = os.path.split(old)
    if old_dir and old_name:
        try:
            # Remove any chain of directories the move left empty.
            os.removedirs(old_dir)
        except OSError:
            # A parent still holds other entries; leave it in place.
            pass
|||
|
|||
|
|||
def is_local(path):
    """
    Return True if this is a path pip is allowed to modify.

    If we're in a virtualenv, sys.prefix points to the virtualenv's
    prefix; only sys.prefix is considered local.

    If we're not in a virtualenv, in general we can modify anything.
    However, if the OS vendor has configured distutils to install
    somewhere other than sys.prefix (which could be a subdirectory of
    sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
    and the domain of the OS vendor. (In other words, everything _other
    than_ sys.prefix is considered local.)

    """
    path = normalize_path(path)
    prefix = normalize_path(sys.prefix)

    if running_under_virtualenv():
        # Inside a virtualenv, only the env's own prefix is ours.
        # (Fix: reuse the `prefix` computed above instead of calling
        # normalize_path(sys.prefix) a second time.)
        return path.startswith(prefix)
    else:
        from pip.locations import distutils_scheme
        if path.startswith(prefix):
            # Under sys.prefix: local only if inside one of the distutils
            # install locations, since the vendor may have redirected
            # installs to e.g. /usr/local.
            for local_path in distutils_scheme("").values():
                if path.startswith(normalize_path(local_path)):
                    return True
            return False
        else:
            # Entirely outside sys.prefix: pip may modify it.
            return True
|||
|
|||
|
|||
def dist_is_local(dist):
    """
    Return True when the given Distribution object is installed in a
    location pip is allowed to modify.
    """
    location = dist_location(dist)
    return is_local(location)
|||
|
|||
|
|||
def dist_in_usersite(dist):
    """
    Return True if the given Distribution is installed in the user site
    directory.
    """
    user_base = normalize_path(user_site)
    return normalize_path(dist_location(dist)).startswith(user_base)
|||
|
|||
|
|||
def dist_in_site_packages(dist):
    """
    Return True if the given Distribution is installed in
    distutils.sysconfig.get_python_lib().
    """
    site_pkgs = normalize_path(site_packages)
    return normalize_path(dist_location(dist)).startswith(site_pkgs)
|||
|
|||
|
|||
def dist_is_editable(dist):
    """Return True when *dist* is an editable (develop-mode) install,
    detected by a matching .egg-link file anywhere on sys.path."""
    link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(entry, link_name))
        for entry in sys.path
    )
|||
|
|||
|
|||
def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True , only report editables.

    If ``user_only`` is True , only report installations in the user
    site directory.

    """
    # Each block below builds a predicate over a Distribution; the
    # trivial variants always return True so the final comprehension can
    # apply all four filters unconditionally.
    if local_only:
        local_test = dist_is_local
    else:
        def local_test(d):
            return True

    if include_editables:
        def editable_test(d):
            return True
    else:
        # Exclude editables when include_editables is False.
        def editable_test(d):
            return not dist_is_editable(d)

    if editables_only:
        # Keep only editables when editables_only is True.
        def editables_only_test(d):
            return dist_is_editable(d)
    else:
        def editables_only_test(d):
            return True

    if user_only:
        user_test = dist_in_usersite
    else:
        def user_test(d):
            return True

    # Filter the live working set through every predicate plus the
    # lower-case skip list.
    return [d for d in pkg_resources.working_set
            if local_test(d) and
            d.key not in skip and
            editable_test(d) and
            editables_only_test(d) and
            user_test(d)
            ]
|||
|
|||
|
|||
def egg_link_path(dist):
    """
    Return the path of the .egg-link file for *dist* if one exists,
    otherwise None.

    There are three scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases where there's an egg-link in
    two locations; the first one found wins.
    """
    # Build the search order described above.
    if running_under_virtualenv():
        search_dirs = [site_packages]
        if not virtualenv_no_global() and user_site:
            search_dirs.append(user_site)
    else:
        search_dirs = [user_site] if user_site else []
        search_dirs.append(site_packages)

    link_name = dist.project_name + '.egg-link'
    for directory in search_dirs:
        candidate = os.path.join(directory, link_name)
        if os.path.isfile(candidate):
            return candidate
|||
|
|||
|
|||
def dist_location(dist):
    """
    Get the site-packages location of this distribution.  Generally this
    is dist.location, except for develop-installed packages, where
    dist.location is the source checkout; in that case the path of the
    .egg-link file is returned instead.
    """
    link = egg_link_path(dist)
    return link or dist.location
|||
|
|||
|
|||
def get_terminal_size():
    """Return a tuple (x, y) giving the width (x) and the height (y)
    of the attached terminal window, in characters.

    Falls back to the LINES/COLUMNS environment variables, and finally
    to (80, 25), when no terminal size can be determined.
    """
    def ioctl_GWINSZ(fd):
        # Query the kernel for the window size of *fd*; None on failure.
        try:
            import fcntl
            import termios
            import struct
            cr = struct.unpack(
                'hh',
                fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
            )
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.  Not a tty, unsupported
            # platform, or a failed ioctl all land here.
            return None
        if cr == (0, 0):
            return None
        return cr
    cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
    if not cr:
        try:
            # Try the controlling terminal directly.
            fd = os.open(os.ctermid(), os.O_RDONLY)
            cr = ioctl_GWINSZ(fd)
            os.close(fd)
        except Exception:
            # Fix: was a bare `except:`; no controlling tty is fine.
            pass
    if not cr:
        cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
    return int(cr[1]), int(cr[0])
|||
|
|||
|
|||
def current_umask():
    """Return the process umask without permanently changing it."""
    # The only portable way to *read* the umask is to set it and
    # immediately restore the original value.
    original = os.umask(0)
    os.umask(original)
    return original
|||
|
|||
|
|||
def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`.

    All files are written based on system defaults and umask (i.e.
    permissions are not preserved), except that regular file members with
    any execute permissions (user, group, or world) have "chmod +x"
    applied after being written.  Note that for windows, any execute
    changes using os.chmod are no-ops per the python docs.
    """
    ensure_dir(location)
    # Fixes: the archive object and directory variable were named `zip`
    # and `dir`, shadowing builtins; the file was closed via a manual
    # try/finally instead of a `with` block.
    with open(filename, 'rb') as zipfp:
        zip_archive = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zip_archive.namelist()) and flatten
        for info in zip_archive.infolist():
            name = info.filename
            data = zip_archive.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory entry.
                ensure_dir(fn)
            else:
                ensure_dir(os.path.dirname(fn))
                with open(fn, 'wb') as fp:
                    fp.write(data)
                # The high 16 bits of external_attr hold the Unix mode.
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
|||
|
|||
|
|||
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the tarfile mode from the extension; fall back to transparent
    # compression detection ('r:*') with a warning.
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                # Strip the single common top-level directory.
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # NOTE(review): _extract_member is a private tarfile
                    # API, used here to create the symlink itself.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
|||
|
|||
|
|||
def unpack_file(filename, location, content_type, link):
    """Unpack the archive *filename* into *location*.

    Dispatches on content type / extension / sniffing: zip archives
    (including wheels, which are not flattened), tar variants, or an svn
    repository index page; raises InstallationError when no format can
    be determined.
    """
    filename = os.path.realpath(filename)
    # NOTE: branch order matters — the zip check runs before the tar
    # check, and each branch combines declared content-type, extension,
    # and content sniffing.
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        # Wheels must keep their internal directory layout, so only
        # flatten non-wheel zips.
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # An svn index page: check the repository out instead of
        # unpacking an archive.
        from pip.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )
|||
|
|||
|
|||
def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
    """Run *cmd* in a subprocess with stderr merged into stdout.

    When show_stdout is False the captured output is returned as a
    string; a nonzero exit status is handled according to
    *on_returncode* ('raise' -> InstallationError, 'warn' -> log a
    warning, 'ignore' -> nothing).
    """
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Build a shell-like description of the command for log messages,
        # quoting arguments that contain whitespace or quotes.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
            cwd=cwd, env=env)
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    if stdout is not None:
        # Read the pipe line by line until EOF, accumulating everything
        # so it can be replayed if the command fails.
        all_output = []
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    proc.wait()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                # The output was hidden while running; show it now that
                # the command has failed.
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)
|||
|
|||
|
|||
def read_text_file(filename):
    """Return the contents of *filename* as text.

    Decoding is attempted with utf-8, the preferred system encoding
    (e.g., cp1252 on some Windows machines), and latin1, in that order.
    Decoding a byte string with latin1 never raises an error, so in the
    worst case the result contains some garbage characters.
    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    for encoding in ['utf-8', locale.getpreferredencoding(False), 'latin1']:
        try:
            data = data.decode(encoding)
        except UnicodeDecodeError:
            continue
        break

    assert type(data) != bytes  # Latin1 should have worked.
    return data
|||
|
|||
|
|||
def _make_build_dir(build_dir):
    # Create the build directory and drop the delete-marker file in it so
    # the directory can be recognized (and cleaned up) later.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
|||
|
|||
|
|||
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""

    def __init__(self, lines):
        # Generator over the lines, consumed one readline() at a time.
        self._gen = (line for line in lines)

    def readline(self):
        # Return the next line, or '' once the lines are exhausted
        # (matching real file behavior at EOF).
        try:
            try:
                return next(self._gen)
            except NameError:
                # Ancient Pythons without the next() builtin.
                return self._gen.next()
        except StopIteration:
            return ''

    def __iter__(self):
        return self._gen
|||
|
|||
|
|||
class StreamWrapper(StringIO):
    # A StringIO that remembers the stream it replaced so it can proxy
    # its encoding attribute.
    # NOTE(review): from_stream stores orig_stream on the *class*, not
    # the instance, so the most recent call wins process-wide — confirm
    # this is acceptable before reusing outside captured_output().

    @classmethod
    def from_stream(cls, orig_stream):
        cls.orig_stream = orig_stream
        return cls()

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding
|||
|
|||
|
|||
@contextlib.contextmanager
def captured_output(stream_name):
    """Context manager used by captured_stdout/stdin/stderr that swaps
    the sys stream named *stream_name* for a StringIO-backed wrapper and
    yields it.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(original))
    try:
        yield getattr(sys, stream_name)
    finally:
        # Always restore the real stream, even if the body raised.
        setattr(sys, stream_name, original)
|||
|
|||
|
|||
def captured_stdout():
    """Capture the output of sys.stdout, yielding the replacement stream:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')
|||
|
|||
|
|||
class cached_property(object):
    """A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.

    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: hand back the descriptor.
            return self
        # Compute once, then shadow this descriptor with the plain value
        # stored in the instance __dict__ (instance attributes win over
        # non-data descriptors, so this method never runs again).
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
|||
|
|||
|
|||
def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Build a requirement object so setuptools can match the project.
    req = pkg_resources.Requirement.parse(dist_name)

    # Construct a fresh working set on every call so nothing is served
    # from pkg_resources' module-level cache.
    working_set = (
        pkg_resources.WorkingSet() if lookup_dirs is None
        else pkg_resources.WorkingSet(lookup_dirs)
    )

    # Look the distribution up in that working set; None when absent.
    dist = working_set.find(req)
    return dist.version if dist else None
|||
|
|||
|
|||
def consume(iterator):
    """Exhaust *iterator* completely, discarding every item."""
    # A zero-length deque drains the iterator in C without storing items.
    deque(iterator, maxlen=0)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue