Mirror of https://github.com/YunoHost-Apps/ffsync_ynh.git (synced 2024-09-03 18:26:38 +02:00)

Commit da757c51ba: 385 changed files with 118678 additions and 217 deletions.
The commit migrates the ffsync package from a Python 2 + uwsgi stack to PyPy with gunicorn under a dedicated systemd unit, vendors pip 20.1 for the PyPy virtualenv, and pins the Python dependency set.
README.md
@@ -1,10 +1,10 @@
 # Mozilla’s Sync Server for YunoHost

 [![Integration level](https://dash.yunohost.org/integration/ffsync.svg)](https://dash.yunohost.org/appci/app/ffsync) ![](https://ci-apps.yunohost.org/ci/badges/ffsync.status.svg) ![](https://ci-apps.yunohost.org/ci/badges/ffsync.maintain.svg)
 [![Install ffsync with YunoHost](https://install-app.yunohost.org/install-with-yunohost.svg)](https://install-app.yunohost.org/?app=ffsync)

 *[Lire ce readme en français.](./README_fr.md)*

 > *This package allow you to install ffsync quickly and simply on a YunoHost server.
 If you don't have YunoHost, please see [here](https://yunohost.org/#/install) to know how to install and enjoy it.*

 ## Overview
@@ -24,11 +24,6 @@ By default, a server set up will defer authentication to the Mozilla-hosted acco
 Once installed, reaching `http://domain.tld/path` should show a page explaining how to configure it.

-#### Supported architectures
-
-* x86-64 - [![Build Status](https://ci-apps.yunohost.org/ci/logs/ffsync%20%28Apps%29.svg)](https://ci-apps.yunohost.org/ci/apps/ffsync/)
-* ARMv8-A - [![Build Status](https://ci-apps-arm.yunohost.org/ci/logs/ffsync%20%28Apps%29.svg)](https://ci-apps-arm.yunohost.org/ci/apps/ffsync/)
-
 ## Links

 * Report a bug about this package: https://github.com/YunoHost-Apps/ffsync_ynh/issues
check_process
@@ -14,6 +14,5 @@
     multi_instance=1
     port_already_use=0
     change_url=0
-;;; Options
-    Email=jean-baptiste@holcroft.fr
-    Notification=fail
+;;; Upgrade options
+    name=Before migration to pypy
conf/nginx.conf
@@ -4,15 +4,12 @@ location __PATH__/ {
   # Path to source
   alias __FINALPATH__/ ;

-  # Force usage of https
-  if ($scheme = http) {
-    rewrite ^ https://$server_name$request_uri? permanent;
-  }
-
   include uwsgi_params;
   # Needed for long running operations in admin interface
-  uwsgi_read_timeout 3600;
-  __IS_SUBPATH__uwsgi_param SCRIPT_NAME __PATH__;
-  __IS_SUBPATH__uwsgi_modifier1 30;
-  uwsgi_pass unix:///run/__NAME__/app.socket;
+  proxy_pass http://localhost:__PORT__/;
+  proxy_set_header Host $host;
+  proxy_set_header X-Forwarded-Proto $scheme;
+  proxy_buffering off;
+  client_max_body_size 200M;
+  proxy_set_header X-Real-IP $remote_addr;
 }
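The proxy chain changes shape here: nginx no longer speaks the uwsgi protocol over a unix socket but plain HTTP to a local gunicorn listener. A minimal smoke test of both hops, assuming the port picked at install time is 6000 and the placeholders resolve to domain.tld/path (both are assumptions, substituted at install time):

    # Hit gunicorn directly, bypassing nginx (port assumed to be 6000):
    curl -i http://127.0.0.1:6000/
    # Then through nginx on the public path:
    curl -ik https://domain.tld/path/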
conf/requirement.txt (new file)
@@ -0,0 +1,84 @@
alembic==1.0.9
asn1crypto==0.24.0
backports.functools-lru-cache==1.6.1
beautifulsoup4==4.6.3
boto==2.49.0
cachetools==3.1.1
certifi==2019.3.9
cffi==1.14.0
Chameleon==3.4
chardet==3.0.4
configparser==3.5.0
cornice==0.16.2
cryptography==2.6.1
enum34==1.1.6
futures==3.0.0
gevent==1.4.0
google-api-core==1.22.4
google-auth==1.22.1
google-cloud-core==1.4.3
google-cloud-spanner==1.18.0
googleapis-common-protos==1.52.0
greenlet==0.4.13
grpc-google-iam-v1==0.12.3
grpcio==1.32.0
grpcio-gcp==0.2.2
gunicorn==19.6.0
hawkauthlib==2.0.0
hupper==1.6.1
idna==2.8
ipaddress==1.0.22
konfig==1.1
linecache2==1.0.0
Mako==1.0.9
MarkupSafe==1.1.1
mozsvc==0.9
mysqlclient==1.4.6
Paste==3.0.8
PasteDeploy==2.0.1
pip==20.2.3
pkg-resources==0.0.0
plaster==1.0
plaster-pastedeploy==0.7
protobuf==3.13.0
pyasn1==0.4.8
pyasn1-modules==0.2.8
PyBrowserID==0.14.0
pycparser==2.19
PyFxA==0.7.7
PyJWT==1.7.1
PyMySQL==0.9.3
pymysql-sa==1.0
pyramid==1.5.3
pyramid-chameleon==0.3
pyramid-hawkauth==2.0.0
python-dateutil==2.8.0
python-editor==1.0.4
pytz==2020.1
repoze.lru==0.7
requests==2.20.0
rsa==4.5
setuptools==44.1.1
simplejson==3.16.0
six==1.14.0
soupsieve==1.9.5
SQLAlchemy==1.3.3
SyncStorage==1.8.0
testfixtures==6.7.0
tokenlib==2.0.0
tokenserver==1.5.11
traceback2==1.4.0
translationstring==1.3
umemcache==1.6.3
unittest2==1.1.0
urllib3==1.24
venusian==1.2.0
waitress==1.1.0
WebOb==1.4.1
WebTest==2.0.30
wheel==0.35.1
WSGIProxy==0.2.2
zope.component==4.2.1
zope.deprecation==4.4.0
zope.event==4.3.0
zope.interface==4.6.0
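This is a frozen snapshot of the dependency set for the PyPy 2.7 stack; note gunicorn 19.6.0 replacing uwsgi, and the Python 2 backports (enum34, futures, configparser). Once the virtualenv is built, the coherence of whatever actually got installed can be verified with pip itself (a sketch; $final_path is the app's install directory):

    # "pip check" reports packages with missing or incompatible dependencies:
    "$final_path/local/bin/pip" check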
conf/syncserver.ini (uwsgi config replaced by a gunicorn PasteDeploy config; file name inferred from scripts/install)
@@ -1,14 +1,9 @@
-[uwsgi]
-plugins = python
-master = true
-protocol = uwsgi
-socket = /run/__APP__/app.socket
-chmod-socket = 660
-virtualenv = __FINALPATH__/local
-wsgi-file = __FINALPATH__/syncserver.wsgi
-python-path = __FINALPATH__/local
-enable-threads = true
-close-on-exec = true
+[server:main]
+use = egg:gunicorn
+host = 127.0.0.1
+port = __PORT__
+workers = 1
+timeout = 30

 [app:main]
 use = egg:syncserver
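The PasteDeploy config swaps servers without touching the app: [server:main] now resolves gunicorn through its egg:gunicorn entry point and binds it to localhost on the templated port, while [app:main] still loads the syncserver application factory. Standalone, gunicorn consumes the rendered file roughly like this (a sketch; the paths assume an instance named ffsync, matching the systemd unit below):

    /opt/yunohost/ffsync/local/bin/gunicorn --paste /opt/yunohost/ffsync/syncserver.ini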
conf/systemd.service (new file)
@@ -0,0 +1,51 @@
[Unit]
Description=Firefox sync server
After=network.target
After=mysql.service

[Service]
# Modify these two values and uncomment them if you have
# repos with lots of files and get an HTTP error 500 because
# of that
###
#LimitMEMLOCK=infinity
#LimitNOFILE=65535
Type=simple
User=__APP__
Group=__APP__
WorkingDirectory=/opt/yunohost/__APP__
ExecStart=/opt/yunohost/__APP__/local/bin/gunicorn --paste /opt/yunohost/__APP__/syncserver.ini
Restart=always

# Sandboxing options to harden security
# Depending on specificities of your service/app, you may need to tweak these
# .. but this should be a good baseline
# Details for these options: https://www.freedesktop.org/software/systemd/man/systemd.exec.html
NoNewPrivileges=yes
PrivateTmp=yes
PrivateDevices=yes
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
RestrictNamespaces=yes
RestrictRealtime=yes
DevicePolicy=closed
ProtectSystem=full
ProtectControlGroups=yes
ProtectKernelModules=yes
ProtectKernelTunables=yes
LockPersonality=yes
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @swap

# Denying access to capabilities that should not be relevant for webapps
# Doc: https://man7.org/linux/man-pages/man7/capabilities.7.html
CapabilityBoundingSet=~CAP_RAWIO CAP_MKNOD
CapabilityBoundingSet=~CAP_AUDIT_CONTROL CAP_AUDIT_READ CAP_AUDIT_WRITE
CapabilityBoundingSet=~CAP_SYS_BOOT CAP_SYS_TIME CAP_SYS_MODULE CAP_SYS_PACCT
CapabilityBoundingSet=~CAP_LEASE CAP_LINUX_IMMUTABLE CAP_IPC_LOCK
CapabilityBoundingSet=~CAP_BLOCK_SUSPEND CAP_WAKE_ALARM
CapabilityBoundingSet=~CAP_SYS_TTY_CONFIG
CapabilityBoundingSet=~CAP_MAC_ADMIN CAP_MAC_OVERRIDE
CapabilityBoundingSet=~CAP_NET_ADMIN CAP_NET_BROADCAST CAP_NET_RAW
CapabilityBoundingSet=~CAP_SYS_ADMIN CAP_SYS_PTRACE CAP_SYSLOG

[Install]
WantedBy=multi-user.target
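One practical gain of the dedicated unit over the shared uwsgi-app@ template is that the standard systemd tooling applies directly (instance name ffsync assumed):

    systemctl status ffsync        # unit state plus the most recent log lines
    journalctl -u ffsync -f        # follow gunicorn's output, now routed to the journal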
conf/uwsgi-app@override.service (deleted file; path inferred from the removed uwsgi helper that installed it)
@@ -1,2 +0,0 @@
[Unit]
After=mysql.service
manifest.json
@@ -6,7 +6,7 @@
     "en": "Mozilla’s Sync-Server to host your Firefox account data",
     "fr": "Le serveur de synchronisation de Mozilla, pour héberger vos données Firefox"
   },
-  "version": "1.9.1~ynh2",
+  "version": "1.9.1~ynh3",
   "url": "https://github.com/mozilla-services/syncserver",
   "license": "MPL-2.0",
   "maintainer": {
scripts/_common.sh
@@ -7,33 +7,40 @@

 # Note that we also need some specific pkg_dependencies for build with arm architectures
 # dependencies used by the app
-pkg_dependencies="python2.7 python2.7-dev python-virtualenv virtualenv uwsgi uwsgi-plugin-python build-essential libssl-dev libffi-dev libmariadbclient-dev-compat"
+pkg_dependencies="pypy pypy-dev python3-virtualenv build-essential libssl-dev libffi-dev libmariadb-dev-compat"

 #=================================================
 # PERSONAL HELPERS
 #=================================================

+call_pip() {
+    ynh_exec_warn_less pip $@
+}
+
 install_sources() {
     ynh_setup_source --dest_dir "$final_path"

     # pip installation
-    virtualenv --python=$(which python2 python | head -n 1) "$final_path/local"
+    python3 -m virtualenv --python=$(which pypy | head -n 1) "$final_path/local"
+    # Install manually pip v20.1 because the installed version from the system don't work any more with pyp v2.7
+    cp -r ../sources/pip_20.1/. $final_path/local/site-packages/pip

     # Init virtualenv
-    (
     set +o nounset
     source "$final_path/local/bin/activate"
     set -o nounset
-    cd "$final_path"
-    pip install --upgrade pip
-    pip install --upgrade pyramid_chameleon 'soupsieve<2.0'
+    pushd "$final_path"
+    call_pip install --upgrade 'pip<20.2'
+    call_pip install setuptools==44.1.1
+    call_pip install --upgrade pyramid_chameleon 'soupsieve<2.0'
     CFLAGS="-Wno-error -Wno-error=format-security" \
     ARCHFLAGS="-Wno-error=unused-command-line-argument-hard-error-in-future" \
-    pip install --upgrade --requirement "$final_path/requirements.txt"
-
-    python "$final_path/setup.py" develop
-
-    touch "$final_path/local/COMPLETE"
-    )
+    call_pip install --upgrade --requirement "$final_path/requirements.txt"
+    pypy "$final_path/setup.py" develop
+    test -e $final_path/local/lib_pypy/_sysconfigdata.py || ln -s /usr/lib/pypy/lib_pypy/_sysconfigdata.py $final_path/local/lib_pypy/_sysconfigdata.py
+    test -e $final_path/local/lib_pypy/cffi || ln -s /usr/lib/pypy/lib_pypy/cffi $final_path/local/lib_pypy/cffi
+    popd

     # Add nice homepage
     cp -r ../sources/page $final_path/syncserver/
     (cd "$final_path/syncserver" && patch -p1 < $YNH_CWD/../sources/homepage.patch) || echo "Unable to apply patches"
@@ -42,8 +49,6 @@ install_sources() {
 set_permissions() {
     chown $app -R $final_path
     chmod u=rwX,g=rX,o= -R $final_path
-    chown $app:root /var/log/uwsgi/$app
-    chmod -R u=rwX,g=rX,o= /var/log/uwsgi/$app
 }

 #=================================================
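A quick way to confirm that install_sources produced a PyPy virtualenv rather than a CPython one (a sketch; $final_path is the app's install directory):

    # Expected to print "PyPy" with a 2.7 language level:
    "$final_path/local/bin/python" -c 'import sys, platform; print(platform.python_implementation(), sys.version)'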
scripts/backup
@@ -7,7 +7,6 @@
 #=================================================

 #Keep this path for calling _common.sh inside the execution's context of backup and restore scripts
-source ../settings/scripts/experimental_helper.sh
 source ../settings/scripts/_common.sh
 source /usr/share/yunohost/helpers

@@ -51,14 +50,10 @@ ynh_mysql_dump_db --database="$db_name" > db.sql
 #=================================================
 # SPECIFIC BACKUP
 #=================================================
-# Backup Log
-ynh_print_info --message="Backing up logs"
-ynh_backup --src_path="/var/log/uwsgi/$app"
-
-# BACKUP THE UWSGI FILES
-ynh_print_info --message="Backing up UWSGI..."
-ynh_backup --src_path="/etc/uwsgi/apps-available/$app.ini"
-ynh_backup --src_path="/etc/systemd/system/uwsgi-app@.service"
+# BACKUP THE systemd FILES
+ynh_print_info --message="Backing up systemd..."
+ynh_backup --src_path="/etc/systemd/system/$app.service"

 #=================================================
 # END OF SCRIPT
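With the uwsgi log directory gone, the archive now carries the app directory, the database dump, the nginx config, and the unit file. The script is exercised through the usual front end (instance name ffsync assumed):

    yunohost backup create --apps ffsync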
scripts/experimental_helper.sh (deleted file)
@@ -1,132 +0,0 @@
# Check if system wide templates are available and correcly configured
#
# usage: ynh_check_global_uwsgi_config
ynh_check_global_uwsgi_config () {
    uwsgi --version || ynh_die --message="You need to add uwsgi (and appropriate plugin) as a dependency"

    cat > /etc/systemd/system/uwsgi-app@.service <<EOF
[Unit]
Description=%i uWSGI app
After=syslog.target

[Service]
RuntimeDirectory=%i
ExecStart=/usr/bin/uwsgi \
        --ini /etc/uwsgi/apps-available/%i.ini \
        --socket /run/%i/app.socket \
        --logto /var/log/uwsgi/%i/%i.log
User=%i
Group=www-data
Restart=always
RestartSec=10
KillSignal=SIGQUIT
Type=notify
StandardError=syslog
NotifyAccess=all

[Install]
WantedBy=multi-user.target
EOF

    systemctl daemon-reload
}

# Create a dedicated uwsgi ini file to use with generic uwsgi service
#
# This will use a template in ../conf/uwsgi.ini
# and will replace the following keywords with
# global variables that should be defined before calling
# this helper :
#
#   __APP__       by $app
#   __PATH__      by $path_url
#   __FINALPATH__ by $final_path
#
# And dynamic variables (from the last example) :
#   __PATH_2__    by $path_2
#   __PORT_2__    by $port_2
#
# To be able to customise the settings of the systemd unit you can override the rules with the file "conf/uwsgi-app@override.service".
# This file will be automatically placed on the good place
#
# Note that the service need to be started manually at the end of the installation.
# Generally you can start the service with this command:
# # ynh_systemd_action --service_name "uwsgi-app@$app.service" --line_match "WSGI app 0 \(mountpoint='[/[:alnum:]_-]*'\) ready in [[:digit:]]* seconds on interpreter" --log_path "/var/log/uwsgi/$app/$app.log"
#
# usage: ynh_add_uwsgi_service
#
# to interact with your service: `systemctl <action> uwsgi-app@$app`
ynh_add_uwsgi_service () {
    ynh_check_global_uwsgi_config

    local others_var=${1:-}
    local finaluwsgiini="/etc/uwsgi/apps-available/$app.ini"

    # www-data group is needed since it is this nginx who will start the service
    usermod --append --groups www-data "$app" || ynh_die --message="It wasn't possible to add user $app to group www-data"

    ynh_backup_if_checksum_is_different --file="$finaluwsgiini"
    cp ../conf/uwsgi.ini "$finaluwsgiini"

    # To avoid a break by set -u, use a void substitution ${var:-}. If the variable is not set, it's simply set with an empty variable.
    # Substitute in a nginx config file only if the variable is not empty
    if test -n "${final_path:-}"; then
        ynh_replace_string --match_string="__FINALPATH__" --replace_string="$final_path" --target_file="$finaluwsgiini"
    fi
    if test -n "${path_url:-}"; then
        ynh_replace_string --match_string="__PATH__" --replace_string="$path_url" --target_file="$finaluwsgiini"
    fi
    if test -n "${app:-}"; then
        ynh_replace_string --match_string="__APP__" --replace_string="$app" --target_file="$finaluwsgiini"
    fi

    # Replace all other variable given as arguments
    for var_to_replace in $others_var
    do
        # ${var_to_replace^^} make the content of the variable on upper-cases
        # ${!var_to_replace} get the content of the variable named $var_to_replace
        ynh_replace_string --match_string="__${var_to_replace^^}__" --replace_string="${!var_to_replace}" --target_file="$finaluwsgiini"
    done

    ynh_store_file_checksum --file="$finaluwsgiini"

    chown $app:root "$finaluwsgiini"

    # make sure the folder for logs exists and set authorizations
    mkdir -p /var/log/uwsgi/$app
    chown $app:root /var/log/uwsgi/$app
    chmod -R u=rwX,g=rX,o= /var/log/uwsgi/$app

    # Setup specific Systemd rules if necessary
    test -e ../conf/uwsgi-app@override.service && \
        mkdir /etc/systemd/system/uwsgi-app@$app.service.d && \
        cp ../conf/uwsgi-app@override.service /etc/systemd/system/uwsgi-app@$app.service.d/override.conf

    systemctl daemon-reload
    systemctl enable "uwsgi-app@$app.service"

    # Add as a service
    yunohost service add "uwsgi-app@$app" --log "/var/log/uwsgi/$app/$app.log"
}

# Remove the dedicated uwsgi ini file
#
# usage: ynh_remove_uwsgi_service
ynh_remove_uwsgi_service () {
    local finaluwsgiini="/etc/uwsgi/apps-available/$app.ini"
    if [ -e "$finaluwsgiini" ]; then
        yunohost service remove "uwsgi-app@$app"
        systemctl stop "uwsgi-app@$app.service"
        systemctl disable "uwsgi-app@$app.service"

        ynh_secure_remove --file="$finaluwsgiini"
        ynh_secure_remove --file="/var/log/uwsgi/$app"
        ynh_secure_remove --file="/etc/systemd/system/uwsgi-app@$app.service.d"
    fi
}

ynh_restore_uwsgi_service () {
    ynh_check_global_uwsgi_config
    systemctl enable "uwsgi-app@$app" --quiet
    yunohost service add "uwsgi-app@$app" --log "/var/log/uwsgi/$app/$app.log"
}
scripts/install
@@ -6,7 +6,6 @@
 # IMPORT GENERIC HELPERS
 #=================================================

-source ./experimental_helper.sh
 source ./_common.sh
 source /usr/share/yunohost/helpers

@@ -36,6 +35,9 @@ test ! -e "$final_path" || ynh_die --message="This path already contains a folde
 # Register (book) web path
 ynh_webpath_register --app=$app --domain=$domain --path_url=$path_url

+# Find available ports
+port=$(ynh_find_port --port 6000)
+
 #=================================================
 # STORE SETTINGS FROM MANIFEST
 #=================================================
@@ -43,6 +45,7 @@ ynh_webpath_register --app=$app --domain=$domain --path_url=$path_url
 ynh_app_setting_set --app=$app --key=domain --value=$domain
 ynh_app_setting_set --app=$app --key=path --value=$path_url
 ynh_app_setting_set --app=$app --key=secret --value="$secret"
+ynh_app_setting_set --app $app --key web_port --value $port

 #=================================================
 # STANDARD MODIFICATIONS
@@ -104,11 +107,12 @@ ynh_system_user_create --username=$app --home_dir=$final_path

 # create config file syncserver.ini
 ynh_script_progression --message="Configuring application..."
-rm "$final_path/syncserver.ini"
-ln -s "/etc/uwsgi/apps-available/$app.ini" "$final_path/syncserver.ini"
+ynh_add_config --template="syncserver.ini" --destination="$final_path/syncserver.ini"

-# configure uwsgi
-ynh_add_uwsgi_service 'domain secret db_user db_pwd db_name'
+# Configure init script
+ynh_script_progression --message="Configuring a systemd service..." --weight=2
+ynh_add_systemd_config
+yunohost service add "$app"

 #=================================================
 # MODIFY A CONFIG FILE
@@ -142,8 +146,8 @@ ynh_script_progression --message="Restart services..."
 ynh_systemd_action --service_name=nginx --action=reload

 ynh_script_progression --message="Starting $app services..." --weight=3
-ynh_systemd_action --service_name "uwsgi-app@$app.service" \
-    --line_match "WSGI app 0 \(mountpoint='[/[:alnum:]_-]*'\) ready in [[:digit:]]* seconds on interpreter" --log_path "/var/log/uwsgi/$app/$app.log"
+ynh_systemd_action --service_name "$app.service" \
+    --line_match "Booting worker with pid" --log_path "systemd" -t 20

 #=================================================
 # END OF SCRIPT
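The readiness probe changes with the stack: instead of grepping uwsgi's log file for its "WSGI app ... ready" line, ynh_systemd_action now watches the journal for gunicorn's startup message and gives up after 20 seconds. The same signal can be checked by hand (instance name ffsync assumed):

    # gunicorn logs one "Booting worker with pid: <n>" line per worker:
    journalctl -u ffsync --no-pager | grep -m1 "Booting worker with pid"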
scripts/remove
@@ -6,7 +6,6 @@
 # IMPORT GENERIC HELPERS
 #=================================================

-source ./experimental_helper.sh
 source ./_common.sh
 source /usr/share/yunohost/helpers

@@ -40,8 +39,10 @@ fi
 #=================================================
 ynh_script_progression --message="Removing configuration..."

-# Remove the dedicated systemd config
-ynh_remove_uwsgi_service
+# Remove init script
+ynh_script_progression --message="Removing systemd units..."
+ynh_remove_systemd_config
+yunohost service remove "$app"

 #=================================================
 # REMOVE THE MYSQL DATABASE
scripts/restore
@@ -5,7 +5,6 @@
 #=================================================
 # IMPORT GENERIC HELPERS
 #=================================================
-source ../settings/scripts/experimental_helper.sh
 source ../settings/scripts/_common.sh
 source /usr/share/yunohost/helpers

@@ -31,8 +30,6 @@ db_user=$db_name
 #=================================================
 # CHECK IF THE APP CAN BE RESTORED
 #=================================================
-ynh_webpath_available --domain=$domain --path_url=$path_url \
-    || ynh_die --message="Path not available: ${domain}${path_url}"
 test ! -d $final_path \
     || ynh_die --message="There is already a directory: $final_path "

@@ -82,8 +79,9 @@ ynh_mysql_connect_as --user=$db_user --password=$db_pwd --database=$db_name < ./

 ynh_script_progression --message="Reloading services..." --weight=3
-ynh_restore_uwsgi_service
+systemctl daemon-reload
+systemctl enable --quiet $app.service
+yunohost service add "$app"

 #=================================================
 # GENERIC FINALIZATION
@@ -92,8 +90,8 @@ ynh_restore_uwsgi_service

 ynh_script_progression --message="Starting pgadmin services..." --weight=3
-ynh_systemd_action --service_name "uwsgi-app@$app.service" \
-    --line_match "WSGI app 0 \(mountpoint='[/[:alnum:]_-]*'\) ready in [[:digit:]]* seconds on interpreter" --log_path "/var/log/uwsgi/$app/$app.log"
-ynh_systemd_action --service_name=nginx --action=reload
+ynh_systemd_action --service_name "$app.service" \
+    --line_match "Booting worker with pid" --log_path "systemd"
+ynh_systemd_action --service_name=nginx --action=reload -t 20

 ynh_script_progression --message="Restoration completed for $app" --last
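Restore now re-enables the app's own unit and re-registers it with YunoHost rather than rebuilding the uwsgi-app@ template. End to end, the script runs through the usual front end (archive and instance names are placeholders):

    yunohost backup restore <archive_name> --apps ffsync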
scripts/upgrade
@@ -6,7 +6,6 @@
 # IMPORT GENERIC HELPERS
 #=================================================

-source ./experimental_helper.sh
 source ./_common.sh
 source /usr/share/yunohost/helpers

@@ -23,6 +22,7 @@ final_path=$(ynh_app_setting_get --app $app --key=final_path)
 db_name=$(ynh_app_setting_get --app=$app --key=db_name)
 secret=$(ynh_app_setting_get --app $app --key=secret)
 db_pwd=$(ynh_app_setting_get --app=$app --key mysqlpwd)
+port=$(ynh_app_setting_get --app=$app --key web_port)
 db_user=$app

 #=================================================
@@ -49,6 +49,12 @@ if [ -z "$final_path" ]; then
     ynh_app_setting_set --app=$app --key=final_path --value=$final_path
 fi

+if [ -z "${port:-}" ]; then
+    # Find available ports
+    port=$(ynh_find_port --port 6000)
+    ynh_app_setting_set --app $app --key web_port --value $port
+fi
+
 # If path_url doesn't exist, create it
 if [ -z "$path_url" ]; then
     path_url=$(ynh_app_setting_get --app=$app --key=path)
@@ -88,6 +94,23 @@ ynh_clean_setup () {
 # Exit if an error occurs during the execution of the script
 ynh_abort_if_errors

+#=================================================
+# ENSURE DOWNWARD COMPATIBILITY
+#=================================================
+ynh_script_progression --message="Checking backware compatibility..." --weight=10
+
+# Detect old installation with uwsgi
+if [ -e /etc/uwsgi/apps-available/$app.ini ]; then
+    systemctl stop uwsgi-app@$app.service
+    systemctl disable --quiet uwsgi-app@$app.service
+    yunohost service remove "uwsgi-app@$app"
+    ynh_secure_remove --file=/etc/uwsgi/apps-available/$app.ini
+    ynh_secure_remove --file=/etc/systemd/system/uwsgi-app@$app.service.d
+    ynh_secure_remove --file=$final_path
+else
+    systemctl stop $app.service
+fi
+
 #=================================================
 # STANDARD UPGRADE STEPS
 #=================================================
@@ -105,7 +128,13 @@ ynh_install_app_dependencies $pkg_dependencies

 # Download, check integrity, uncompress and patch the source from app.src
 ynh_script_progression --message="Upgrading source files..." --weight=6
-install_sources
+if [ -e $final_path/syncserver.ini ]; then
+    config_backup="$(cat $final_path/syncserver.ini)"
+    install_sources
+    echo "$config_backup" > $final_path/syncserver.ini
+else
+    install_sources
+fi

 #=================================================
 # NGINX CONFIGURATION
@@ -134,11 +163,12 @@ ynh_system_user_create --username="$app"
 ynh_script_progression --message="Configuring application..."

 # create config file syncserver.ini
-rm "$final_path/syncserver.ini"
-ln -s "/etc/uwsgi/apps-available/$app.ini" "$final_path/syncserver.ini"
+ynh_script_progression --message="Configuring application..."
+ynh_add_config --template="syncserver.ini" --destination="$final_path/syncserver.ini"

-# configure uwsgi
-ynh_add_uwsgi_service 'domain secret db_user db_pwd db_name'
+# Configure init script
+ynh_script_progression --message="Configuring a systemd service..." --weight=2
+ynh_add_systemd_config

 # Upgrade database table
 ynh_mysql_execute_as_root --sql='ALTER TABLE `users` ADD COLUMN IF NOT EXISTS `keys_changed_at` BIGINT NULL AFTER `replaced_at`;' --database=$db_name
@@ -166,10 +196,11 @@ then
 fi
 ynh_permission_update --permission=main --add=visitors --protected=true --show_tile=true

+yunohost service add "$app"
+
 ynh_script_progression --message="Restarting $app services..." --weight=3
-ynh_systemd_action --service_name "uwsgi-app@$app.service" \
-    --line_match "WSGI app 0 \(mountpoint='[/[:alnum:]_-]*'\) ready in [[:digit:]]* seconds on interpreter" --log_path "/var/log/uwsgi/$app/$app.log"
+ynh_systemd_action --service_name "$app.service" \
+    --line_match "Booting worker with pid" --log_path "systemd" -t 20

 #=================================================
 # END OF SCRIPT
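The schema migration relies on MariaDB's ADD COLUMN IF NOT EXISTS, so re-running the upgrade is harmless. Whether the column landed can be double-checked by hand (a sketch; database name assumed to be ffsync):

    mysql -e 'SHOW COLUMNS FROM `users` LIKE "keys_changed_at";' ffsync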
sources/pip_20.1/__init__.py (new file)
@@ -0,0 +1,18 @@
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional


__version__ = "20.1.1"


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is an internal API only meant for use by pip's own console scripts.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
sources/pip_20.1/__main__.py (new file)
@@ -0,0 +1,26 @@
from __future__ import absolute_import

import os
import sys

# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory
# in pip commands check, freeze, install, list and show,
# when invoked as python -m pip <command>
if sys.path[0] in ('', os.getcwd()):
    sys.path.pop(0)

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips of '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal.cli.main import main as _main  # isort:skip # noqa

if __name__ == '__main__':
    sys.exit(_main())
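These files are stock pip 20.1.1, vendored so the PyPy 2.7 virtualenv gets a working pip regardless of what the distribution ships. Once install_sources copies them into the virtualenv's site-packages, the normal module entry point applies (a sketch; $final_path as above):

    "$final_path/local/bin/python" -m pip --version    # expected to report pip 20.1.1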
sources/pip_20.1/_internal/__init__.py (new file)
@@ -0,0 +1,17 @@
import pip._internal.utils.inject_securetransport  # noqa
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, List


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
sources/pip_20.1/_internal/build_env.py (new file)
@@ -0,0 +1,219 @@
"""Build Environment used for isolation during sdist building
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths

from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet

from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Tuple, Set, Iterable, Optional, List
    from pip._internal.index.package_finder import PackageFinder

logger = logging.getLogger(__name__)


class _Prefix:

    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=False, prefix=path)
        platlib = get_python_lib(plat_specific=True, prefix=path)
        if purelib == platlib:
            self.lib_dirs = [purelib]
        else:
            self.lib_dirs = [purelib, platlib]


class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        temp_dir = TempDirectory(
            kind=tempdir_kinds.BUILD_ENV, globally_managed=True
        )

        self._prefixes = OrderedDict((
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ('normal', 'overlay')
        ))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth file are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(self, exc_type, exc_val, exc_tb):
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
            - conflicting requirements: set of (installed, wanted) reqs tuples
            - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: Optional[str]
    ):
        # type: (...) -> None
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        args = [
            sys.executable, os.path.dirname(pip_location), 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))

        index_urls = finder.index_urls
        if index_urls:
            args.extend(['-i', index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])

        for host in finder.trusted_hosts:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        args.append('--')
        args.extend(requirements)
        with open_spinner(message) as spinner:
            call_subprocess(args, spinner=spinner)


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass

    def install_requirements(self, finder, requirements, prefix, message):
        raise NotImplementedError()
sources/pip_20.1/_internal/cache.py (new file)
@@ -0,0 +1,349 @@
"""Cache Management
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import hashlib
import json
import logging
import os

from pip._vendor.packaging.tags import interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, List, Any, Dict

    from pip._vendor.packaging.tags import Tag

    from pip._internal.models.format_control import FormatControl

logger = logging.getLogger(__name__)


def _hash_dict(d):
    # type: (Dict[str, str]) -> str
    """Return a stable sha224 of a dictionary."""
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()


class Cache(object):
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super(Cache, self).__init__()
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts_legacy(self, link):
        # type: (Link) -> List[str]
        """Get parts of part that must be os.path.joined with cache_dir

        Legacy cache key (pip < 20) for compatibility with older caches.
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of part that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, canonical_package_name):
        # type: (Link, Optional[str]) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not canonical_package_name or
            not link
        )
        if can_not_cache:
            return []

        formats = self.format_control.get_allowed_formats(
            canonical_package_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            for candidate in os.listdir(path):
                candidates.append((candidate, path))
        # TODO remove legacy path lookup in pip>=21
        legacy_path = self.get_path_for_link_legacy(link)
        if os.path.isdir(legacy_path):
            for candidate in os.listdir(legacy_path):
                candidates.append((candidate, legacy_path))
        return candidates

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        raise NotImplementedError()

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        parts = self._get_cache_path_parts_legacy(link)
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(
            link, canonical_package_name
        ):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if canonicalize_name(wheel.name) != canonical_package_name:
                logger.debug(
                    "Ignoring cached wheel {} for {} as it "
                    "does not match the expected distribution name {}.".format(
                        wheel_name, link, package_name
                    )
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            candidates.append(
                (
                    wheel.support_index_min(supported_tags),
                    wheel_name,
                    wheel_dir,
                )
            )

        if not candidates:
            return link

        _, wheel_name, wheel_dir = min(candidates)
        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates it's own temporary cache directory
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )


class CacheEntry(object):
    def __init__(
        self,
        link,  # type: Link
        persistent,  # type: bool
    ):
        self.link = link
        self.persistent = persistent


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for gracefully degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link_legacy(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link_legacy(link)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
        return cache_entry.link

    def get_cache_entry(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Optional[CacheEntry]
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
        """
        retval = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=True)

        retval = self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
|
)
|
||||||
|
if retval is not link:
|
||||||
|
return CacheEntry(retval, persistent=False)
|
||||||
|
|
||||||
|
return None
|
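WheelCache above is what the rest of pip talks to: a lookup first consults the persistent SimpleWheelCache under the user cache directory, then the per-invocation EphemWheelCache. A minimal sketch of a lookup follows; the cache directory, the sdist link, and the tag are illustrative values, not anything from this package, and the global_tempdir_manager() context is needed because the ephemeral cache uses a globally-managed TempDirectory.

# Sketch only: persistent-then-ephemeral lookup order of get_cache_entry().
# The cache dir, link URL, and tag below are hypothetical examples.
from pip._internal.cache import WheelCache
from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.utils.temp_dir import global_tempdir_manager
from pip._vendor.packaging.tags import Tag

with global_tempdir_manager():  # required for the ephemeral cache's temp dir
    cache = WheelCache("/tmp/pip-cache", FormatControl(set(), set()))
    link = Link("https://example.com/demo-1.0.tar.gz")  # hypothetical sdist
    entry = cache.get_cache_entry(link, "demo", [Tag("py3", "none", "any")])
    if entry is None:
        print("no cached wheel; pip would have to build one")
    else:
        kind = "persistent" if entry.persistent else "ephemeral"
        print("cached wheel ({}): {}".format(kind, entry.link))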
sources/pip_20.1/_internal/cli/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Subpackage containing all of pip's command line interface related code
"""

# This file intentionally does not import submodules
sources/pip_20.1/_internal/cli/autocompletion.py (new file, 164 lines)
@@ -0,0 +1,164 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""

import optparse
import os
import sys
from itertools import chain

from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, create_command
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Iterable, List, Optional


def autocomplete():
    # type: () -> None
    """Entry point for completion of main and subcommand options.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand
    subcommand_name = None  # type: Optional[str]
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = create_command(subcommand_name)

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith('-'):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword,
                                                       flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current,
                                                       completion_type))

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def get_path_completion_type(cwords, cword, opts):
    # type: (List[str], int, Iterable[Any]) -> Optional[str]
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environment variable ``COMP_WORDS``
    :param cword: same as the environment variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    if cword < 2 or not cwords[cword - 2].startswith('-'):
        return None
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        for o in str(opt).split('/'):
            if cwords[cword - 2].split('=')[0] == o:
                if not opt.metavar or any(
                        x in ('path', 'file', 'dir')
                        for x in opt.metavar.split('/')):
                    return opt.metavar
    return None


def auto_complete_paths(current, completion_type):
    # type: (str, str) -> Iterable[str]
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    # list all files that start with ``filename``
    file_list = (x for x in os.listdir(current_path)
                 if os.path.normcase(x).startswith(filename))
    for f in file_list:
        opt = os.path.join(current_path, f)
        comp_file = os.path.normcase(os.path.join(directory, f))
        # complete regular files when there is not ``<dir>`` after option
        # complete directories when there is ``<file>``, ``<path>`` or
        # ``<dir>`` after option
        if completion_type != 'dir' and os.path.isfile(opt):
            yield comp_file
        elif os.path.isdir(opt):
            yield os.path.join(comp_file, '')
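autocomplete() is driven entirely by environment variables that the shell completion script exports before re-invoking pip. A minimal sketch of that protocol, simulated from Python (the partial command line "pip ins" is an arbitrary example):

# Sketch only: simulates the environment pip's bash completion script sets.
# This prints "install" (the subcommand matching "ins") and then raises
# SystemExit(1), which is the contract autocomplete() keeps with the shell.
import os

os.environ["PIP_AUTO_COMPLETE"] = "1"
os.environ["COMP_WORDS"] = "pip ins"   # the partial command line
os.environ["COMP_CWORD"] = "1"         # index of the word being completed

from pip._internal.cli.autocompletion import autocomplete
autocomplete()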
sources/pip_20.1/_internal/cli/base_command.py (new file, 228 lines)
@@ -0,0 +1,228 @@
"""Base Command class, and related routines"""

from __future__ import absolute_import, print_function

import logging
import logging.config
import optparse
import os
import platform
import sys
import traceback

from pip._internal.cli import cmdoptions
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.cli.parser import (
    ConfigOptionParser,
    UpdatingDefaultsHelpFormatter,
)
from pip._internal.cli.status_codes import (
    ERROR,
    PREVIOUS_BUILD_DIR_ERROR,
    SUCCESS,
    UNKNOWN_ERROR,
    VIRTUALENV_NOT_FOUND,
)
from pip._internal.exceptions import (
    BadCommand,
    CommandError,
    InstallationError,
    PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.temp_dir import (
    global_tempdir_manager,
    tempdir_registry,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import running_under_virtualenv

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Tuple, Any
    from optparse import Values

    from pip._internal.utils.temp_dir import (
        TempDirectoryTypeRegistry as TempDirRegistry
    )

__all__ = ['Command']

logger = logging.getLogger(__name__)


class Command(CommandContextMixIn):
    usage = None  # type: str
    ignore_require_venv = False  # type: bool

    def __init__(self, name, summary, isolated=False):
        # type: (str, str, bool) -> None
        super(Command, self).__init__()
        parser_kw = {
            'usage': self.usage,
            'prog': '{} {}'.format(get_prog(), name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': name,
            'description': self.__doc__,
            'isolated': isolated,
        }

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(**parser_kw)

        self.tempdir_registry = None  # type: Optional[TempDirRegistry]

        # Commands should add options to this option group
        optgroup_name = '{} Options'.format(self.name.capitalize())
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

    def handle_pip_version_check(self, options):
        # type: (Values) -> None
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, 'no_index')

    def run(self, options, args):
        # type: (Values, List[Any]) -> Any
        raise NotImplementedError

    def parse_args(self, args):
        # type: (List[str]) -> Tuple[Any, Any]
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args):
        # type: (List[str]) -> int
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args):
        # type: (List[str]) -> int
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        if (
            sys.version_info[:2] == (2, 7) and
            not options.no_python_version_warning
        ):
            message = (
                "pip 21.0 will drop support for Python 2.7 in January 2021. "
                "More details about Python 2 support in pip can be found at "
                "https://pip.pypa.io/en/latest/development/release-process/#python-2-support"  # noqa
            )
            if platform.python_implementation() == "CPython":
                message = (
                    "Python 2.7 reached the end of its life on January "
                    "1st, 2020. Please upgrade your Python as Python 2.7 "
                    "is no longer maintained. "
                ) + message
            deprecated(message, replacement=None, gone_in=None)

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.

        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'

        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical('%s', exc)
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:', exc_info=True)

            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)

            return UNKNOWN_ERROR
        finally:
            self.handle_pip_version_check(options)

        return SUCCESS
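The contract Command defines is small: subclasses implement run() and return one of the status codes, while main() supplies option parsing, logging setup, temp-dir management, and the exception-to-exit-code mapping above. A minimal hypothetical subclass, just to sketch that contract (HelloCommand and its name/summary are made up for illustration):

# Sketch only: a made-up subclass showing the Command contract.
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS

class HelloCommand(Command):
    """Print a greeting."""  # the docstring becomes the parser description

    def run(self, options, args):
        print("hello from a pip-style command")
        return SUCCESS

if __name__ == "__main__":
    import sys
    # main() parses args, sets up logging, then dispatches to run()
    sys.exit(HelloCommand("hello", "Say hello.").main(sys.argv[1:]))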
sources/pip_20.1/_internal/cli/cmdoptions.py (new file, 962 lines)
@@ -0,0 +1,962 @@
"""
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging
import os
import textwrap
import warnings
from distutils.util import strtobool
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup
from textwrap import dedent

from pip._internal.cli.progress_bars import BAR_TYPES
from pip._internal.exceptions import CommandError
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.target_python import TargetPython
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Callable, Dict, Optional, Tuple
    from optparse import OptionParser, Values
    from pip._internal.cli.parser import ConfigOptionParser

logger = logging.getLogger(__name__)


def raise_option_error(parser, option, msg):
    # type: (OptionParser, Option, str) -> None
    """
    Raise an option parsing error using parser.error().

    Args:
      parser: an OptionParser instance.
      option: an Option instance.
      msg: the error text.
    """
    msg = '{} error: {}'.format(option, msg)
    msg = textwrap.fill(' '.join(msg.split()))
    parser.error(msg)


def make_option_group(group, parser):
    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option())
    return option_group


def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n):
        # type: (str) -> Optional[Any]
        return getattr(check_options, n, None)
    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-option '
            '/ --global-option / --install-option.', stacklevel=2,
        )


def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    dist_restriction_set = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])

    binary_only = FormatControl(set(), {':all:'})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target:
        if dist_restriction_set and not options.target_dir:
            raise CommandError(
                "Can not use any platform or abi specific options unless "
                "installing via '--target'"
            )


def _path_option_check(option, opt, value):
    # type: (Option, str, str) -> str
    return os.path.expanduser(value)


class PipOption(Option):
    TYPES = Option.TYPES + ("path",)
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    TYPE_CHECKER["path"] = _path_option_check


###########
# options #
###########

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Callable[..., Option]

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)  # type: Callable[..., Option]

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)  # type: Callable[..., Option]

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)  # type: Callable[..., Option]

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Callable[..., Option]

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Callable[..., Option]

progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Callable[..., Option]

log = partial(
    PipOption,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    type="path",
    help="Path to a verbose appending log."
)  # type: Callable[..., Option]

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Callable[..., Option]

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Callable[..., Option]

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Callable[..., Option]


def exists_action():
    # type: () -> Option
    return Option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )


cert = partial(
    PipOption,
    '--cert',
    dest='cert',
    type='path',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Callable[..., Option]

client_cert = partial(
    PipOption,
    '--client-cert',
    dest='client_cert',
    type='path',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Callable[..., Option]

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of the Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Callable[..., Option]


def extra_index_url():
    # type: () -> Option
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )


no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Callable[..., Option]


def find_links():
    # type: () -> Option
    return Option(
        '-f', '--find-links',
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a URL or path to an html file, then parse for links to "
             "archives such as sdist (.tar.gz) or wheel (.whl) files. "
             "If a local path or file:// URL that's a directory, "
             "then look for archives in the directory listing. "
             "Links to VCS project URLs are not supported.",
    )


def trusted_host():
    # type: () -> Option
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host or host:port pair as trusted, even though it "
             "does not have valid or any HTTPS.",
    )


def constraints():
    # type: () -> Option
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )


def requirements():
    # type: () -> Option
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )


def editable():
    # type: () -> Option
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )


def _handle_src(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    value = os.path.abspath(value)
    setattr(parser.values, option.dest, value)


src = partial(
    PipOption,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    type='path',
    metavar='dir',
    default=get_src_prefix(),
    action='callback',
    callback=_handle_src,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Callable[..., Option]


def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.only_binary, existing.no_binary,
    )


def no_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=format_control,
        help='Do not use binary packages. Can be supplied multiple times, and '
             'each time adds to the existing value. Accepts either ":all:" to '
             'disable all binary packages, ":none:" to empty the set (notice '
             'the colons), or one or more package names with commas between '
             'them (no colons). Note that some packages are tricky to compile '
             'and may fail to install when this option is used on them.',
    )


def only_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=format_control,
        help='Do not use source packages. Can be supplied multiple times, and '
             'each time adds to the existing value. Accepts either ":all:" to '
             'disable all source packages, ":none:" to empty the set, or one '
             'or more package names with commas between them. Packages '
             'without binary distributions will fail to install when this '
             'option is used on them.',
    )


platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
)  # type: Callable[..., Option]


# This was made a separate function for unit-testing purposes.
def _convert_python_version(value):
    # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
    """
    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.

    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
        non-None if and only if there was a parsing error.
    """
    if not value:
        # The empty string is the same as not providing a value.
        return (None, None)

    parts = value.split('.')
    if len(parts) > 3:
        return ((), 'at most three version parts are allowed')

    if len(parts) == 1:
        # Then we are in the case of "3" or "37".
        value = parts[0]
        if len(value) > 1:
            parts = [value[0], value[1:]]

    try:
        version_info = tuple(int(part) for part in parts)
    except ValueError:
        return ((), 'each version part must be an integer')

    return (version_info, None)
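# A minimal sketch of the behaviour documented in the docstring above,
# written out as assertions; these hold for the implementation as given
# (they are an editorial illustration, not part of the original module).
assert _convert_python_version("3") == ((3,), None)
assert _convert_python_version("37") == ((3, 7), None)
assert _convert_python_version("3.7.3") == ((3, 7, 3), None)
assert _convert_python_version("") == (None, None)
assert _convert_python_version("3.7.3.1") == ((), 'at most three version parts are allowed')
assert _convert_python_version("3.x") == ((), 'each version part must be an integer')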
def _handle_python_version(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Handle a provided --python-version value.
    """
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        msg = (
            'invalid --python-version value: {!r}: {}'.format(
                value, error_msg,
            )
        )
        raise_option_error(parser, option=option, msg=msg)

    parser.values.python_version = version_info


python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    action='callback',
    callback=_handle_python_version, type='str',
    default=None,
    help=dedent("""\
    The Python interpreter version to use for wheel and "Requires-Python"
    compatibility checks. Defaults to a version derived from the running
    interpreter. The version can be specified using up to three dot-separated
    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
    version can also be given as a string without dots (e.g. "37" for 3.7.0).
    """),
)  # type: Callable[..., Option]


implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          "or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
)  # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
)  # type: Callable[..., Option]


def add_target_python_options(cmd_opts):
    # type: (OptionGroup) -> None
    cmd_opts.add_option(platform())
    cmd_opts.add_option(python_version())
    cmd_opts.add_option(implementation())
    cmd_opts.add_option(abi())


def make_target_python(options):
    # type: (Values) -> TargetPython
    target_python = TargetPython(
        platform=options.platform,
        py_version_info=options.python_version,
        abi=options.abi,
        implementation=options.implementation,
    )

    return target_python


def prefer_binary():
    # type: () -> Option
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages."
    )


cache_dir = partial(
    PipOption,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    type='path',
    help="Store the cache data in <dir>."
)  # type: Callable[..., Option]


def _handle_no_cache_dir(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False


no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=_handle_no_cache_dir,
    help="Disable the cache.",
)  # type: Callable[..., Option]

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Callable[..., Option]


def _handle_build_dir(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    if value:
        value = os.path.abspath(value)
    setattr(parser.values, option.dest, value)


build_dir = partial(
    PipOption,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    type='path',
    metavar='dir',
    action='callback',
    callback=_handle_build_dir,
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Callable[..., Option]

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Callable[..., Option]

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Callable[..., Option]


def _handle_no_use_pep517(option, opt, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False


use_pep517 = partial(
    Option,
    '--use-pep517',
    dest='use_pep517',
    action='store_true',
    default=None,
    help='Use PEP 517 for building source distributions '
         '(use --no-use-pep517 to force legacy behaviour).'
)  # type: Any

no_use_pep517 = partial(
    Option,
    '--no-use-pep517',
    dest='use_pep517',
    action='callback',
    callback=_handle_no_use_pep517,
    default=None,
    help=SUPPRESS_HELP
)  # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use an absolute path.",
)  # type: Callable[..., Option]

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Callable[..., Option]

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Callable[..., Option]

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Callable[..., Option]

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Callable[..., Option]


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def _handle_merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to {} must be a hash name '
                     'followed by a value, like --hash=sha256:'
                     'abcde...'.format(opt_str))
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for {} are {}.'.format(
            opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)


hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_handle_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Callable[..., Option]


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Callable[..., Option]


list_path = partial(
    PipOption,
    '--path',
    dest='path',
    type='path',
    action='append',
    help='Restrict to the specified installation path for listing '
         'packages (can be used multiple times).'
)  # type: Callable[..., Option]


def check_list_path_option(options):
    # type: (Values) -> None
    if options.path and (options.user or options.local):
        raise CommandError(
            "Cannot combine '--path' with '--user' or '--local'"
        )


no_python_version_warning = partial(
    Option,
    '--no-python-version-warning',
    dest='no_python_version_warning',
    action='store_true',
    default=False,
    help='Silence deprecation warnings for upcoming unsupported Pythons.',
)  # type: Callable[..., Option]


unstable_feature = partial(
    Option,
    '--unstable-feature',
    dest='unstable_features',
    metavar='feature',
    action='append',
    default=[],
    choices=['resolver'],
    help=SUPPRESS_HELP,  # TODO: Enable this when the resolver actually works.
    # help='Enable unstable feature(s) that may be backward incompatible.',
)  # type: Callable[..., Option]


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
        no_python_version_warning,
        unstable_feature,
    ]
}  # type: Dict[str, Any]

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
}  # type: Dict[str, Any]
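Every entry in these group dicts is a callable (a partial or a factory function) that builds a fresh Option per parse, which is exactly the "define once, do not instantiate globally" rule from the module docstring. A minimal sketch of consuming a group; a plain optparse.OptionParser stands in for pip's ConfigOptionParser here, and the find-links URL is a made-up example:

# Sketch only: wiring index_group onto a parser and parsing a command line.
from optparse import OptionParser
from pip._internal.cli import cmdoptions

parser = OptionParser()
parser.add_option_group(
    cmdoptions.make_option_group(cmdoptions.index_group, parser))
options, args = parser.parse_args(
    ["--no-index", "-f", "https://example.com/wheels/"])  # hypothetical URL
print(options.no_index)    # True
print(options.find_links)  # ['https://example.com/wheels/']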
sources/pip_20.1/_internal/cli/command_context.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from contextlib import contextmanager

from pip._vendor.contextlib2 import ExitStack

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Iterator, ContextManager, TypeVar

    _T = TypeVar('_T', covariant=True)


class CommandContextMixIn(object):
    def __init__(self):
        # type: () -> None
        super(CommandContextMixIn, self).__init__()
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self):
        # type: () -> Iterator[None]
        assert not self._in_main_context

        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider):
        # type: (ContextManager[_T]) -> _T
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
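The guarantee this mixin provides is that anything entered via enter_context() lives for the duration of main_context() and is closed, in LIFO order, when it exits. A small sketch (resource() and the names "a"/"b" are illustrative):

# Sketch only: contexts entered inside main_context() are closed LIFO on exit.
from contextlib import contextmanager
from pip._internal.cli.command_context import CommandContextMixIn

@contextmanager
def resource(name):
    print("open", name)
    yield name
    print("close", name)

obj = CommandContextMixIn()
with obj.main_context():
    obj.enter_context(resource("a"))
    obj.enter_context(resource("b"))
    print("working")
# prints: open a / open b / working / close b / close a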

75  sources/pip_20.1/_internal/cli/main.py  Normal file
@@ -0,0 +1,75 @@
"""Primary application entrypoint.
"""
from __future__ import absolute_import

import locale
import logging
import os
import sys

from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import create_command
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional

logger = logging.getLogger(__name__)


# Do not import and use main() directly! Using it directly is actively
# discouraged by pip's maintainers. The name, location and behavior of
# this function is subject to change, so calling it directly is not
# portable across different pip versions.

# In addition, running pip in-process is unsupported and unsafe. This is
# elaborated in detail at
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
# That document also provides suggestions that should work for nearly
# all users that are considering importing and using main() directly.

# However, we know that certain users will still want to invoke pip
# in-process. If you understand and accept the implications of using pip
# in an unsupported manner, the best approach is to use runpy to avoid
# depending on the exact location of this entry point.

# The following example shows how to use runpy to invoke pip in that
# case:
#
#     sys.argv = ["pip", your, args, here]
#     runpy.run_module("pip", run_name="__main__")
#
# Note that this will exit the process after running, unlike a direct
# call to main. As it is not safe to do any processing after calling
# main, this should not be an issue in practice.


def main(args=None):
    # type: (Optional[List[str]]) -> int
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write("ERROR: {}".format(exc))
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
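
The comment block in this file prescribes runpy as the only reasonably stable way to invoke pip in-process; expanded into a complete script (note it exits the interpreter when pip finishes):

import runpy
import sys

# Equivalent to running `pip --version` on the command line.
sys.argv = ["pip", "--version"]
runpy.run_module("pip", run_name="__main__")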

99  sources/pip_20.1/_internal/cli/main_parser.py  Normal file
@@ -0,0 +1,99 @@
"""A single place for constructing and exposing the main parser
"""

import os
import sys

from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import (
    ConfigOptionParser,
    UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import commands_dict, get_similar_commands
from pip._internal.exceptions import CommandError
from pip._internal.utils.misc import get_pip_version, get_prog
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Tuple, List


__all__ = ["create_main_parser", "parse_command"]


def create_main_parser():
    # type: () -> ConfigOptionParser
    """Creates and returns the main parser for pip's CLI
    """

    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    # so the help formatter knows
    parser.main = True  # type: ignore

    # create command listing for description
    description = [''] + [
        '{name:27} {command_info.summary}'.format(**locals())
        for name, command_info in commands_dict.items()
    ]
    parser.description = '\n'.join(description)

    return parser


def parse_command(args):
    # type: (List[str]) -> Tuple[str, List[str]]
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)  # type: ignore
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "{}"'.format(cmd_name)]
        if guess:
            msg.append('maybe you meant "{}"'.format(guess))

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
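
The worked example in the parse_command() comments, turned into a runnable check (assumes pip 20.1 is importable):

from pip._internal.cli.main_parser import parse_command

cmd_name, cmd_args = parse_command(
    ['--timeout=5', 'install', '--user', 'INITools'])
assert cmd_name == 'install'
# The subcommand name is removed; everything else is passed through.
assert cmd_args == ['--timeout=5', '--user', 'INITools']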

266  sources/pip_20.1/_internal/cli/parser.py  Normal file
@@ -0,0 +1,266 @@
"""Base option parser setup"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import logging
import optparse
import sys
import textwrap
from distutils.util import strtobool

from pip._vendor.six import string_types

from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
from pip._internal.utils.compat import get_terminal_size

logger = logging.getLogger(__name__)


class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option)

    def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep:  separator
        """
        opts = []

        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt.format(metavar.lower()))

        return ''.join(opts)

    def format_heading(self, heading):
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: {}\n'.format(
            self.indent_lines(textwrap.dedent(usage), "  "))
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), "  ")
            description = '{}:\n{}\n'.format(label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)


class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This is updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """

    def expand_default(self, option):
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)


class CustomOptionParser(optparse.OptionParser):

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)

        self.option_groups.pop()
        self.option_groups.insert(idx, group)

        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        res = self.option_list[:]
        for i in self.option_groups:
            res.extend(i.option_list)

        return res


class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.name = kwargs.pop('name')

        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: {}".format(exc))
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                try:
                    val = strtobool(val)
                except ValueError:
                    error_msg = invalid_config_error_message(
                        option.action, key, val
                    )
                    self.error(error_msg)

            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, "{}\n".format(msg))


def invalid_config_error_message(action, key, val):
    """Returns a better error message when invalid configuration option
    is provided."""
    if action in ('store_true', 'store_false'):
        return ("{0} is not a valid value for {1} option, "
                "please specify a boolean value like yes/no, "
                "true/false or 1/0 instead.").format(val, key)

    return ("{0} is not a valid value for {1} option, "
            "please specify a numerical value like 1/0 "
            "instead.").format(val, key)
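
_update_defaults() applies configuration in the fixed order global, then the command's own section, then ":env:", with later sources overwriting earlier ones. A hypothetical pip.conf plus environment variable showing the resulting precedence (PIP_* variables populate the ":env:" section, so they win):

    [global]
    timeout = 60

    [install]
    timeout = 30

    PIP_TIMEOUT=10 pip install requests    # effective timeout: 10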

277  sources/pip_20.1/_internal/cli/progress_bars.py  Normal file
@@ -0,0 +1,277 @@
from __future__ import division

import itertools
import sys
from signal import SIGINT, default_int_handler, signal

from pip._vendor import six
from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
from pip._vendor.progress.spinner import Spinner

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Dict, List

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


def _select_progress_class(preferred, fallback):
    # type: (Bar, Bar) -> Bar
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file, if this works then we'll assume that we can use the
    # fancier bar and if not we'll fall back to the plaintext bar.
    try:
        six.text_type().join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any


class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(  # type: ignore
            *args,
            **kwargs
        )

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        # type: () -> None
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()  # type: ignore
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):  # type: ignore
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class SilentBar(Bar):

    def update(self):
        # type: () -> None
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any


class DownloadProgressMixin(object):

    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        super(DownloadProgressMixin, self).__init__(  # type: ignore
            *args,
            **kwargs
        )
        self.message = (" " * (
            get_indentation() + 2
        )) + self.message  # type: str

    @property
    def downloaded(self):
        # type: () -> str
        return format_size(self.index)  # type: ignore

    @property
    def download_speed(self):
        # type: () -> str
        # Avoid zero division errors...
        if self.avg == 0.0:  # type: ignore
            return "..."
        return format_size(1 / self.avg) + "/s"  # type: ignore

    @property
    def pretty_eta(self):
        # type: () -> str
        if self.eta:  # type: ignore
            return "eta {}".format(self.eta_td)  # type: ignore
        return ""

    def iter(self, it):  # type: ignore
        for x in it:
            yield x
            self.next(len(x))
        self.finish()


class WindowsMixin(object):

    def __init__(self, *args, **kwargs):
        # type: (List[Any], Dict[Any, Any]) -> None
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:  # type: ignore
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)  # type: ignore

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)  # type: ignore
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"

# NOTE: The "type: ignore" comments on the following classes are there to
#       work around https://github.com/python/typing/issues/241


class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    pass


class DownloadBar(BaseDownloadProgressBar,  # type: ignore
                  Bar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    pass


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, Spinner):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):  # type: ignore
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        # type: () -> None
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}


def DownloadProgressProvider(progress_bar, max=None):  # type: ignore
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter
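
A short consumption sketch for DownloadProgressProvider (illustrative only, assuming pip 20.1 and its vendored progress library are importable; the byte chunks stand in for a download stream):

from pip._internal.cli.progress_bars import DownloadProgressProvider

chunks = [b"x" * 1024] * 16
# With a known total size, "on" selects the bar variant; with max=None
# the spinner variant would be returned instead.
progress = DownloadProgressProvider("on", max=16 * 1024)
for chunk in progress(chunks):
    pass  # a real caller would write each chunk to disk here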

408  sources/pip_20.1/_internal/cli/req_command.py  Normal file
@@ -0,0 +1,408 @@
"""Contains the Command base classes that depend on PipSession.

The classes in this module are in a separate module so the commands not
needing download / PackageFinder capability don't unnecessarily import the
PackageFinder machinery and all its vendored dependencies, etc.
"""

import logging
import os
from functools import partial

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.download import Downloader
from pip._internal.network.session import PipSession
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
    install_req_from_parsed_requirement,
    install_req_from_req_string,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_set import RequirementSet
from pip._internal.self_outdated_check import (
    make_link_collector,
    pip_self_version_check,
)
from pip._internal.utils.temp_dir import tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from optparse import Values
    from typing import Any, List, Optional, Tuple

    from pip._internal.cache import WheelCache
    from pip._internal.models.target_python import TargetPython
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.req.req_tracker import RequirementTracker
    from pip._internal.resolution.base import BaseResolver
    from pip._internal.utils.temp_dir import (
        TempDirectory,
        TempDirectoryTypeRegistry,
    )


logger = logging.getLogger(__name__)


class SessionCommandMixin(CommandContextMixIn):

    """
    A class mixin for command classes needing _build_session().
    """
    def __init__(self):
        # type: () -> None
        super(SessionCommandMixin, self).__init__()
        self._session = None  # Optional[PipSession]

    @classmethod
    def _get_index_urls(cls, options):
        # type: (Values) -> Optional[List[str]]
        """Return a list of index urls from user-provided options."""
        index_urls = []
        if not getattr(options, "no_index", False):
            url = getattr(options, "index_url", None)
            if url:
                index_urls.append(url)
        urls = getattr(options, "extra_index_urls", None)
        if urls:
            index_urls.extend(urls)
        # Return None rather than an empty list
        return index_urls or None

    def get_default_session(self, options):
        # type: (Values) -> PipSession
        """Get a default-managed session."""
        if self._session is None:
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(self, options, retries=None, timeout=None):
        # type: (Values, Optional[int], Optional[int]) -> PipSession
        assert not options.cache_dir or os.path.isabs(options.cache_dir)
        session = PipSession(
            cache=(
                os.path.join(options.cache_dir, "http")
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input

        return session


class IndexGroupCommand(Command, SessionCommandMixin):

    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options):
        # type: (Values) -> None
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, 'no_index')

        if options.disable_pip_version_check or options.no_index:
            return

        # Otherwise, check if we're using the latest version of pip available.
        session = self._build_session(
            options,
            retries=0,
            timeout=min(5, options.timeout)
        )
        with session:
            pip_self_version_check(session, options)


KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]


def with_cleanup(func):
    # type: (Any) -> Any
    """Decorator for common logic related to managing temporary
    directories.
    """
    def configure_tempdir_registry(registry):
        # type: (TempDirectoryTypeRegistry) -> None
        for t in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(t, False)

    def wrapper(self, options, args):
        # type: (RequirementCommand, Values, List[Any]) -> Optional[int]
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper


class RequirementCommand(IndexGroupCommand):

    def __init__(self, *args, **kw):
        # type: (Any, Any) -> None
        super(RequirementCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())

    @staticmethod
    def make_requirement_preparer(
        temp_build_dir,           # type: TempDirectory
        options,                  # type: Values
        req_tracker,              # type: RequirementTracker
        session,                  # type: PipSession
        finder,                   # type: PackageFinder
        use_user_site,            # type: bool
        download_dir=None,        # type: str
        wheel_download_dir=None,  # type: str
    ):
        # type: (...) -> RequirementPreparer
        """
        Create a RequirementPreparer instance for the given parameters.
        """
        downloader = Downloader(session, progress_bar=options.progress_bar)

        temp_build_dir_path = temp_build_dir.path
        assert temp_build_dir_path is not None

        return RequirementPreparer(
            build_dir=temp_build_dir_path,
            src_dir=options.src_dir,
            download_dir=download_dir,
            wheel_download_dir=wheel_download_dir,
            build_isolation=options.build_isolation,
            req_tracker=req_tracker,
            downloader=downloader,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
        )

    @staticmethod
    def make_resolver(
        preparer,                            # type: RequirementPreparer
        finder,                              # type: PackageFinder
        options,                             # type: Values
        wheel_cache=None,                    # type: Optional[WheelCache]
        use_user_site=False,                 # type: bool
        ignore_installed=True,               # type: bool
        ignore_requires_python=False,        # type: bool
        force_reinstall=False,               # type: bool
        upgrade_strategy="to-satisfy-only",  # type: str
        use_pep517=None,                     # type: Optional[bool]
        py_version_info=None                 # type: Optional[Tuple[int, ...]]
    ):
        # type: (...) -> BaseResolver
        """
        Create a Resolver instance for the given parameters.
        """
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if 'resolver' in options.unstable_features:
            import pip._internal.resolution.resolvelib.resolver
            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver
        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )

    def get_requirements(
        self,
        args,     # type: List[str]
        options,  # type: Values
        finder,   # type: PackageFinder
        session,  # type: PipSession
        check_supported_wheels=True,  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """
        Parse command-line arguments into the corresponding requirements.
        """
        requirement_set = RequirementSet(
            check_supported_wheels=check_supported_wheels
        )
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                )
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req, None, isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517
                )
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        # If any requirement has hash options, enable hash checking.
        requirements = requirement_set.all_requirements
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        if not (args or options.editables or options.requirements):
            opts = {'name': self.name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to {name} '
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=' '.join(options.find_links))))
            else:
                raise CommandError(
                    'You must give at least one requirement to {name} '
                    '(see "pip help {name}")'.format(**opts))

        return requirements

    @staticmethod
    def trace_basic_info(finder):
        # type: (PackageFinder) -> None
        """
        Trace basic information about the provided objects.
        """
        # Display where finder is looking for packages
        search_scope = finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

    def _build_package_finder(
        self,
        options,                      # type: Values
        session,                      # type: PipSession
        target_python=None,           # type: Optional[TargetPython]
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> PackageFinder
        """
        Create a package finder appropriate to this requirement command.

        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        link_collector = make_link_collector(session, options=options)
        selection_prefs = SelectionPreferences(
            allow_yanked=True,
            format_control=options.format_control,
            allow_all_prereleases=options.pre,
            prefer_binary=options.prefer_binary,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )
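
Note how make_resolver() keys off options.unstable_features: the hidden --unstable-feature flag defined in cmdoptions earlier in this diff is what selects the new resolvelib-based resolver over the legacy one. Invocation sketch (the flag is suppressed from --help in this version; SomePackage is a placeholder):

    pip install --unstable-feature=resolver SomePackage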

173  sources/pip_20.1/_internal/cli/spinners.py  Normal file
@@ -0,0 +1,173 @@
from __future__ import absolute_import, division

import contextlib
import itertools
import logging
import sys
import time

from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Iterator, IO

logger = logging.getLogger(__name__)


class SpinnerInterface(object):
    def spin(self):
        # type: () -> None
        raise NotImplementedError()

    def finish(self, final_status):
        # type: (str) -> None
        raise NotImplementedError()


class InteractiveSpinner(SpinnerInterface):
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        # type: (str, IO[str], str, float) -> None
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        # type: (str) -> None
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    def __init__(self, message, min_update_interval_seconds=60):
        # type: (str, float) -> None
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        # type: (str) -> None
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._update(
            "finished with status '{final_status}'".format(**locals()))
        self._finished = True


class RateLimiter(object):
    def __init__(self, min_update_interval_seconds):
        # type: (float) -> None
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0  # type: float

    def ready(self):
        # type: () -> bool
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        # type: () -> None
        self._last_update = time.time()


@contextlib.contextmanager
def open_spinner(message):
    # type: (str) -> Iterator[SpinnerInterface]
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")


@contextlib.contextmanager
def hidden_cursor(file):
    # type: (IO[str]) -> Iterator[None]
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
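
A minimal caller for open_spinner() (illustrative only, assumes pip 20.1 is importable): on a TTY at INFO level this animates an InteractiveSpinner; otherwise NonInteractiveSpinner logs periodic keep-alive messages instead.

import time

from pip._internal.cli.spinners import open_spinner

with open_spinner("Doing slow work") as spinner:
    for _ in range(20):
        time.sleep(0.1)
        spinner.spin()  # rate-limited internally; cheap to call in a loop
# On normal exit the spinner prints "done"; on an exception, "error".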

8  sources/pip_20.1/_internal/cli/status_codes.py  Normal file
@@ -0,0 +1,8 @@
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23

122  sources/pip_20.1/_internal/commands/__init__.py  Normal file
@@ -0,0 +1,122 @@
"""
Package containing all pip commands
"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
# There is currently a bug in python/typeshed mentioned at
# https://github.com/python/typeshed/issues/3906 which causes the
# return type of difflib.get_close_matches to be reported
# as List[Sequence[str]] whereas it should have been List[str]

from __future__ import absolute_import

import importlib
from collections import OrderedDict, namedtuple

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any
    from pip._internal.cli.base_command import Command


CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')

# The ordering matters for help display.
# Also, even though the module path starts with the same
# "pip._internal.commands" prefix in each case, we include the full path
# because it makes testing easier (specifically when modifying commands_dict
# in test setup / teardown by adding info for a FakeCommand class defined
# in a test-related module).
# Finally, we need to pass an iterable of pairs here rather than a dict
# so that the ordering won't be lost when using Python 2.7.
commands_dict = OrderedDict([
    ('install', CommandInfo(
        'pip._internal.commands.install', 'InstallCommand',
        'Install packages.',
    )),
    ('download', CommandInfo(
        'pip._internal.commands.download', 'DownloadCommand',
        'Download packages.',
    )),
    ('uninstall', CommandInfo(
        'pip._internal.commands.uninstall', 'UninstallCommand',
        'Uninstall packages.',
    )),
    ('freeze', CommandInfo(
        'pip._internal.commands.freeze', 'FreezeCommand',
        'Output installed packages in requirements format.',
    )),
    ('list', CommandInfo(
        'pip._internal.commands.list', 'ListCommand',
        'List installed packages.',
    )),
    ('show', CommandInfo(
        'pip._internal.commands.show', 'ShowCommand',
        'Show information about installed packages.',
    )),
    ('check', CommandInfo(
        'pip._internal.commands.check', 'CheckCommand',
        'Verify installed packages have compatible dependencies.',
    )),
    ('config', CommandInfo(
        'pip._internal.commands.configuration', 'ConfigurationCommand',
        'Manage local and global configuration.',
    )),
    ('search', CommandInfo(
        'pip._internal.commands.search', 'SearchCommand',
        'Search PyPI for packages.',
    )),
    ('cache', CommandInfo(
        'pip._internal.commands.cache', 'CacheCommand',
        "Inspect and manage pip's wheel cache.",
    )),
    ('wheel', CommandInfo(
        'pip._internal.commands.wheel', 'WheelCommand',
        'Build wheels from your requirements.',
    )),
    ('hash', CommandInfo(
        'pip._internal.commands.hash', 'HashCommand',
        'Compute hashes of package archives.',
    )),
    ('completion', CommandInfo(
        'pip._internal.commands.completion', 'CompletionCommand',
        'A helper command used for command completion.',
    )),
    ('debug', CommandInfo(
        'pip._internal.commands.debug', 'DebugCommand',
        'Show information useful for debugging.',
    )),
    ('help', CommandInfo(
        'pip._internal.commands.help', 'HelpCommand',
        'Show help for commands.',
    )),
])  # type: OrderedDict[str, CommandInfo]


def create_command(name, **kwargs):
    # type: (str, **Any) -> Command
    """
    Create an instance of the Command class with the given name.
    """
    module_path, class_name, summary = commands_dict[name]
    module = importlib.import_module(module_path)
    command_class = getattr(module, class_name)
    command = command_class(name=name, summary=summary, **kwargs)

    return command


def get_similar_commands(name):
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return False
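
A quick sketch of the lazy lookup above (assumes pip 20.1 is importable):

from pip._internal.commands import create_command, get_similar_commands

cmd = create_command('install')        # imports the module on first use
print(cmd.name, '-', cmd.summary)      # install - Install packages.
print(get_similar_commands('instal'))  # difflib suggests: install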
181 sources/pip_20.1/_internal/commands/cache.py (new file)
@@ -0,0 +1,181 @@
from __future__ import absolute_import

import logging
import os
import textwrap

import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from optparse import Values
    from typing import Any, List


logger = logging.getLogger(__name__)


class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    dir: Show the cache directory.
    info: Show information about the cache.
    list: List filenames of packages stored in the cache.
    remove: Remove one or more package from the cache.
    purge: Remove all items from the cache.

    <pattern> can be a glob expression or a package name.
    """

    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>]
        %prog remove <pattern>
        %prog purge
    """

    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not "
                         "function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options, args):
        # type: (Values, List[Any]) -> None
        if args:
            raise CommandError('Too many arguments')

        logger.info(options.cache_dir)

    def get_cache_info(self, options, args):
        # type: (Values, List[Any]) -> None
        if args:
            raise CommandError('Too many arguments')

        num_packages = len(self._find_wheels(options, '*'))

        cache_location = self._wheels_cache_dir(options)
        cache_size = filesystem.format_directory_size(cache_location)

        message = textwrap.dedent("""
            Location: {location}
            Size: {size}
            Number of wheels: {package_count}
        """).format(
            location=cache_location,
            package_count=num_packages,
            size=cache_size,
        ).strip()

        logger.info(message)

    def list_cache_items(self, options, args):
        # type: (Values, List[Any]) -> None
        if len(args) > 1:
            raise CommandError('Too many arguments')

        if args:
            pattern = args[0]
        else:
            pattern = '*'

        files = self._find_wheels(options, pattern)

        if not files:
            logger.info('Nothing cached.')
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(' - {} ({})'.format(wheel, size))
        logger.info('Cache contents:\n')
        logger.info('\n'.join(sorted(results)))

    def remove_cache_items(self, options, args):
        # type: (Values, List[Any]) -> None
        if len(args) > 1:
            raise CommandError('Too many arguments')

        if not args:
            raise CommandError('Please provide a pattern')

        files = self._find_wheels(options, args[0])
        if not files:
            raise CommandError('No matching packages')

        for filename in files:
            os.unlink(filename)
            logger.debug('Removed %s', filename)
        logger.info('Files removed: %s', len(files))

    def purge_cache(self, options, args):
        # type: (Values, List[Any]) -> None
        if args:
            raise CommandError('Too many arguments')

        return self.remove_cache_items(options, ['*'])

    def _wheels_cache_dir(self, options):
        # type: (Values) -> str
        return os.path.join(options.cache_dir, 'wheels')

    def _find_wheels(self, options, pattern):
        # type: (Values, str) -> List[str]
        wheel_dir = self._wheels_cache_dir(options)

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
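The PEP 427 comment in _find_wheels() above is the whole trick: because wheel filenames can never contain a hyphen before the version, a name-only pattern can be anchored with `-*.whl`. A short standalone sketch, checked against fnmatch (the same glob semantics the filesystem helper relies on):

    # Sketch of the pattern expansion used by _find_wheels() above.
    from fnmatch import fnmatch

    def expand(pattern):
        # A name-only pattern gets '-*.whl' so it stops at the name/version
        # hyphen; a pattern that already contains '-' gets plain '*.whl'.
        return pattern + ("*.whl" if "-" in pattern else "-*.whl")

    wheel = 'requests-2.23.0-py2.py3-none-any.whl'
    assert fnmatch(wheel, expand('requests'))          # name only
    assert fnmatch(wheel, expand('requests-2.23.0'))   # name + version
    assert not fnmatch(wheel, expand('requests-2.24')) # different version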
51 sources/pip_20.1/_internal/commands/check.py (new file)
@@ -0,0 +1,51 @@
import logging

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
    check_package_set,
    create_package_set_from_installed,
)
from pip._internal.utils.misc import write_output
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from typing import List, Any
    from optparse import Values


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options, args):
        # type: (Values, List[Any]) -> int

        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting or parsing_probs:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
95 sources/pip_20.1/_internal/commands/completion.py (new file)
@@ -0,0 +1,95 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import sys
import textwrap

from pip._internal.cli.base_command import Command
from pip._internal.utils.misc import get_prog

BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    'zsh': """
        function _pip_completion {{
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
        }}
        compctl -K _pip_completion {prog}
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\  -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '').format(
                    prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
        else:
            sys.stderr.write(
                'ERROR: You must pass {}\n' .format(' or '.join(shell_options))
            )
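The emitted shell function simply re-invokes pip with PIP_AUTO_COMPLETE=1 and the current COMP_WORDS/COMP_CWORD, and pip answers with candidate words. Rendering boils down to two str.format() calls plus textwrap.dedent(); a runnable sketch of the same assembly (simplified template, not the vendored module itself):

    # Sketch of what `pip completion --bash` prints.
    import textwrap

    base = "\n# pip {shell} completion start{script}# pip {shell} completion end\n"
    bash_template = """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """

    script = textwrap.dedent(bash_template.format(prog='pip'))
    print(base.format(script=script, shell='bash'))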
233 sources/pip_20.1/_internal/commands/configuration.py (new file)
@@ -0,0 +1,233 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

import logging
import os
import subprocess

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import (
    Configuration,
    get_configuration_files,
    kinds,
)
from pip._internal.exceptions import PipError
from pip._internal.utils.misc import get_prog, write_output

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """

    def __init__(self, *args, **kwargs):
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--site',
            dest='site_file',
            action='store_true',
            default=False,
            help='Use the current environment configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        file_options = [key for key, value in (
            (kinds.USER, options.user_file),
            (kinds.GLOBAL, options.global_file),
            (kinds.SITE, options.site_file),
        ) if value]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options, args):
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options, args):
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options, args):
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def open_in_editor(self, options, args):
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.error(
                "Unable to save configuration. Please report this as a bug.",
                exc_info=1
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
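The editor lookup used by `pip config edit` follows a fixed precedence: the --editor option wins, then $VISUAL, then $EDITOR. A minimal standalone sketch of that chain (illustrative names; it raises RuntimeError where the real code raises PipError, and it treats an empty string as unset):

    # Sketch of the editor resolution order in _determine_editor() above.
    import os

    def determine_editor(editor_option=None, env=os.environ):
        for candidate in (editor_option, env.get("VISUAL"), env.get("EDITOR")):
            if candidate:
                return candidate
        raise RuntimeError("Could not determine editor to use.")

    # With VISUAL=vim and EDITOR=nano set, this returns 'vim':
    print(determine_editor(env={"VISUAL": "vim", "EDITOR": "nano"}))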
237 sources/pip_20.1/_internal/commands/debug.py (new file)
@@ -0,0 +1,237 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import locale
import logging
import os
import sys

import pip._vendor
from pip._vendor import pkg_resources
from pip._vendor.certifi import where

from pip import __file__ as pip_location
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import get_pip_version
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from types import ModuleType
    from typing import Any, List, Optional, Dict
    from optparse import Values

logger = logging.getLogger(__name__)


def show_value(name, value):
    # type: (str, Optional[str]) -> None
    logger.info('{}: {}'.format(name, value))


def show_sys_implementation():
    # type: () -> None
    logger.info('sys.implementation:')
    if hasattr(sys, 'implementation'):
        implementation = sys.implementation  # type: ignore
        implementation_name = implementation.name
    else:
        implementation_name = ''

    with indent_log():
        show_value('name', implementation_name)


def create_vendor_txt_map():
    # type: () -> Dict[str, str]
    vendor_txt_path = os.path.join(
        os.path.dirname(pip_location),
        '_vendor',
        'vendor.txt'
    )

    with open(vendor_txt_path) as f:
        # Purge non version specifying lines.
        # Also, remove any space prefix or suffixes (including comments).
        lines = [line.strip().split(' ', 1)[0]
                 for line in f.readlines() if '==' in line]

    # Transform into "module" -> version dict.
    return dict(line.split('==', 1) for line in lines)  # type: ignore


def get_module_from_module_name(module_name):
    # type: (str) -> ModuleType

    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower()
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == 'setuptools':
        module_name = 'pkg_resources'

    __import__(
        'pip._vendor.{}'.format(module_name),
        globals(),
        locals(),
        level=0
    )
    return getattr(pip._vendor, module_name)


def get_vendor_version_from_module(module_name):
    # type: (str) -> str

    module = get_module_from_module_name(module_name)
    version = getattr(module, '__version__', None)

    if not version:
        # Try to find version in debundled module info
        pkg_set = pkg_resources.WorkingSet(
            [os.path.dirname(getattr(module, '__file__'))]
        )
        package = pkg_set.find(pkg_resources.Requirement.parse(module_name))
        version = getattr(package, 'version', None)

    return version


def show_actual_vendor_versions(vendor_txt_versions):
    # type: (Dict[str, str]) -> None
    # Logs the actual version and print extra info
    # if there is a conflict or if the actual version could not be imported.

    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ''
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            extra_message = ' (Unable to locate actual module version, using'\
                            ' vendor.txt specified version)'
            actual_version = expected_version
        elif actual_version != expected_version:
            extra_message = ' (CONFLICT: vendor.txt suggests version should'\
                            ' be {})'.format(expected_version)

        logger.info(
            '{name}=={actual}{extra}'.format(
                name=module_name,
                actual=actual_version,
                extra=extra_message
            )
        )


def show_vendor_versions():
    # type: () -> None
    logger.info('vendored library versions:')

    vendor_txt_versions = create_vendor_txt_map()
    with indent_log():
        show_actual_vendor_versions(vendor_txt_versions)


def show_tags(options):
    # type: (Values) -> None
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = ''
    if formatted_target:
        suffix = ' (target: {})'.format(formatted_target)

    msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
    logger.info(msg)

    if options.verbose < 1 and len(tags) > tag_limit:
        tags_limited = True
        tags = tags[:tag_limit]
    else:
        tags_limited = False

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            msg = (
                '...\n'
                '[First {tag_limit} tags shown. Pass --verbose to show all.]'
            ).format(tag_limit=tag_limit)
            logger.info(msg)


def ca_bundle_info(config):
    levels = set()
    for key, value in config.items():
        levels.add(key.split('.')[0])

    if not levels:
        return "Not specified"

    levels_that_override_global = ['install', 'wheel', 'download']
    global_overriding_level = [
        level for level in levels if level in levels_that_override_global
    ]
    if not global_overriding_level:
        return 'global'

    if 'global' in levels:
        levels.remove('global')
    return ", ".join(levels)


class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(DebugCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts
        cmdoptions.add_target_python_options(cmd_opts)
        self.parser.insert_option_group(0, cmd_opts)
        self.parser.config.load()

    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        show_value('pip version', get_pip_version())
        show_value('sys.version', sys.version)
        show_value('sys.executable', sys.executable)
        show_value('sys.getdefaultencoding', sys.getdefaultencoding())
        show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
        show_value(
            'locale.getpreferredencoding', locale.getpreferredencoding(),
        )
        show_value('sys.platform', sys.platform)
        show_sys_implementation()

        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
        show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        show_vendor_versions()

        show_tags(options)

        return SUCCESS
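The vendor.txt parsing in create_vendor_txt_map() above is a two-step filter: keep only lines containing '==', then split each into a name/version pair. A self-contained sketch against sample data (the entries below are illustrative, not read from a real vendor.txt):

    # Sketch of the 'name==version' parsing in create_vendor_txt_map() above.
    sample = """\
    appdirs==1.4.3
    CacheControl==0.12.6
    # a comment line, ignored
    colorama==0.4.3
    """

    lines = [line.strip().split(' ', 1)[0]
             for line in sample.splitlines() if '==' in line]
    versions = dict(line.split('==', 1) for line in lines)
    print(versions)
    # -> {'appdirs': '1.4.3', 'CacheControl': '0.12.6', 'colorama': '0.4.3'}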
142 sources/pip_20.1/_internal/commands/download.py (new file)
@@ -0,0 +1,142 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import logging
import os

from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmdoptions.add_target_python_options(cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    @with_cleanup
    def run(self, options, args):
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
        )
        build_delete = (not (options.no_clean or options.build_dir))

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            options.build_dir,
            delete=build_delete,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(
            reqs, check_supported_wheels=True
        )

        downloaded = ' '.join([
            req.name for req in requirement_set.requirements.values()
            if req.successfully_downloaded
        ])
        if downloaded:
            write_output('Successfully downloaded %s', downloaded)

        return requirement_set
99 sources/pip_20.1/_internal/commands/freeze.py (new file)
@@ -0,0 +1,99 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import sys

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.models.format_control import FormatControl
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' {}'.format(', '.join(DEV_PKGS)))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable package from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        format_control = FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        cmdoptions.check_list_path_option(options)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        for line in freeze(**freeze_kwargs):
            sys.stdout.write(line + '\n')
58 sources/pip_20.1/_internal/commands/hash.py (new file)
@@ -0,0 +1,58 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import hashlib
import logging
import sys

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks, write_output

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = '%prog [options] <file> ...'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of {}'.format(
                ', '.join(STRONG_HASHES)))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            write_output('%s:\n--hash=%s:%s',
                         path, algorithm, _hash_of_file(path, algorithm))


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
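_hash_of_file() above streams the archive in chunks so large files never have to fit in memory. A self-contained sketch that inlines the read loop pip's read_chunks() helper provides (the 8192-byte chunk size here is an assumption for illustration):

    # Sketch of the chunked hashing in _hash_of_file() above.
    import hashlib

    def hash_of_file(path, algorithm='sha256', chunk_size=8192):
        digest = hashlib.new(algorithm)
        with open(path, 'rb') as archive:
            for chunk in iter(lambda: archive.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # The command prints output in the form pip expects in requirements files:
    #   some-package.tar.gz:
    #   --hash=sha256:<hex digest>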
41 sources/pip_20.1/_internal/commands/help.py (new file)
@@ -0,0 +1,41 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options, args):
        from pip._internal.commands import (
            commands_dict, create_command, get_similar_commands,
        )

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "{}"'.format(cmd_name)]
            if guess:
                msg.append('maybe you meant "{}"'.format(guess))

            raise CommandError(' - '.join(msg))

        command = create_command(cmd_name)
        command.parser.print_help()

        return SUCCESS
691 sources/pip_20.1/_internal/commands/install.py (new file)
@@ -0,0 +1,691 @@
# The following comment should be removed at some point in the future.
# It's included for now because without it InstallCommand.run() has a
# couple errors where we have to know req.name is str rather than
# Optional[str] for the InstallRequirement req.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import errno
import logging
import operator
import os
import shutil
import site
from optparse import SUPPRESS_HELP

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.locations import distutils_scheme
from pip._internal.operations.check import check_install_conflicts
from pip._internal.req import install_given_reqs
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.distutils_args import parse_distutils_args
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.misc import (
    ensure_dir,
    get_installed_version,
    protect_pip_from_modification_on_windows,
    write_output,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import virtualenv_no_global
from pip._internal.wheel_builder import build, should_build_for_install_command

if MYPY_CHECK_RUNNING:
    from optparse import Values
    from typing import Any, Iterable, List, Optional

    from pip._internal.models.format_control import FormatControl
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.wheel_builder import BinaryAllowedPredicate


logger = logging.getLogger(__name__)


def get_check_binary_allowed(format_control):
    # type: (FormatControl) -> BinaryAllowedPredicate
    def check_binary_allowed(req):
        # type: (InstallRequirement) -> bool
        if req.use_pep517:
            return True
        canonical_name = canonicalize_name(req.name)
        allowed_formats = format_control.get_allowed_formats(canonical_name)
        return "binary" in allowed_formats

    return check_binary_allowed


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmdoptions.add_target_python_options(cmd_opts)

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages, overwriting them. '
                 'This can break your system if the existing package '
                 'is of a different version or was installed '
                 'with a different package manager!'
        )

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    @with_cleanup
    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        if options.use_user_site and options.target_dir is not None:
            raise CommandError("Can not combine '--user' and '--target'")

        cmdoptions.check_install_build_global(options)
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        cmdoptions.check_dist_restriction(options, check_target=True)

        install_options = options.install_options or []

        options.use_user_site = decide_user_install(
            options.use_user_site,
            prefix_path=options.prefix_path,
            target_dir=options.target_dir,
            root_path=options.root_path,
            isolated_mode=options.isolated_mode,
        )

        target_temp_dir = None  # type: Optional[TempDirectory]
        target_temp_dir_path = None  # type: Optional[str]
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir = TempDirectory(kind="target")
            target_temp_dir_path = target_temp_dir.path

        global_options = options.global_options or []

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            options.build_dir,
            delete=build_delete,
            kind="install",
            globally_managed=True,
        )

        try:
            reqs = self.get_requirements(
                args, options, finder, session,
                check_supported_wheels=not options.target_dir,
            )

            warn_deprecated_install_options(
                reqs, options.install_options
            )

            preparer = self.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                req_tracker=req_tracker,
                session=session,
                finder=finder,
                use_user_site=options.use_user_site,
            )
            resolver = self.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                wheel_cache=wheel_cache,
                use_user_site=options.use_user_site,
                ignore_installed=options.ignore_installed,
                ignore_requires_python=options.ignore_requires_python,
                force_reinstall=options.force_reinstall,
                upgrade_strategy=upgrade_strategy,
                use_pep517=options.use_pep517,
            )

            self.trace_basic_info(finder)

            requirement_set = resolver.resolve(
                reqs, check_supported_wheels=not options.target_dir
            )

            try:
                pip_req = requirement_set.get_requirement("pip")
            except KeyError:
                modifying_pip = None
            else:
                # If we're not replacing an already installed pip,
                # we're not modifying it.
                modifying_pip = pip_req.satisfied_by is None
            protect_pip_from_modification_on_windows(
                modifying_pip=modifying_pip
            )

            check_binary_allowed = get_check_binary_allowed(
                finder.format_control
            )

            reqs_to_build = [
                r for r in requirement_set.requirements.values()
                if should_build_for_install_command(
                    r, check_binary_allowed
                )
            ]

            _, build_failures = build(
                reqs_to_build,
                wheel_cache=wheel_cache,
                build_options=[],
                global_options=[],
            )

            # If we're using PEP 517, we cannot do a direct install
            # so we fail here.
            # We don't care about failures building legacy
            # requirements, as we'll fall through to a direct
            # install for those.
            pep517_build_failures = [
                r for r in build_failures if r.use_pep517
            ]
            if pep517_build_failures:
                raise InstallationError(
                    "Could not build wheels for {} which use"
                    " PEP 517 and cannot be installed directly".format(
                        ", ".join(r.name for r in pep517_build_failures)))

            to_install = resolver.get_installation_order(
                requirement_set
            )

            # Consistency Checking of the package set we're installing.
            should_warn_about_conflicts = (
                not options.ignore_dependencies and
                options.warn_about_conflicts
            )
            if should_warn_about_conflicts:
                self._warn_about_conflicts(to_install)

            # Don't warn about script install locations if
            # --target has been specified
            warn_script_location = options.warn_script_location
            if options.target_dir:
                warn_script_location = False

            installed = install_given_reqs(
                to_install,
                install_options,
                global_options,
                root=options.root_path,
                home=target_temp_dir_path,
                prefix=options.prefix_path,
                pycompile=options.compile,
                warn_script_location=warn_script_location,
                use_user_site=options.use_user_site,
            )

            lib_locations = get_lib_location_guesses(
                user=options.use_user_site,
                home=target_temp_dir_path,
                root=options.root_path,
                prefix=options.prefix_path,
                isolated=options.isolated_mode,
            )
            working_set = pkg_resources.WorkingSet(lib_locations)

            installed.sort(key=operator.attrgetter('name'))
            items = []
            for result in installed:
                item = result.name
                try:
                    installed_version = get_installed_version(
                        result.name, working_set=working_set
                    )
                    if installed_version:
                        item += '-' + installed_version
                except Exception:
                    pass
                items.append(item)
            installed_desc = ' '.join(items)
            if installed_desc:
                write_output(
                    'Successfully installed %s', installed_desc,
                )
        except EnvironmentError as error:
            show_traceback = (self.verbosity >= 1)

            message = create_env_error_message(
                error, show_traceback, options.use_user_site,
            )
            logger.error(message, exc_info=show_traceback)

            return ERROR

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )

        return SUCCESS

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
|
||||||
|
lib_dir_list.append(platlib_dir)
|
||||||
|
if os.path.exists(data_dir):
|
||||||
|
lib_dir_list.append(data_dir)
|
||||||
|
|
||||||
|
for lib_dir in lib_dir_list:
|
||||||
|
for item in os.listdir(lib_dir):
|
||||||
|
if lib_dir == data_dir:
|
||||||
|
ddir = os.path.join(data_dir, item)
|
||||||
|
if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
|
||||||
|
continue
|
||||||
|
target_item_dir = os.path.join(target_dir, item)
|
||||||
|
if os.path.exists(target_item_dir):
|
||||||
|
if not upgrade:
|
||||||
|
logger.warning(
|
||||||
|
'Target directory %s already exists. Specify '
|
||||||
|
'--upgrade to force replacement.',
|
||||||
|
target_item_dir
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
if os.path.islink(target_item_dir):
|
||||||
|
logger.warning(
|
||||||
|
'Target directory %s already exists and is '
|
||||||
|
'a link. pip will not automatically replace '
|
||||||
|
'links, please remove if replacement is '
|
||||||
|
'desired.',
|
||||||
|
target_item_dir
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
if os.path.isdir(target_item_dir):
|
||||||
|
shutil.rmtree(target_item_dir)
|
||||||
|
else:
|
||||||
|
os.remove(target_item_dir)
|
||||||
|
|
||||||
|
shutil.move(
|
||||||
|
os.path.join(lib_dir, item),
|
||||||
|
target_item_dir
|
||||||
|
)
|
||||||
|
|
||||||
|
def _warn_about_conflicts(self, to_install):
|
||||||
|
try:
|
||||||
|
package_set, _dep_info = check_install_conflicts(to_install)
|
||||||
|
except Exception:
|
||||||
|
logger.error("Error checking for conflicts.", exc_info=True)
|
||||||
|
return
|
||||||
|
missing, conflicting = _dep_info
|
||||||
|
|
||||||
|
# NOTE: There is some duplication here from pip check
|
||||||
|
for project_name in missing:
|
||||||
|
version = package_set[project_name][0]
|
||||||
|
for dependency in missing[project_name]:
|
||||||
|
logger.critical(
|
||||||
|
"%s %s requires %s, which is not installed.",
|
||||||
|
project_name, version, dependency[1],
|
||||||
|
)
|
||||||
|
|
||||||
|
for project_name in conflicting:
|
||||||
|
version = package_set[project_name][0]
|
||||||
|
for dep_name, dep_version, req in conflicting[project_name]:
|
||||||
|
logger.critical(
|
||||||
|
"%s %s has requirement %s, but you'll have %s %s which is "
|
||||||
|
"incompatible.",
|
||||||
|
project_name, version, req, dep_name, dep_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_lib_location_guesses(*args, **kwargs):
|
||||||
|
scheme = distutils_scheme('', *args, **kwargs)
|
||||||
|
return [scheme['purelib'], scheme['platlib']]
|
||||||
|
|
||||||
|
|
||||||
|
def site_packages_writable(**kwargs):
|
||||||
|
return all(
|
||||||
|
test_writable_dir(d) for d in set(get_lib_location_guesses(**kwargs))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def decide_user_install(
|
||||||
|
use_user_site, # type: Optional[bool]
|
||||||
|
prefix_path=None, # type: Optional[str]
|
||||||
|
target_dir=None, # type: Optional[str]
|
||||||
|
root_path=None, # type: Optional[str]
|
||||||
|
isolated_mode=False, # type: bool
|
||||||
|
):
|
||||||
|
# type: (...) -> bool
|
||||||
|
"""Determine whether to do a user install based on the input options.
|
||||||
|
|
||||||
|
If use_user_site is False, no additional checks are done.
|
||||||
|
If use_user_site is True, it is checked for compatibility with other
|
||||||
|
options.
|
||||||
|
If use_user_site is None, the default behaviour depends on the environment,
|
||||||
|
which is provided by the other arguments.
|
||||||
|
"""
|
||||||
|
# In some cases (config from tox), use_user_site can be set to an integer
|
||||||
|
# rather than a bool, which 'use_user_site is False' wouldn't catch.
|
||||||
|
if (use_user_site is not None) and (not use_user_site):
|
||||||
|
logger.debug("Non-user install by explicit request")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if use_user_site:
|
||||||
|
if prefix_path:
|
||||||
|
raise CommandError(
|
||||||
|
"Can not combine '--user' and '--prefix' as they imply "
|
||||||
|
"different installation locations"
|
||||||
|
)
|
||||||
|
if virtualenv_no_global():
|
||||||
|
raise InstallationError(
|
||||||
|
"Can not perform a '--user' install. User site-packages "
|
||||||
|
"are not visible in this virtualenv."
|
||||||
|
)
|
||||||
|
logger.debug("User install by explicit request")
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If we are here, user installs have not been explicitly requested/avoided
|
||||||
|
assert use_user_site is None
|
||||||
|
|
||||||
|
# user install incompatible with --prefix/--target
|
||||||
|
if prefix_path or target_dir:
|
||||||
|
logger.debug("Non-user install due to --prefix or --target option")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# If user installs are not enabled, choose a non-user install
|
||||||
|
if not site.ENABLE_USER_SITE:
|
||||||
|
logger.debug("Non-user install because user site-packages disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# If we have permission for a non-user install, do that,
|
||||||
|
# otherwise do a user install.
|
||||||
|
if site_packages_writable(root=root_path, isolated=isolated_mode):
|
||||||
|
logger.debug("Non-user install because site-packages writeable")
|
||||||
|
return False
|
||||||
|
|
||||||
|
logger.info("Defaulting to user installation because normal site-packages "
|
||||||
|
"is not writeable")
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def warn_deprecated_install_options(requirements, options):
|
||||||
|
# type: (List[InstallRequirement], Optional[List[str]]) -> None
|
||||||
|
"""If any location-changing --install-option arguments were passed for
|
||||||
|
requirements or on the command-line, then show a deprecation warning.
|
||||||
|
"""
|
||||||
|
def format_options(option_names):
|
||||||
|
# type: (Iterable[str]) -> List[str]
|
||||||
|
return ["--{}".format(name.replace("_", "-")) for name in option_names]
|
||||||
|
|
||||||
|
offenders = []
|
||||||
|
|
||||||
|
for requirement in requirements:
|
||||||
|
install_options = requirement.install_options
|
||||||
|
location_options = parse_distutils_args(install_options)
|
||||||
|
if location_options:
|
||||||
|
offenders.append(
|
||||||
|
"{!r} from {}".format(
|
||||||
|
format_options(location_options.keys()), requirement
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if options:
|
||||||
|
location_options = parse_distutils_args(options)
|
||||||
|
if location_options:
|
||||||
|
offenders.append(
|
||||||
|
"{!r} from command line".format(
|
||||||
|
format_options(location_options.keys())
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if not offenders:
|
||||||
|
return
|
||||||
|
|
||||||
|
deprecated(
|
||||||
|
reason=(
|
||||||
|
"Location-changing options found in --install-option: {}. "
|
||||||
|
"This configuration may cause unexpected behavior and is "
|
||||||
|
"unsupported.".format(
|
||||||
|
"; ".join(offenders)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
replacement=(
|
||||||
|
"using pip-level options like --user, --prefix, --root, and "
|
||||||
|
"--target"
|
||||||
|
),
|
||||||
|
gone_in="20.2",
|
||||||
|
issue=7309,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_env_error_message(error, show_traceback, using_user_site):
|
||||||
|
"""Format an error message for an EnvironmentError
|
||||||
|
|
||||||
|
It may occur anytime during the execution of the install command.
|
||||||
|
"""
|
||||||
|
parts = []
|
||||||
|
|
||||||
|
# Mention the error if we are not going to show a traceback
|
||||||
|
parts.append("Could not install packages due to an EnvironmentError")
|
||||||
|
if not show_traceback:
|
||||||
|
parts.append(": ")
|
||||||
|
parts.append(str(error))
|
||||||
|
else:
|
||||||
|
parts.append(".")
|
||||||
|
|
||||||
|
# Spilt the error indication from a helper message (if any)
|
||||||
|
parts[-1] += "\n"
|
||||||
|
|
||||||
|
# Suggest useful actions to the user:
|
||||||
|
# (1) using user site-packages or (2) verifying the permissions
|
||||||
|
if error.errno == errno.EACCES:
|
||||||
|
user_option_part = "Consider using the `--user` option"
|
||||||
|
permissions_part = "Check the permissions"
|
||||||
|
|
||||||
|
if not using_user_site:
|
||||||
|
parts.extend([
|
||||||
|
user_option_part, " or ",
|
||||||
|
permissions_part.lower(),
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
parts.append(permissions_part)
|
||||||
|
parts.append(".\n")
|
||||||
|
|
||||||
|
return "".join(parts).strip() + "\n"
|
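
As a quick illustration of the decision logic above — a minimal sketch, not part of the vendored sources, and relying on pip's internal (unstable) API:

# Illustrative only: exercising decide_user_install() from the vendored
# module above. pip._internal is not a stable public API.
from pip._internal.commands.install import decide_user_install

# With no explicit --user/--prefix/--target, the answer depends on whether
# site-packages is writable in the current environment.
print(decide_user_install(use_user_site=None))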
299 sources/pip_20.1/_internal/commands/list.py Normal file
@ -0,0 +1,299 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import json
import logging

from pip._vendor import six

from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.exceptions import CommandError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.self_outdated_check import make_link_collector
from pip._internal.utils.misc import (
    dist_is_editable,
    get_installed_distributions,
    tabulate,
    write_output,
)
from pip._internal.utils.packaging import get_installer

logger = logging.getLogger(__name__)


class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(cmdoptions.list_path())
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, session):
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = make_link_collector(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options, args):
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        cmdoptions.check_list_path_option(options)

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
            paths=options.path,
        )

        # get_not_required must be called first in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as a requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(dist):
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.project_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(self, packages, options):
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    write_output("%s==%s (%s)", dist.project_name,
                                 dist.version, dist.location)
                else:
                    write_output("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            write_output(val)


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if options.verbose >= 1:
            row.append(get_installer(proj))

        data.append(row)

    return data, header


def format_for_json(packages, options):
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
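
The JSON format produced by format_for_json above is a flat list of objects, which makes `pip list` easy to consume from scripts; a minimal sketch, assuming only a `pip` executable on PATH:

# Parse `pip list --format=json` output; 'name' and 'version' keys are
# always present, per format_for_json above.
import json
import subprocess

out = subprocess.check_output(["pip", "list", "--format=json"])
for pkg in json.loads(out):
    print(pkg["name"], pkg["version"])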
146 sources/pip_20.1/_internal/commands/search.py Normal file
@ -0,0 +1,146 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import logging
import sys
import textwrap
from collections import OrderedDict

from pip._vendor import pkg_resources
from pip._vendor.packaging.version import parse as parse_version
# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore

from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.models.index import PyPI
from pip._internal.network.xmlrpc import PipXmlrpcTransport
from pip._internal.utils.compat import get_terminal_size
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc_client.ServerProxy(index_url, transport)
        hits = pypi.search({'name': query, 'summary': query}, 'or')
        return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())


def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '{name_latest:{name_column_width}} - {summary}'.format(
            name_latest='{name} ({latest})'.format(**locals()),
            **locals())
        try:
            write_output(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        write_output('INSTALLED: %s (latest)', dist.version)
                    else:
                        write_output('INSTALLED: %s', dist.version)
                        if parse_version(latest).pre:
                            write_output('LATEST:    %s (pre-release; install'
                                         ' with "pip install --pre")', latest)
                        else:
                            write_output('LATEST:    %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    return max(versions, key=parse_version)
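
To see how transform_hits above folds the per-version records returned by the XML-RPC endpoint into one entry per package (keeping the summary of the highest version), here is a small worked example with made-up hit data, using pip's internal (unstable) API:

# Illustrative only, not part of the vendored sources.
from pip._internal.commands.search import transform_hits

hits = [
    {'name': 'demo', 'summary': 'old summary', 'version': '1.0'},
    {'name': 'demo', 'summary': 'new summary', 'version': '2.0'},
]
print(transform_hits(hits))
# -> [{'name': 'demo', 'summary': 'new summary', 'versions': ['1.0', '2.0']}]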
180 sources/pip_20.1/_internal/commands/show.py Normal file
@ -0,0 +1,180 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import logging
import os
from email.parser import FeedParser

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
                results, list_files=options.files, verbose=options.verbose):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files require a
    pip-generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning('Package(s) not found: %s', ', '.join(missing))

    def get_requiring_packages(package_name):
        canonical_name = canonicalize_name(package_name)
        return [
            pkg.project_name for pkg in pkg_resources.working_set
            if canonical_name in
               [canonicalize_name(required.name) for required in
                pkg.requires()]
        ]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
            'required_by': get_requiring_packages(dist.project_name)
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadata
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_files=False, verbose=False):
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            write_output("---")

        write_output("Name: %s", dist.get('name', ''))
        write_output("Version: %s", dist.get('version', ''))
        write_output("Summary: %s", dist.get('summary', ''))
        write_output("Home-page: %s", dist.get('home-page', ''))
        write_output("Author: %s", dist.get('author', ''))
        write_output("Author-email: %s", dist.get('author-email', ''))
        write_output("License: %s", dist.get('license', ''))
        write_output("Location: %s", dist.get('location', ''))
        write_output("Requires: %s", ', '.join(dist.get('requires', [])))
        write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))

        if verbose:
            write_output("Metadata-Version: %s",
                         dist.get('metadata-version', ''))
            write_output("Installer: %s", dist.get('installer', ''))
            write_output("Classifiers:")
            for classifier in dist.get('classifiers', []):
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.get('entry_points', []):
                write_output("  %s", entry.strip())
        if list_files:
            write_output("Files:")
            for line in dist.get('files', []):
                write_output("  %s", line.strip())
            if "files" not in dist:
                write_output("Cannot locate installed-files.txt")
    return results_printed
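
Because search_packages_info above is a generator of plain dicts, the same data `pip show` prints can be read programmatically; a hedged sketch against pip's internal (unstable) API:

# Illustrative only, not part of the vendored sources.
from pip._internal.commands.show import search_packages_info

for info in search_packages_info(["pip"]):
    print(info["name"], info["version"], info["location"])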
89 sources/pip_20.1/_internal/commands/uninstall.py Normal file
@ -0,0 +1,89 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import (
    install_req_from_line,
    install_req_from_parsed_requirement,
)
from pip._internal.utils.misc import protect_pip_from_modification_on_windows


class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        session = self.get_default_session(options)

        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
                name, isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                    filename,
                    options=options,
                    session=session):
                req = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                'You must give at least one requirement to {self.name} (see '
                '"pip help {self.name}")'.format(**locals())
            )

        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes, verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()
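
Note that reqs_to_uninstall above is keyed by canonicalize_name(), so different spellings of the same project collapse to a single uninstall; a quick demonstration (the function is vendored here, but its public home is the `packaging` library):

from pip._vendor.packaging.utils import canonicalize_name

print(canonicalize_name("My_Package"))  # -> my-package
print(canonicalize_name("my.package"))  # -> my-package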
190 sources/pip_20.1/_internal/commands/wheel.py Normal file
@ -0,0 +1,190 @@
# -*- coding: utf-8 -*-

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import logging
import os
import shutil

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.exceptions import CommandError
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel_builder import build, should_build_for_wheel_command

if MYPY_CHECK_RUNNING:
    from optparse import Values
    from typing import Any, List

logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    @with_cleanup
    def run(self, options, args):
        # type: (Values, List[Any]) -> None
        cmdoptions.check_install_build_global(options)

        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)
        build_delete = (not (options.no_clean or options.build_dir))
        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            options.build_dir,
            delete=build_delete,
            kind="wheel",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            wheel_download_dir=options.wheel_dir,
            use_user_site=False,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            wheel_cache=wheel_cache,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(
            reqs, check_supported_wheels=True
        )

        reqs_to_build = [
            r for r in requirement_set.requirements.values()
            if should_build_for_wheel_command(r)
        ]

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,
            wheel_cache=wheel_cache,
            build_options=options.build_options or [],
            global_options=options.global_options or [],
        )
        for req in build_successes:
            assert req.link and req.link.is_wheel
            assert req.local_file_path
            # copy from cache to target directory
            try:
                shutil.copy(req.local_file_path, options.wheel_dir)
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name, e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
            raise CommandError(
                "Failed to build one or more wheels"
            )
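
A scripted equivalent of the command implemented above; a minimal sketch that builds wheels for one requirement (here `requests`, purely as an example) into ./wheelhouse, assuming a `pip` executable on PATH:

import subprocess

# Resolve `requests` and its dependencies, build their wheels, and place
# them in ./wheelhouse (the --wheel-dir option defined above).
subprocess.check_call(
    ["pip", "wheel", "--wheel-dir", "wheelhouse", "requests"])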
426 sources/pip_20.1/_internal/configuration.py Normal file
@ -0,0 +1,426 @@
"""Configuration management setup

Some terminology:
- name
  As written in config files.
- value
  Value associated with a name
- key
  Name combined with its section (section.name)
- variant
  A single word describing where the configuration key-value pair came from
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import locale
import logging
import os
import sys

from pip._vendor.six.moves import configparser

from pip._internal.exceptions import (
    ConfigurationError,
    ConfigurationFileCouldNotBeLoaded,
)
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS, expanduser
from pip._internal.utils.misc import ensure_dir, enum
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, List, NewType, Optional, Tuple
    )

    RawConfigParser = configparser.RawConfigParser  # Shorthand
    Kind = NewType("Kind", str)

logger = logging.getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
    # type: (str) -> str
    """Make a name consistent regardless of source (environment or file)
    """
    name = name.lower().replace('_', '-')
    if name.startswith('--'):
        name = name[2:]  # only prefer long opts
    return name


def _disassemble_key(name):
    # type: (str) -> List[str]
    if "." not in name:
        error_message = (
            "Key does not contain dot separated section and key. "
            "Perhaps you wanted to use 'global.{}' instead?"
        ).format(name)
        raise ConfigurationError(error_message)
    return name.split(".", 1)


# The kinds of configurations there are.
kinds = enum(
    USER="user",        # User Specific
    GLOBAL="global",    # System Wide
    SITE="site",        # [Virtual] Environment Specific
    ENV="env",          # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)


CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'


def get_configuration_files():
    # type: () -> Dict[Kind, List[str]]
    global_config_files = [
        os.path.join(path, CONFIG_BASENAME)
        for path in appdirs.site_config_dirs('pip')
    ]

    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    legacy_config_file = os.path.join(
        expanduser('~'),
        'pip' if WINDOWS else '.pip',
        CONFIG_BASENAME,
    )
    new_config_file = os.path.join(
        appdirs.user_config_dir("pip"), CONFIG_BASENAME
    )
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [site_config_file],
        kinds.USER: [legacy_config_file, new_config_file],
    }


class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        super(Configuration, self).__init__()

        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated  # type: bool
        self.load_only = load_only  # type: Optional[Kind]

        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
        ]

        self._ignore_env_names = ["version", "help"]

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.
        """
        self._ensure_have_load_only()

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.
        """
        self._ensure_have_load_only()

        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)

            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = next(section_iter)
                except StopIteration:
                    val = None

                if val is None:
                    parser.remove_section(section)

                self._mark_as_modified(fname, parser)
            else:
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        for variant in self._override_order:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason="contains invalid {} characters".format(
                        locale.getpreferredencoding(False)
                    ),
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
|
)
|
||||||
|
|
||||||
|
def _normalized_keys(self, section, items):
|
||||||
|
# type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
|
||||||
|
"""Normalizes items to construct a dictionary with normalized keys.
|
||||||
|
|
||||||
|
This routine is where the names become keys and are made the same
|
||||||
|
regardless of source - configuration files or environment.
|
||||||
|
"""
|
||||||
|
normalized = {}
|
||||||
|
for name, val in items:
|
||||||
|
key = section + "." + _normalize_name(name)
|
||||||
|
normalized[key] = val
|
||||||
|
return normalized
|
||||||
|
|
||||||
|
def _get_environ_vars(self):
|
||||||
|
# type: () -> Iterable[Tuple[str, str]]
|
||||||
|
"""Returns a generator with all environmental vars with prefix PIP_"""
|
||||||
|
for key, val in os.environ.items():
|
||||||
|
should_be_yielded = (
|
||||||
|
key.startswith("PIP_") and
|
||||||
|
key[4:].lower() not in self._ignore_env_names
|
||||||
|
)
|
||||||
|
if should_be_yielded:
|
||||||
|
yield key[4:].lower(), val
|
||||||
|
|
||||||
|
# XXX: This is patched in the tests.
|
||||||
|
def _iter_config_files(self):
|
||||||
|
# type: () -> Iterable[Tuple[Kind, List[str]]]
|
||||||
|
"""Yields variant and configuration files associated with it.
|
||||||
|
|
||||||
|
This should be treated like items of a dictionary.
|
||||||
|
"""
|
||||||
|
# SMELL: Move the conditions out of this function
|
||||||
|
|
||||||
|
# environment variables have the lowest priority
|
||||||
|
config_file = os.environ.get('PIP_CONFIG_FILE', None)
|
||||||
|
if config_file is not None:
|
||||||
|
yield kinds.ENV, [config_file]
|
||||||
|
else:
|
||||||
|
yield kinds.ENV, []
|
||||||
|
|
||||||
|
config_files = get_configuration_files()
|
||||||
|
|
||||||
|
# at the base we have any global configuration
|
||||||
|
yield kinds.GLOBAL, config_files[kinds.GLOBAL]
|
||||||
|
|
||||||
|
# per-user configuration next
|
||||||
|
should_load_user_config = not self.isolated and not (
|
||||||
|
config_file and os.path.exists(config_file)
|
||||||
|
)
|
||||||
|
if should_load_user_config:
|
||||||
|
# The legacy config file is overridden by the new config file
|
||||||
|
yield kinds.USER, config_files[kinds.USER]
|
||||||
|
|
||||||
|
# finally virtualenv configuration first trumping others
|
||||||
|
yield kinds.SITE, config_files[kinds.SITE]
|
||||||
|
|
||||||
|
def _get_parser_to_modify(self):
|
||||||
|
# type: () -> Tuple[str, RawConfigParser]
|
||||||
|
# Determine which parser to modify
|
||||||
|
parsers = self._parsers[self.load_only]
|
||||||
|
if not parsers:
|
||||||
|
# This should not happen if everything works correctly.
|
||||||
|
raise ConfigurationError(
|
||||||
|
"Fatal Internal error [id=2]. Please report as a bug."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use the highest priority parser.
|
||||||
|
return parsers[-1]
|
||||||
|
|
||||||
|
# XXX: This is patched in the tests.
|
||||||
|
def _mark_as_modified(self, fname, parser):
|
||||||
|
# type: (str, RawConfigParser) -> None
|
||||||
|
file_parser_tuple = (fname, parser)
|
||||||
|
if file_parser_tuple not in self._modified_parsers:
|
||||||
|
self._modified_parsers.append(file_parser_tuple)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
# type: () -> str
|
||||||
|
return "{}({!r})".format(self.__class__.__name__, self._dictionary)
|
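For orientation, a minimal usage sketch of the Configuration class completed above. It assumes pip 20.1's private module layout (pip._internal is not a stable API); the key and value below are arbitrary examples:

# Illustrative sketch only -- relies on pip 20.1 internals.
from pip._internal.configuration import Configuration, kinds

# Read-only merged view over every variant (env file < global < user < site
# < environment variables), in the order of self._override_order.
config = Configuration(isolated=False)
config.load()
for key, value in config.items():
    print(key, "=", value)

# Writing requires pinning a single variant via load_only; save() then
# rewrites only the parsers that _mark_as_modified() recorded.
user_config = Configuration(isolated=False, load_only=kinds.USER)
user_config.load()
user_config.set_value("global.timeout", "60")  # "section.name" key shape
user_config.save()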
24  sources/pip_20.1/_internal/distributions/__init__.py  Normal file
@@ -0,0 +1,24 @@
from pip._internal.distributions.sdist import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.distributions.base import AbstractDistribution
    from pip._internal.req.req_install import InstallRequirement


def make_distribution_for_install_requirement(install_req):
    # type: (InstallRequirement) -> AbstractDistribution
    """Returns a Distribution for the given InstallRequirement
    """
    # Editable requirements will always be source distributions. They use the
    # legacy logic until we create a modern standard for them.
    if install_req.editable:
        return SourceDistribution(install_req)

    # If it's a wheel, it's a WheelDistribution
    if install_req.is_wheel:
        return WheelDistribution(install_req)

    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
45  sources/pip_20.1/_internal/distributions/base.py  Normal file
@@ -0,0 +1,45 @@
import abc

from pip._vendor.six import add_metaclass

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional

    from pip._vendor.pkg_resources import Distribution
    from pip._internal.req import InstallRequirement
    from pip._internal.index.package_finder import PackageFinder


@add_metaclass(abc.ABCMeta)
class AbstractDistribution(object):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.
    """

    def __init__(self, req):
        # type: (InstallRequirement) -> None
        super(AbstractDistribution, self).__init__()
        self.req = req

    @abc.abstractmethod
    def get_pkg_resources_distribution(self):
        # type: () -> Optional[Distribution]
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        raise NotImplementedError()
24  sources/pip_20.1/_internal/distributions/installed.py  Normal file
@@ -0,0 +1,24 @@
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional

    from pip._vendor.pkg_resources import Distribution
    from pip._internal.index.package_finder import PackageFinder


class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Optional[Distribution]
        return self.req.satisfied_by

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        pass
104  sources/pip_20.1/_internal/distributions/sdist.py  Normal file
@@ -0,0 +1,104 @@
import logging

from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Set, Tuple

    from pip._vendor.pkg_resources import Distribution
    from pip._internal.index.package_finder import PackageFinder


logger = logging.getLogger(__name__)


class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Distribution
        return self.req.get_dist()

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            self._setup_isolation(finder)

        self.req.prepare_metadata()

    def _setup_isolation(self, finder):
        # type: (PackageFinder) -> None
        def _raise_conflicts(conflicting_with, conflicting_reqs):
            # type: (str, Set[Tuple[str, str]]) -> None
            format_string = (
                "Some build dependencies for {requirement} "
                "conflict with {conflicting_with}: {description}."
            )
            error_message = format_string.format(
                requirement=self.req,
                conflicting_with=conflicting_with,
                description=', '.join(
                    '{} is incompatible with {}'.format(installed, wanted)
                    for installed, wanted in sorted(conflicting)
                )
            )
            raise InstallationError(error_message)

        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, 'overlay',
            "Installing build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            _raise_conflicts("PEP 517/518 supported requirements",
                             conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing)))
            )
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build wheel"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                reqs = backend.get_requires_for_build_wheel()

        conflicting, missing = self.req.build_env.check_requirements(reqs)
        if conflicting:
            _raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, 'normal',
            "Installing backend dependencies"
        )
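The two-pass isolation above (static build requires first, then whatever the backend itself requests) can also be observed directly through the vendored pep517 hooks. A standalone sketch with a hypothetical source directory, assuming the setuptools backend; this is not pip's internal code path:

# Standalone sketch of the second pass, using the vendored pep517 hooks.
from pip._vendor.pep517.wrappers import Pep517HookCaller

source_dir = "/path/to/unpacked/sdist"  # hypothetical
backend = Pep517HookCaller(source_dir, build_backend="setuptools.build_meta")

# Pass 1 requirements come statically from pyproject.toml [build-system].
# Pass 2: once those are installed, the backend may request more.
extra_reqs = backend.get_requires_for_build_wheel(config_settings=None)
print(extra_reqs)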
36  sources/pip_20.1/_internal/distributions/wheel.py  Normal file
@@ -0,0 +1,36 @@
from zipfile import ZipFile

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel

if MYPY_CHECK_RUNNING:
    from pip._vendor.pkg_resources import Distribution
    from pip._internal.index.package_finder import PackageFinder


class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    def get_pkg_resources_distribution(self):
        # type: () -> Distribution
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        # Set as part of preparation during download.
        assert self.req.local_file_path
        # Wheels are never unnamed.
        assert self.req.name

        with ZipFile(self.req.local_file_path, allowZip64=True) as z:
            return pkg_resources_distribution_for_wheel(
                z, self.req.name, self.req.local_file_path
            )

    def prepare_distribution_metadata(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        pass
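The helper used above can be exercised on its own to read metadata from a wheel without installing it. A sketch assuming pip 20.1's pip._internal.utils.wheel module; the wheel path and project name are placeholders:

# Sketch: load a Distribution from a wheel on disk, no installation needed.
from zipfile import ZipFile

from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel

wheel_path = "requests-2.23.0-py2.py3-none-any.whl"  # hypothetical path
with ZipFile(wheel_path, allowZip64=True) as z:
    dist = pkg_resources_distribution_for_wheel(z, "requests", wheel_path)
print(dist.project_name, dist.version)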
308  sources/pip_20.1/_internal/exceptions.py  Normal file
@@ -0,0 +1,308 @@
"""Exceptions used throughout package"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

from itertools import chain, groupby, repeat

from pip._vendor.six import iteritems

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional
    from pip._vendor.pkg_resources import Distribution
    from pip._internal.req.req_install import InstallRequirement


class PipError(Exception):
    """Base pip exception"""


class ConfigurationError(PipError):
    """General exception in configuration"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class NoneMetadataError(PipError):
    """
    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
    pip._vendor.pkg_resources.Distribution object and
    `dist.has_metadata('METADATA')` returns True but
    `dist.get_metadata('METADATA')` returns None (and similarly for
    "PKG-INFO").
    """

    def __init__(self, dist, metadata_name):
        # type: (Distribution, str) -> None
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self):
        # type: () -> str
        # Use `dist` in the error message because its stringification
        # includes more information, like the version and location.
        return (
            'None {} metadata found for distribution: {}'.format(
                self.metadata_name, self.dist,
            )
        )


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        self.errors.append(error)

    def __str__(self):
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return '\n'.join(lines)

    def __nonzero__(self):
        return bool(self.errors)

    def __bool__(self):
        return self.__nonzero__()


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # type: Optional[InstallRequirement]
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            its link already populated by the resolver's _populate_link().

        """
        return '    {}'.format(self._requirement_name())

    def __str__(self):
        return '{}\n{}'.format(self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    {} --hash={}:{}'.format(package or 'unknown package',
                                            FAVORITE_HASH,
                                            self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    {}:\n{}'.format(self._requirement_name(),
                                    self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected {} {}'.format(next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        {}\n'.format(
                         self.gots[hash_name].hexdigest()))
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""


class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file
    """

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self):
        if self.fname is not None:
            message_part = " in {}.".format(self.fname)
        else:
            assert self.error is not None
            message_part = ".\n{}\n".format(self.error.message)
        return "Configuration file {}{}".format(self.reason, message_part)
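To see how the hash errors above combine for display, a small sketch; the digest is a made-up placeholder, and pip 20.1 is assumed to be importable:

# Sketch of HashErrors aggregation: sorted by `order`, grouped under `head`.
from pip._internal.exceptions import HashErrors, HashMissing, HashUnpinned

errors = HashErrors()
errors.append(HashMissing("0123abcd"))  # hypothetical digest
errors.append(HashUnpinned())

if errors:              # __bool__: were any errors collected?
    print(str(errors))  # each class's head, then one body() line per error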
2  sources/pip_20.1/_internal/index/__init__.py  Normal file
@@ -0,0 +1,2 @@
"""Index interaction code
"""
661  sources/pip_20.1/_internal/index/collector.py  Normal file
@@ -0,0 +1,661 @@
"""
The main purpose of this module is to expose LinkCollector.collect_links().
"""

import cgi
import functools
import itertools
import logging
import mimetypes
import os
import re
from collections import OrderedDict

from pip._vendor import html5lib, requests
from pip._vendor.distlib.compat import unescape
from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip._internal.models.link import Link
from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
from pip._internal.utils.misc import pairwise, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._internal.vcs import is_url, vcs

if MYPY_CHECK_RUNNING:
    from typing import (
        Callable, Iterable, List, MutableMapping, Optional,
        Protocol, Sequence, Tuple, TypeVar, Union,
    )
    import xml.etree.ElementTree

    from pip._vendor.requests import Response

    from pip._internal.models.search_scope import SearchScope
    from pip._internal.network.session import PipSession

    HTMLElement = xml.etree.ElementTree.Element
    ResponseHeaders = MutableMapping[str, str]

    # Used in the @lru_cache polyfill.
    F = TypeVar('F')

    class LruCache(Protocol):
        def __call__(self, maxsize=None):
            # type: (Optional[int]) -> Callable[[F], F]
            raise NotImplementedError


logger = logging.getLogger(__name__)


# Fallback to noop_lru_cache in Python 2
# TODO: this can be removed when python 2 support is dropped!
def noop_lru_cache(maxsize=None):
    # type: (Optional[int]) -> Callable[[F], F]
    def _wrapper(f):
        # type: (F) -> F
        return f
    return _wrapper


_lru_cache = getattr(functools, "lru_cache", noop_lru_cache)  # type: LruCache


def _match_vcs_scheme(url):
    # type: (str) -> Optional[str]
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    for scheme in vcs.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
            return scheme
    return None


def _is_url_like_archive(url):
    # type: (str) -> bool
    """Return whether the URL looks like an archive.
    """
    filename = Link(url).filename
    for bad_ext in ARCHIVE_EXTENSIONS:
        if filename.endswith(bad_ext):
            return True
    return False


class _NotHTML(Exception):
    def __init__(self, content_type, request_desc):
        # type: (str, str) -> None
        super(_NotHTML, self).__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc


def _ensure_html_header(response):
    # type: (Response) -> None
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
    """
    content_type = response.headers.get("Content-Type", "")
    if not content_type.lower().startswith("text/html"):
        raise _NotHTML(content_type, response.request.method)


class _NotHTTP(Exception):
    pass


def _ensure_html_response(url, session):
    # type: (str, PipSession) -> None
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
    if scheme not in {'http', 'https'}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    resp.raise_for_status()

    _ensure_html_header(resp)


def _get_html_response(url, session):
    # type: (str, PipSession) -> Response
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    """
    if _is_url_like_archive(url):
        _ensure_html_response(url, session=session)

    logger.debug('Getting page %s', redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    resp.raise_for_status()

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp


def _get_encoding_from_headers(headers):
    # type: (ResponseHeaders) -> Optional[str]
    """Determine if we have any encoding information in our headers.
    """
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            return params['charset']
    return None


def _determine_base_url(document, page_url):
    # type: (HTMLElement, str) -> str
    """Determine the HTML document's base URL.

    This looks for a ``<base>`` tag in the HTML document. If present, its href
    attribute denotes the base URL of anchor tags in the document. If there is
    no such tag (or if it does not have a valid href attribute), the HTML
    file's URL is used as the base URL.

    :param document: An HTML document representation. The current
        implementation expects the result of ``html5lib.parse()``.
    :param page_url: The URL of the HTML document.
    """
    for base in document.findall(".//base"):
        href = base.get("href")
        if href is not None:
            return href
    return page_url


def _clean_url_path_part(part):
    # type: (str) -> str
    """
    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    return urllib_parse.quote(urllib_parse.unquote(part))


def _clean_file_url_path(part):
    # type: (str) -> str
    """
    Clean the first part of a URL path that corresponds to a local
    filesystem path (i.e. the first part after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    # Also, on Windows the path part might contain a drive letter which
    # should not be quoted. On Linux where drive letters do not
    # exist, the colon should be quoted. We rely on urllib.request
    # to do the right thing here.
    return urllib_request.pathname2url(urllib_request.url2pathname(part))


# percent-encoded:                   /
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)


def _clean_url_path(path, is_local_path):
    # type: (str, bool) -> str
    """
    Clean the path portion of a URL.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return ''.join(cleaned_parts)


def _clean_link(url):
    # type: (str) -> str
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split the URL into parts according to the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    result = urllib_parse.urlparse(url)
    # If the netloc is empty, then the URL refers to a local filesystem path.
    is_local_path = not result.netloc
    path = _clean_url_path(result.path, is_local_path=is_local_path)
    return urllib_parse.urlunparse(result._replace(path=path))


def _create_link_from_element(
    anchor,    # type: HTMLElement
    page_url,  # type: str
    base_url,  # type: str
):
    # type: (...) -> Optional[Link]
    """
    Convert an anchor element in a simple repository page to a Link.
    """
    href = anchor.get("href")
    if not href:
        return None

    url = _clean_link(urllib_parse.urljoin(base_url, href))
    pyrequire = anchor.get('data-requires-python')
    pyrequire = unescape(pyrequire) if pyrequire else None

    yanked_reason = anchor.get('data-yanked')
    if yanked_reason:
        # This is a unicode string in Python 2 (and 3).
        yanked_reason = unescape(yanked_reason)

    link = Link(
        url,
        comes_from=page_url,
        requires_python=pyrequire,
        yanked_reason=yanked_reason,
    )

    return link


class CacheablePageContent(object):
    def __init__(self, page):
        # type: (HTMLPage) -> None
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other):
        # type: (object) -> bool
        return (isinstance(other, type(self)) and
                self.page.url == other.page.url)

    def __hash__(self):
        # type: () -> int
        return hash(self.page.url)


def with_cached_html_pages(
    fn,  # type: Callable[[HTMLPage], Iterable[Link]]
):
    # type: (...) -> Callable[[HTMLPage], List[Link]]
    """
    Given a function that parses an Iterable[Link] from an HTMLPage, cache the
    function's result (keyed by CacheablePageContent), unless the HTMLPage
    `page` has `page.cache_link_parsing == False`.
    """

    @_lru_cache(maxsize=None)
    def wrapper(cacheable_page):
        # type: (CacheablePageContent) -> List[Link]
        return list(fn(cacheable_page.page))

    @functools.wraps(fn)
    def wrapper_wrapper(page):
        # type: (HTMLPage) -> List[Link]
        if page.cache_link_parsing:
            return wrapper(CacheablePageContent(page))
        return list(fn(page))

    return wrapper_wrapper


@with_cached_html_pages
def parse_links(page):
    # type: (HTMLPage) -> Iterable[Link]
    """
    Parse an HTML document, and yield its anchor elements as Link objects.
    """
    document = html5lib.parse(
        page.content,
        transport_encoding=page.encoding,
        namespaceHTMLElements=False,
    )

    url = page.url
    base_url = _determine_base_url(document, url)
    for anchor in document.findall(".//a"):
        link = _create_link_from_element(
            anchor,
            page_url=url,
            base_url=base_url,
        )
        if link is None:
            continue
        yield link


class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(
        self,
        content,                  # type: bytes
        encoding,                 # type: Optional[str]
        url,                      # type: str
        cache_link_parsing=True,  # type: bool
    ):
        # type: (...) -> None
        """
        :param encoding: the encoding to decode the given content.
        :param url: the URL from which the HTML was downloaded.
        :param cache_link_parsing: whether links parsed from this page's url
                                   should be cached. PyPI index urls should
                                   have this set to False, for example.
        """
        self.content = content
        self.encoding = encoding
        self.url = url
        self.cache_link_parsing = cache_link_parsing

    def __str__(self):
        # type: () -> str
        return redact_auth_from_url(self.url)


def _handle_get_page_fail(
    link,      # type: Link
    reason,    # type: Union[str, Exception]
    meth=None  # type: Optional[Callable[..., None]]
):
    # type: (...) -> None
    if meth is None:
        meth = logger.debug
    meth("Could not fetch URL %s: %s - skipping", link, reason)


def _make_html_page(response, cache_link_parsing=True):
    # type: (Response, bool) -> HTMLPage
    encoding = _get_encoding_from_headers(response.headers)
    return HTMLPage(
        response.content,
        encoding=encoding,
        url=response.url,
        cache_link_parsing=cache_link_parsing)


def _get_html_page(link, session=None):
    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib_parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)

    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP:
        logger.debug(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by HEAD.', link,
        )
    except _NotHTML as exc:
        logger.debug(
            'Skipping page %s because the %s request got Content-Type: %s',
            link, exc.request_desc, exc.content_type,
        )
    except HTTPError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, "connection error: {}".format(exc))
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        return _make_html_page(resp,
                               cache_link_parsing=link.cache_link_parsing)
    return None


def _remove_duplicate_links(links):
    # type: (Iterable[Link]) -> List[Link]
    """
    Return a list of links, with duplicates removed and ordering preserved.
    """
    # We preserve the ordering when removing duplicates because we can.
    return list(OrderedDict.fromkeys(links))


def group_locations(locations, expand_dir=False):
    # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
    """
    Divide a list of locations into two groups: "files" (archives) and "urls."

    :return: A pair of lists (files, urls).
    """
    files = []
    urls = []

    # puts the url for the given file path into the appropriate list
    def sort_path(path):
        # type: (str) -> None
        url = path_to_url(path)
        if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
            urls.append(url)
        else:
            files.append(url)

    for url in locations:

        is_local_path = os.path.exists(url)
        is_file_url = url.startswith('file:')

        if is_local_path or is_file_url:
            if is_local_path:
                path = url
            else:
                path = url_to_path(url)
            if os.path.isdir(path):
                if expand_dir:
                    path = os.path.realpath(path)
                    for item in os.listdir(path):
                        sort_path(os.path.join(path, item))
                elif is_file_url:
                    urls.append(url)
                else:
                    logger.warning(
                        "Path '{0}' is ignored: "
                        "it is a directory.".format(path),
                    )
            elif os.path.isfile(path):
                sort_path(path)
            else:
                logger.warning(
                    "Url '%s' is ignored: it is neither a file "
                    "nor a directory.", url,
                )
        elif is_url(url):
            # Only add url with clear scheme
            urls.append(url)
        else:
            logger.warning(
                "Url '%s' is ignored. It is either a non-existing "
                "path or lacks a specific scheme.", url,
            )

    return files, urls


class CollectedLinks(object):

    """
    Encapsulates the return value of a call to LinkCollector.collect_links().

    The return value includes both URLs to project pages containing package
    links, as well as individual package Link objects collected from other
    sources.

    This info is stored separately as:

    (1) links from the configured file locations,
    (2) links from the configured find_links, and
    (3) urls to HTML project pages, as described by the PEP 503 simple
        repository API.
    """

    def __init__(
        self,
        files,         # type: List[Link]
        find_links,    # type: List[Link]
        project_urls,  # type: List[Link]
    ):
        # type: (...) -> None
        """
        :param files: Links from file locations.
        :param find_links: Links from find_links.
        :param project_urls: URLs to HTML project pages, as described by
            the PEP 503 simple repository API.
        """
        self.files = files
        self.find_links = find_links
        self.project_urls = project_urls


class LinkCollector(object):

    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_links() method.
    """

    def __init__(
        self,
        session,       # type: PipSession
        search_scope,  # type: SearchScope
    ):
        # type: (...) -> None
        self.search_scope = search_scope
        self.session = session

    @property
    def find_links(self):
        # type: () -> List[str]
        return self.search_scope.find_links

    def fetch_page(self, location):
        # type: (Link) -> Optional[HTMLPage]
        """
        Fetch an HTML page containing package links.
        """
        return _get_html_page(location, session=self.session)

    def collect_links(self, project_name):
        # type: (str) -> CollectedLinks
        """Find all available links for the given project name.

        :return: All the Link objects (unfiltered), as a CollectedLinks object.
        """
        search_scope = self.search_scope
        index_locations = search_scope.get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = group_locations(index_locations)
        fl_file_loc, fl_url_loc = group_locations(
            self.find_links, expand_dir=True,
        )

        file_links = [
            Link(url) for url in itertools.chain(index_file_loc, fl_file_loc)
        ]

        # We trust every directly linked archive in find_links
        find_link_links = [Link(url, '-f') for url in self.find_links]

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links.
        # We want to filter out anything that does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                # Mark PyPI indices as "cache_link_parsing == False" -- this
                # will avoid caching the result of parsing the page for links.
                (Link(url, cache_link_parsing=False) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
            )
            if self.session.is_secure_origin(link)
        ]

        url_locations = _remove_duplicate_links(url_locations)
        lines = [
            '{} location(s) to search for versions of {}:'.format(
                len(url_locations), project_name,
            ),
        ]
        for link in url_locations:
            lines.append('* {}'.format(link))
        logger.debug('\n'.join(lines))

        return CollectedLinks(
            files=file_links,
            find_links=find_link_links,
            project_urls=url_locations,
        )
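The parsing helpers above can be driven without any network access by constructing an HTMLPage by hand. A sketch with made-up fixture HTML, assuming pip 20.1's module layout:

# Sketch: parse anchors out of a PEP 503 style index page offline.
from pip._internal.index.collector import HTMLPage, parse_links

html = (
    b'<html><body>'
    b'<a href="demo-1.0.tar.gz#sha256=deadbeef"'
    b' data-requires-python="&gt;=3.5">demo-1.0.tar.gz</a>'
    b'</body></html>'
)
page = HTMLPage(html, encoding=None, url="https://example.com/simple/demo/")
for link in parse_links(page):
    # hrefs are joined against the page URL and entity-unescaped
    print(link.url, link.requires_python)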
1016  sources/pip_20.1/_internal/index/package_finder.py  Normal file
File diff suppressed because it is too large
194  sources/pip_20.1/_internal/locations.py  Normal file
@@ -0,0 +1,194 @@
"""Locations where we look for configs, install stuff, etc"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import os
import os.path
import platform
import site
import sys
import sysconfig
from distutils import sysconfig as distutils_sysconfig
from distutils.command.install import SCHEME_KEYS  # type: ignore
from distutils.command.install import install as distutils_install_command

from pip._internal.models.scheme import Scheme
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
from pip._internal.utils.virtualenv import running_under_virtualenv

if MYPY_CHECK_RUNNING:
    from typing import Dict, List, Optional, Union

    from distutils.cmd import Command as DistutilsCommand


# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


def get_major_minor_version():
    # type: () -> str
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    return '{}.{}'.format(*sys.version_info)


def get_src_prefix():
    # type: () -> str
    if running_under_virtualenv():
        src_prefix = os.path.join(sys.prefix, 'src')
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            src_prefix = os.path.join(os.getcwd(), 'src')
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit(
                "The folder you are executing pip from can no longer be found."
            )

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(src_prefix)


# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_path("purelib")  # type: Optional[str]

# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE

if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'


def distutils_scheme(
    dist_name, user=False, home=None, root=None, isolated=False, prefix=None
):
    # type:(str, bool, str, str, bool, str) -> Dict[str, str]
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    d.parse_config_files()
    obj = None  # type: Optional[DistutilsCommand]
    obj = d.get_command_obj('install', create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    assert not (home and prefix), "home={} prefix={}".format(home, prefix)
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
|
||||||
|
i.home = home or i.home
|
||||||
|
i.root = root or i.root
|
||||||
|
i.finalize_options()
|
||||||
|
|
||||||
|
scheme = {}
|
||||||
|
for key in SCHEME_KEYS:
|
||||||
|
scheme[key] = getattr(i, 'install_' + key)
|
||||||
|
|
||||||
|
# install_lib specified in setup.cfg should install *everything*
|
||||||
|
# into there (i.e. it takes precedence over both purelib and
|
||||||
|
# platlib). Note, i.install_lib is *always* set after
|
||||||
|
# finalize_options(); we only want to override here if the user
|
||||||
|
# has explicitly requested it hence going back to the config
|
||||||
|
if 'install_lib' in d.get_option_dict('install'):
|
||||||
|
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
|
||||||
|
|
||||||
|
if running_under_virtualenv():
|
||||||
|
scheme['headers'] = os.path.join(
|
||||||
|
sys.prefix,
|
||||||
|
'include',
|
||||||
|
'site',
|
||||||
|
'python{}'.format(get_major_minor_version()),
|
||||||
|
dist_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
if root is not None:
|
||||||
|
path_no_drive = os.path.splitdrive(
|
||||||
|
os.path.abspath(scheme["headers"]))[1]
|
||||||
|
scheme["headers"] = os.path.join(
|
||||||
|
root,
|
||||||
|
path_no_drive[1:],
|
||||||
|
)
|
||||||
|
|
||||||
|
return scheme
|
||||||
|
|
||||||
|
|
||||||
|
def get_scheme(
|
||||||
|
dist_name, # type: str
|
||||||
|
user=False, # type: bool
|
||||||
|
home=None, # type: Optional[str]
|
||||||
|
root=None, # type: Optional[str]
|
||||||
|
isolated=False, # type: bool
|
||||||
|
prefix=None, # type: Optional[str]
|
||||||
|
):
|
||||||
|
# type: (...) -> Scheme
|
||||||
|
"""
|
||||||
|
Get the "scheme" corresponding to the input parameters. The distutils
|
||||||
|
documentation provides the context for the available schemes:
|
||||||
|
https://docs.python.org/3/install/index.html#alternate-installation
|
||||||
|
|
||||||
|
:param dist_name: the name of the package to retrieve the scheme for, used
|
||||||
|
in the headers scheme path
|
||||||
|
:param user: indicates to use the "user" scheme
|
||||||
|
:param home: indicates to use the "home" scheme and provides the base
|
||||||
|
directory for the same
|
||||||
|
:param root: root under which other directories are re-based
|
||||||
|
:param isolated: equivalent to --no-user-cfg, i.e. do not consider
|
||||||
|
~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
|
||||||
|
scheme paths
|
||||||
|
:param prefix: indicates to use the "prefix" scheme and provides the
|
||||||
|
base directory for the same
|
||||||
|
"""
|
||||||
|
scheme = distutils_scheme(
|
||||||
|
dist_name, user, home, root, isolated, prefix
|
||||||
|
)
|
||||||
|
return Scheme(
|
||||||
|
platlib=scheme["platlib"],
|
||||||
|
purelib=scheme["purelib"],
|
||||||
|
headers=scheme["headers"],
|
||||||
|
scripts=scheme["scripts"],
|
||||||
|
data=scheme["data"],
|
||||||
|
)
|
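For orientation, a minimal sketch of how this vendored module is exercised (pip._internal is not a stable public API, and "example-dist" below is a hypothetical distribution name):

    from pip._internal.locations import get_major_minor_version, get_scheme

    print(get_major_minor_version())     # e.g. "3.8" on a CPython 3.8 interpreter
    scheme = get_scheme('example-dist')  # default (non-user, non-prefix) scheme
    print(scheme.purelib)                # site-packages path for pure-Python code
    print(scheme.scripts)                # bin/ (or Scripts\ on Windows)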
16 sources/pip_20.1/_internal/main.py Normal file
@@ -0,0 +1,16 @@
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, List


def main(args=None):
    # type: (Optional[List[str]]) -> int
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
2 sources/pip_20.1/_internal/models/__init__.py Normal file
@@ -0,0 +1,2 @@
"""A package that contains models that represent entities.
"""
36 sources/pip_20.1/_internal/models/candidate.py Normal file
@@ -0,0 +1,36 @@
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._vendor.packaging.version import _BaseVersion
    from pip._internal.models.link import Link


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """

    def __init__(self, name, version, link):
        # type: (str, str, Link) -> None
        self.name = name
        self.version = parse_version(version)  # type: _BaseVersion
        self.link = link

        super(InstallationCandidate, self).__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate
        )

    def __repr__(self):
        # type: () -> str
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.name, self.version, self.link,
        )

    def __str__(self):
        # type: () -> str
        return '{!r} candidate (version {} at {})'.format(
            self.name, self.version, self.link,
        )
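A minimal sketch of the ordering this class inherits from KeyBasedCompareMixin, using hypothetical URLs (candidates compare by name, then parsed version, then link):

    from pip._internal.models.candidate import InstallationCandidate
    from pip._internal.models.link import Link

    old = InstallationCandidate('pip', '9.0.3',
                                Link('https://example.org/pip-9.0.3.tar.gz'))
    new = InstallationCandidate('pip', '20.1',
                                Link('https://example.org/pip-20.1.tar.gz'))
    assert old < new  # 9.0.3 < 20.1 as parsed versions, not as strings
    print(str(new))   # 'pip' candidate (version 20.1 at https://example.org/...)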
245 sources/pip_20.1/_internal/models/direct_url.py Normal file
@@ -0,0 +1,245 @@
""" PEP 610 """
import json
import re

from pip._vendor import six
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, Optional, Type, TypeVar, Union
    )

    T = TypeVar("T")


DIRECT_URL_METADATA_NAME = "direct_url.json"
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")

__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]


class DirectUrlValidationError(Exception):
    pass


def _get(d, expected_type, key, default=None):
    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T]
    """Get value from dictionary and verify expected type."""
    if key not in d:
        return default
    value = d[key]
    if six.PY2 and expected_type is str:
        expected_type = six.string_types  # type: ignore
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            "{!r} has unexpected type for {} (expected {})".format(
                value, key, expected_type
            )
        )
    return value


def _get_required(d, expected_type, key, default=None):
    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T
    value = _get(d, expected_type, key, default)
    if value is None:
        raise DirectUrlValidationError("{} must have a value".format(key))
    return value


def _exactly_one_of(infos):
    # type: (Iterable[Optional[InfoType]]) -> InfoType
    infos = [info for info in infos if info is not None]
    if not infos:
        raise DirectUrlValidationError(
            "missing one of archive_info, dir_info, vcs_info"
        )
    if len(infos) > 1:
        raise DirectUrlValidationError(
            "more than one of archive_info, dir_info, vcs_info"
        )
    assert infos[0] is not None
    return infos[0]


def _filter_none(**kwargs):
    # type: (Any) -> Dict[str, Any]
    """Make dict excluding None values."""
    return {k: v for k, v in kwargs.items() if v is not None}


class VcsInfo(object):
    name = "vcs_info"

    def __init__(
        self,
        vcs,  # type: str
        commit_id,  # type: str
        requested_revision=None,  # type: Optional[str]
        resolved_revision=None,  # type: Optional[str]
        resolved_revision_type=None,  # type: Optional[str]
    ):
        self.vcs = vcs
        self.requested_revision = requested_revision
        self.commit_id = commit_id
        self.resolved_revision = resolved_revision
        self.resolved_revision_type = resolved_revision_type

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo]
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
            resolved_revision=_get(d, str, "resolved_revision"),
            resolved_revision_type=_get(d, str, "resolved_revision_type"),
        )

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
            resolved_revision=self.resolved_revision,
            resolved_revision_type=self.resolved_revision_type,
        )


class ArchiveInfo(object):
    name = "archive_info"

    def __init__(
        self,
        hash=None,  # type: Optional[str]
    ):
        self.hash = hash

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo]
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"))

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(hash=self.hash)


class DirInfo(object):
    name = "dir_info"

    def __init__(
        self,
        editable=False,  # type: bool
    ):
        self.editable = editable

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo]
        if d is None:
            return None
        return cls(
            editable=_get_required(d, bool, "editable", default=False)
        )

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(editable=self.editable or None)


if MYPY_CHECK_RUNNING:
    InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


class DirectUrl(object):

    def __init__(
        self,
        url,  # type: str
        info,  # type: InfoType
        subdirectory=None,  # type: Optional[str]
    ):
        self.url = url
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc):
        # type: (str) -> str
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo) and
            self.info.vcs == "git" and
            user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self):
        # type: () -> str
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib_parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib_parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self):
        # type: () -> None
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d):
        # type: (Dict[str, Any]) -> DirectUrl
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self):
        # type: () -> Dict[str, Any]
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s):
        # type: (str) -> DirectUrl
        return cls.from_dict(json.loads(s))

    def to_json(self):
        # type: () -> str
        return json.dumps(self.to_dict(), sort_keys=True)
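A minimal sketch of a PEP 610 round-trip using only the API above (the repository URL and commit id are hypothetical):

    from pip._internal.models.direct_url import DirectUrl, VcsInfo

    direct_url = DirectUrl(
        url="https://github.com/pypa/pip.git",
        info=VcsInfo(vcs="git", commit_id="abc123"),  # hypothetical commit id
    )
    direct_url.validate()        # raises DirectUrlValidationError on bad data
    print(direct_url.to_json())  # {"url": "...", "vcs_info": {"commit_id": ..., "vcs": "git"}}
    assert DirectUrl.from_json(direct_url.to_json()).url == direct_url.url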
84 sources/pip_20.1/_internal/models/format_control.py Normal file
@@ -0,0 +1,84 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import CommandError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, FrozenSet


class FormatControl(object):
    """Helper for managing formats from which a package can be installed.
    """

    def __init__(self, no_binary=None, only_binary=None):
        # type: (Optional[Set[str]], Optional[Set[str]]) -> None
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other):
        # type: (object) -> bool
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        # type: (object) -> bool
        return not self.__eq__(other)

    def __repr__(self):
        # type: () -> str
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value, target, other):
        # type: (str, Optional[Set[str]], Optional[Set[str]]) -> None
        if value.startswith('-'):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name):
        # type: (str) -> FrozenSet[str]
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
        return frozenset(result)

    def disallow_binaries(self):
        # type: () -> None
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
        )
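A minimal sketch of the sets that back --no-binary/--only-binary, driven directly (option parsing happens elsewhere in pip; this only exercises the class above):

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl()
    # roughly "--no-binary :all: --only-binary numpy"
    FormatControl.handle_mutual_excludes(':all:', fc.no_binary, fc.only_binary)
    FormatControl.handle_mutual_excludes('numpy', fc.only_binary, fc.no_binary)
    print(fc.get_allowed_formats('numpy'))     # frozenset({'binary'})
    print(fc.get_allowed_formats('requests'))  # frozenset({'source'})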
31 sources/pip_20.1/_internal/models/index.py Normal file
@@ -0,0 +1,31 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class PackageIndex(object):
    """Represents a Package Index and provides easier access to endpoints
    """

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super(PackageIndex, self).__init__()
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        return urllib_parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
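A minimal sketch of the derived endpoints on the bundled PyPI object defined above:

    from pip._internal.models.index import PyPI

    print(PyPI.simple_url)  # https://pypi.org/simple
    print(PyPI.pypi_url)    # https://pypi.org/pypi
    print(PyPI.netloc)      # pypi.org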
236 sources/pip_20.1/_internal/models/link.py Normal file
@@ -0,0 +1,236 @@
import os
import posixpath
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.misc import (
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
)
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url, url_to_path

if MYPY_CHECK_RUNNING:
    from typing import Optional, Text, Tuple, Union
    from pip._internal.index.collector import HTMLPage
    from pip._internal.utils.hashes import Hashes


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """

    def __init__(
        self,
        url,  # type: str
        comes_from=None,  # type: Optional[Union[str, HTMLPage]]
        requires_python=None,  # type: Optional[str]
        yanked_reason=None,  # type: Optional[Text]
        cache_link_parsing=True,  # type: bool
    ):
        # type: (...) -> None
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of HTMLPage where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param cache_link_parsing: A flag that is used elsewhere to determine
                                   whether resources retrieved from this link
                                   should be cached. PyPI index urls should
                                   generally have this set to False, for
                                   example.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self._parsed_url = urllib_parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason

        super(Link, self).__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing

    def __str__(self):
        # type: () -> str
        if self.requires_python:
            rp = ' (requires-python:{})'.format(self.requires_python)
        else:
            rp = ''
        if self.comes_from:
            return '{} (from {}){}'.format(
                redact_auth_from_url(self._url), self.comes_from, rp)
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self):
        # type: () -> str
        return '<Link {}>'.format(self)

    @property
    def url(self):
        # type: () -> str
        return self._url

    @property
    def filename(self):
        # type: () -> str
        path = self.path.rstrip('/')
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib_parse.unquote(name)
        assert name, (
            'URL {self._url!r} produced no filename'.format(**locals()))
        return name

    @property
    def file_path(self):
        # type: () -> str
        return url_to_path(self.url)

    @property
    def scheme(self):
        # type: () -> str
        return self._parsed_url.scheme

    @property
    def netloc(self):
        # type: () -> str
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self):
        # type: () -> str
        return urllib_parse.unquote(self._parsed_url.path)

    def splitext(self):
        # type: () -> Tuple[str, str]
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        # type: () -> str
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # type: () -> str
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        # type: () -> Optional[str]
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        # type: () -> Optional[str]
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self._url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self._url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # type: () -> str
        return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_file(self):
        # type: () -> bool
        return self.scheme == 'file'

    def is_existing_dir(self):
        # type: () -> bool
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self):
        # type: () -> bool
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self):
        # type: () -> bool
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self):
        # type: () -> bool
        return self.yanked_reason is not None

    @property
    def has_hash(self):
        # type: () -> bool
        return self.hash_name is not None

    def is_hash_allowed(self, hashes):
        # type: (Optional[Hashes]) -> bool
        """
        Return True if the link has a hash and it is allowed.
        """
        if hashes is None or not self.has_hash:
            return False
        # Assert non-None so mypy knows self.hash_name and self.hash are str.
        assert self.hash_name is not None
        assert self.hash is not None

        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
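A minimal sketch of parsing a simple-index style link with the class above (the URL is hypothetical but follows the PEP 503 hash-fragment convention):

    from pip._internal.models.link import Link

    link = Link(
        'https://files.pythonhosted.org/packages/pip-20.1-py2.py3-none-any.whl'
        '#sha256=' + 'ab' * 32
    )
    print(link.filename)   # pip-20.1-py2.py3-none-any.whl
    print(link.is_wheel)   # True
    print(link.hash_name)  # sha256
    print(link.hash)       # abab...ab (64 hex chars)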
25 sources/pip_20.1/_internal/models/scheme.py Normal file
@@ -0,0 +1,25 @@
"""
For types associated with installation schemes.

For a general overview of available schemes and their context, see
https://docs.python.org/3/install/index.html#alternate-installation.
"""


class Scheme(object):
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    def __init__(
        self,
        platlib,  # type: str
        purelib,  # type: str
        headers,  # type: str
        scripts,  # type: str
        data,  # type: str
    ):
        self.platlib = platlib
        self.purelib = purelib
        self.headers = headers
        self.scripts = scripts
        self.data = data
133 sources/pip_20.1/_internal/models/search_scope.py Normal file
@@ -0,0 +1,133 @@
import itertools
import logging
import os
import posixpath

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.models.index import PyPI
from pip._internal.utils.compat import has_tls
from pip._internal.utils.misc import normalize_path, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List


logger = logging.getLogger(__name__)


class SearchScope(object):

    """
    Encapsulates the locations that pip is configured to search.
    """

    @classmethod
    def create(
        cls,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> SearchScope
        """
        Create a SearchScope object after normalizing the `find_links`.
        """
        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        built_find_links = []  # type: List[str]
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            built_find_links.append(link)

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not has_tls():
            for link in itertools.chain(index_urls, built_find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == 'https':
                    logger.warning(
                        'pip is configured with locations that require '
                        'TLS/SSL, however the ssl module in Python is not '
                        'available.'
                    )
                    break

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
        )

    def __init__(
        self,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> None
        self.find_links = find_links
        self.index_urls = index_urls

    def get_formatted_locations(self):
        # type: () -> str
        lines = []
        redacted_index_urls = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            for url in self.index_urls:

                redacted_index_url = redact_auth_from_url(url)

                # Parse the URL
                purl = urllib_parse.urlsplit(redacted_index_url)

                # URL is generally invalid if scheme and netloc is missing
                # there are issues with Python and URL parsing, so this test
                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
                # always parse invalid URLs correctly - it should raise
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "{}" seems invalid, '
                        'please provide a scheme.'.format(redacted_index_url))

                redacted_index_urls.append(redacted_index_url)

            lines.append('Looking in indexes: {}'.format(
                ', '.join(redacted_index_urls)))

        if self.find_links:
            lines.append(
                'Looking in links: {}'.format(', '.join(
                    redact_auth_from_url(url) for url in self.find_links))
            )
        return '\n'.join(lines)

    def get_index_urls_locations(self, project_name):
        # type: (str) -> List[str]
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            # type: (str) -> str
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]
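A minimal sketch of how --index-url/--find-links values are held, using a hypothetical authenticated index:

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(
        find_links=[],
        index_urls=['https://user:secret@example.org/simple'],
    )
    print(scope.get_formatted_locations())
    # Looking in indexes: https://user:****@example.org/simple
    print(scope.get_index_urls_locations('Requests'))
    # ['https://user:secret@example.org/simple/requests/']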
47 sources/pip_20.1/_internal/models/selection_prefs.py Normal file
@@ -0,0 +1,47 @@
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional
    from pip._internal.models.format_control import FormatControl


class SelectionPreferences(object):

    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        allow_yanked,  # type: bool
        allow_all_prereleases=False,  # type: bool
        format_control=None,  # type: Optional[FormatControl]
        prefer_binary=False,  # type: bool
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> None
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        self.ignore_requires_python = ignore_requires_python
110 sources/pip_20.1/_internal/models/target_python.py Normal file
@@ -0,0 +1,110 @@
import sys

from pip._internal.utils.compatibility_tags import (
    get_supported,
    version_info_to_nodot,
)
from pip._internal.utils.misc import normalize_version_info
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Tuple

    from pip._vendor.packaging.tags import Tag


class TargetPython(object):

    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    def __init__(
        self,
        platform=None,  # type: Optional[str]
        py_version_info=None,  # type: Optional[Tuple[int, ...]]
        abi=None,  # type: Optional[str]
        implementation=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        """
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abi: A string or None. This is passed to compatibility_tags.py's
            get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Store the given py_version_info for when we call get_supported().
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        py_version = '.'.join(map(str, py_version_info[:2]))

        self.abi = abi
        self.implementation = implementation
        self.platform = platform
        self.py_version = py_version
        self.py_version_info = py_version_info

        # This is used to cache the return value of get_tags().
        self._valid_tags = None  # type: Optional[List[Tag]]

    def format_given(self):
        # type: () -> str
        """
        Format the given, non-None attributes for display.
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = '.'.join(
                str(part) for part in self._given_py_version_info
            )

        key_values = [
            ('platform', self.platform),
            ('version_info', display_version),
            ('abi', self.abi),
            ('implementation', self.implementation),
        ]
        return ' '.join(
            '{}={!r}'.format(key, value) for key, value in key_values
            if value is not None
        )

    def get_tags(self):
        # type: () -> List[Tag]
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # Pass versions=None if no py_version_info was given since
            # versions=None uses special default logic.
            py_version_info = self._given_py_version_info
            if py_version_info is None:
                version = None
            else:
                version = version_info_to_nodot(py_version_info)

            tags = get_supported(
                version=version,
                platform=self.platform,
                abi=self.abi,
                impl=self.implementation,
            )
            self._valid_tags = tags

        return self._valid_tags
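A minimal sketch of pinning a target interpreter, as pip's --platform/--python-version download options do (output of get_tags depends on the host interpreter):

    from pip._internal.models.target_python import TargetPython

    target = TargetPython(platform='manylinux1_x86_64', py_version_info=(3, 7))
    print(target.py_version)      # 3.7
    print(target.format_given())  # platform='manylinux1_x86_64' version_info='3.7'
    tags = target.get_tags()      # PEP 425 tags, most preferred first
    print(tags[0])                # e.g. cp37-cp37m-manylinux1_x86_64 (host dependent)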
78 sources/pip_20.1/_internal/models/wheel.py Normal file
@@ -0,0 +1,78 @@
"""Represents a wheel file and provides access to the various parts of the
name that have meaning.
"""
import re

from pip._vendor.packaging.tags import Tag

from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List


class Wheel(object):
    """A wheel file"""

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        # type: (str) -> None
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "{} is not a valid wheel filename.".format(filename)
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self):
        # type: () -> List[str]
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags):
        # type: (List[Tag]) -> int
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(tags.index(tag) for tag in self.file_tags if tag in tags)

    def supported(self, tags):
        # type: (List[Tag]) -> bool
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
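A minimal sketch of pulling the tag parts out of a wheel filename with the class above:

    from pip._internal.models.wheel import Wheel

    w = Wheel('pip-20.1-py2.py3-none-any.whl')
    print(w.name, w.version)     # pip 20.1
    print(sorted(w.pyversions))  # ['py2', 'py3']
    print(w.get_formatted_file_tags())
    # ['py2-none-any', 'py3-none-any']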
2 sources/pip_20.1/_internal/network/__init__.py Normal file
@@ -0,0 +1,2 @@
"""Contains purely network-related utilities.
"""
298
sources/pip_20.1/_internal/network/auth.py
Normal file
298
sources/pip_20.1/_internal/network/auth.py
Normal file
|
@ -0,0 +1,298 @@
|
||||||
|
"""Network Authentication Helpers
|
||||||
|
|
||||||
|
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
||||||
|
providing credentials in the context of network requests.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# The following comment should be removed at some point in the future.
|
||||||
|
# mypy: disallow-untyped-defs=False
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
||||||
|
from pip._vendor.requests.utils import get_netrc_auth
|
||||||
|
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||||
|
|
||||||
|
from pip._internal.utils.misc import (
|
||||||
|
ask,
|
||||||
|
ask_input,
|
||||||
|
ask_password,
|
||||||
|
remove_auth_from_url,
|
||||||
|
split_auth_netloc_from_url,
|
||||||
|
)
|
||||||
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||||
|
|
||||||
|
if MYPY_CHECK_RUNNING:
|
||||||
|
from optparse import Values
|
||||||
|
from typing import Dict, Optional, Tuple
|
||||||
|
|
||||||
|
from pip._internal.vcs.versioncontrol import AuthInfo
|
||||||
|
|
||||||
|
Credentials = Tuple[str, str, str]
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
try:
|
||||||
|
import keyring # noqa
|
||||||
|
except ImportError:
|
||||||
|
keyring = None
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning(
|
||||||
|
"Keyring is skipped due to an exception: %s", str(exc),
|
||||||
|
)
|
||||||
|
keyring = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_keyring_auth(url, username):
|
||||||
|
"""Return the tuple auth for a given url from keyring."""
|
||||||
|
if not url or not keyring:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
try:
|
||||||
|
get_credential = keyring.get_credential
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
logger.debug("Getting credentials from keyring for %s", url)
|
||||||
|
cred = get_credential(url, username)
|
||||||
|
if cred is not None:
|
||||||
|
return cred.username, cred.password
|
||||||
|
return None
|
||||||
|
|
||||||
|
if username:
|
||||||
|
logger.debug("Getting password from keyring for %s", url)
|
||||||
|
password = keyring.get_password(url, username)
|
||||||
|
if password:
|
||||||
|
return username, password
|
||||||
|
|
||||||
|
except Exception as exc:
|
||||||
|
logger.warning(
|
||||||
|
"Keyring is skipped due to an exception: %s", str(exc),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MultiDomainBasicAuth(AuthBase):
|
||||||
|
|
||||||
|
def __init__(self, prompting=True, index_urls=None):
|
||||||
|
# type: (bool, Optional[Values]) -> None
|
||||||
|
self.prompting = prompting
|
||||||
|
self.index_urls = index_urls
|
||||||
|
self.passwords = {} # type: Dict[str, AuthInfo]
|
||||||
|
# When the user is prompted to enter credentials and keyring is
|
||||||
|
# available, we will offer to save them. If the user accepts,
|
||||||
|
# this value is set to the credentials they entered. After the
|
||||||
|
# request authenticates, the caller should call
|
||||||
|
# ``save_credentials`` to save these.
|
||||||
|
self._credentials_to_save = None # type: Optional[Credentials]
|
||||||
|
|
||||||
|
def _get_index_url(self, url):
|
||||||
|
"""Return the original index URL matching the requested URL.
|
||||||
|
|
||||||
|
Cached or dynamically generated credentials may work against
|
||||||
|
the original index URL rather than just the netloc.
|
||||||
|
|
||||||
|
The provided url should have had its username and password
|
||||||
|
removed already. If the original index url had credentials then
|
||||||
|
they will be included in the return value.
|
||||||
|
|
||||||
|
Returns None if no matching index was found, or if --no-index
|
||||||
|
was specified by the user.
|
||||||
|
"""
|
||||||
|
if not url or not self.index_urls:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for u in self.index_urls:
|
||||||
|
prefix = remove_auth_from_url(u).rstrip("/") + "/"
|
||||||
|
if url.startswith(prefix):
|
||||||
|
return u
|
||||||
|
|
||||||
|
def _get_new_credentials(self, original_url, allow_netrc=True,
|
||||||
|
allow_keyring=True):
|
||||||
|
"""Find and return credentials for the specified URL."""
|
||||||
|
# Split the credentials and netloc from the url.
|
||||||
|
url, netloc, url_user_password = split_auth_netloc_from_url(
|
||||||
|
original_url,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Start with the credentials embedded in the url
|
||||||
|
username, password = url_user_password
|
||||||
|
if username is not None and password is not None:
|
||||||
|
logger.debug("Found credentials in url for %s", netloc)
|
||||||
|
return url_user_password
|
||||||
|
|
||||||
|
# Find a matching index url for this request
|
||||||
|
index_url = self._get_index_url(url)
|
||||||
|
if index_url:
|
||||||
|
# Split the credentials from the url.
|
||||||
|
index_info = split_auth_netloc_from_url(index_url)
|
||||||
|
if index_info:
|
||||||
|
index_url, _, index_url_user_password = index_info
|
||||||
|
logger.debug("Found index url %s", index_url)
|
||||||
|
|
||||||
|
# If an index URL was found, try its embedded credentials
|
||||||
|
if index_url and index_url_user_password[0] is not None:
|
||||||
|
username, password = index_url_user_password
|
||||||
|
if username is not None and password is not None:
|
||||||
|
logger.debug("Found credentials in index url for %s", netloc)
|
||||||
|
return index_url_user_password
|
||||||
|
|
||||||
|
# Get creds from netrc if we still don't have them
|
||||||
|
if allow_netrc:
|
||||||
|
netrc_auth = get_netrc_auth(original_url)
|
||||||
|
if netrc_auth:
|
||||||
|
logger.debug("Found credentials in netrc for %s", netloc)
|
||||||
|
return netrc_auth
|
||||||
|
|
||||||
|
# If we don't have a password and keyring is available, use it.
|
||||||
|
if allow_keyring:
|
||||||
|
# The index url is more specific than the netloc, so try it first
|
||||||
|
kr_auth = (
|
||||||
|
get_keyring_auth(index_url, username) or
|
||||||
|
get_keyring_auth(netloc, username)
|
||||||
|
)
|
||||||
|
if kr_auth:
|
||||||
|
logger.debug("Found credentials in keyring for %s", netloc)
|
||||||
|
return kr_auth
|
||||||
|
|
||||||
|
return username, password
|
||||||
|
|
||||||
|
def _get_url_and_credentials(self, original_url):
|
||||||
|
"""Return the credentials to use for the provided URL.
|
||||||
|
|
||||||
|
If allowed, netrc and keyring may be used to obtain the
|
||||||
|
correct credentials.
|
||||||
|
|
||||||
|
Returns (url_without_credentials, username, password). Note
|
        that even if the original URL contains credentials, this
        function may return a different username and password.
        """
        url, netloc, _ = split_auth_netloc_from_url(original_url)

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        if username is None and password is None:
            # No stored credentials. Acquire new credentials without prompting
            # the user. (e.g. from netrc, keyring, or the URL itself)
            username, password = self._get_new_credentials(original_url)

        if username is not None or password is not None:
            # Convert the username and password if they're None, so that
            # this netloc will show up as "cached" in the conditional above.
            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
            # cache the value that is going to be used.
            username = username or ""
            password = password or ""

            # Store any acquired credentials.
            self.passwords[netloc] = (username, password)

        assert (
            # Credentials were found
            (username is not None and password is not None) or
            # Credentials were not found
            (username is None and password is None)
        ), "Could not load credentials from url: {}".format(original_url)

        return url, username, password

    def __call__(self, req):
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)

        # Set the url of the request to the url without any credentials
        req.url = url

        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(self, netloc):
        username = ask_input("User for {}: ".format(netloc))
        if not username:
            return None, None, False  # three values, matching the unpack in handle_401
        auth = get_keyring_auth(netloc, username)
        if auth:
            return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self):
        if not keyring:
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp, **kwargs):
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning(
                '401 Error, Credentials not correct for %s', resp.request.url,
            )

    def save_credentials(self, resp, **kwargs):
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return

        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info('Saving credentials to keyring')
                keyring.set_password(*creds)
            except Exception:
                logger.exception('Failed to save credentials')
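For orientation: MultiDomainBasicAuth is an ordinary requests auth callable, so it can be attached to any session. A minimal sketch, assuming this vendored pip 20.1 layout and a hypothetical index that answers 401 until valid credentials are supplied; this is pip-internal API, not a supported interface.

# Illustrative sketch only: pip-internal API, hypothetical index host.
from pip._vendor import requests

from pip._internal.network.auth import MultiDomainBasicAuth

session = requests.Session()
# prompting=True enables the interactive handle_401 retry flow shown above.
session.auth = MultiDomainBasicAuth(
    prompting=True,
    index_urls=["https://pypi.example.org/simple/"],  # hypothetical index
)

# __call__ strips credentials embedded in the URL and re-sends them as a
# Basic Authorization header; a 401 response triggers handle_401, which
# prompts, retries, and optionally saves the credentials to keyring.
resp = session.get("https://pypi.example.org/simple/requests/")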
81 sources/pip_20.1/_internal/network/cache.py Normal file
@@ -0,0 +1,81 @@
"""HTTP cache implementation.
"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

import os
from contextlib import contextmanager

from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.requests.models import Response

from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional


def is_from_cache(response):
    # type: (Response) -> bool
    return getattr(response, "from_cache", False)


@contextmanager
def suppressed_cache_errors():
    """If we can't access the cache then we can just skip caching and process
    requests as if caching wasn't enabled.
    """
    try:
        yield
    except (OSError, IOError):
        pass


class SafeFileCache(BaseCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, directory):
        # type: (str) -> None
        assert directory is not None, "Cache directory must not be None."
        super(SafeFileCache, self).__init__()
        self.directory = directory

    def _get_cache_path(self, name):
        # type: (str) -> str
        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
        # class for backwards-compatibility and to avoid using a non-public
        # method.
        hashed = FileCache.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        # type: (str) -> Optional[bytes]
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            with open(path, 'rb') as f:
                return f.read()

    def set(self, key, value):
        # type: (str, bytes) -> None
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            with adjacent_tmp_file(path) as f:
                f.write(value)

            replace(f.name, path)

    def delete(self, key):
        # type: (str) -> None
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(path)
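SafeFileCache is a drop-in BaseCache for CacheControl that turns cache I/O failures into cache misses. A minimal sketch of the wiring, mirroring what PipSession does in session.py further down; the cache directory is hypothetical.

# Illustrative sketch only: mirrors the adapter wiring in session.py below.
from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControlAdapter

from pip._internal.network.cache import SafeFileCache

session = requests.Session()
session.mount(
    "https://",
    CacheControlAdapter(cache=SafeFileCache("/tmp/pip-http-cache")),
)
# If the directory is unreadable or unwritable, suppressed_cache_errors()
# turns every get/set/delete into a no-op and requests proceed uncached.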
200 sources/pip_20.1/_internal/network/download.py Normal file
@@ -0,0 +1,200 @@
"""Download files with progress indicators.
"""
import cgi
import logging
import mimetypes
import os

from pip._vendor import requests
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE

from pip._internal.cli.progress_bars import DownloadProgressProvider
from pip._internal.models.index import PyPI
from pip._internal.network.cache import is_from_cache
from pip._internal.network.utils import response_chunks
from pip._internal.utils.misc import (
    format_size,
    redact_auth_from_url,
    splitext,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Iterable, Optional

    from pip._vendor.requests.models import Response

    from pip._internal.models.link import Link
    from pip._internal.network.session import PipSession

logger = logging.getLogger(__name__)


def _get_http_response_size(resp):
    # type: (Response) -> Optional[int]
    try:
        return int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        return None


def _prepare_download(
    resp,  # type: Response
    link,  # type: Link
    progress_bar  # type: str
):
    # type: (...) -> Iterable[bytes]
    total_length = _get_http_response_size(resp)

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)

    if total_length:
        logged_url = '{} ({})'.format(logged_url, format_size(total_length))

    if is_from_cache(resp):
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    return DownloadProgressProvider(
        progress_bar, max=total_length
    )(chunks)


def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.
    """
    return os.path.basename(filename)


def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get('filename')
    if filename:
        # We need to sanitize the filename to prevent directory traversal
        # in case the filename contains ".." path parts.
        filename = sanitize_content_filename(filename)
    return filename or default_filename


def _get_http_response_filename(resp, link):
    # type: (Response, Link) -> str
    """Get an ideal filename from the given HTTP response, falling back to
    the link filename if not provided.
    """
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]  # type: Optional[str]
    if not ext:
        ext = mimetypes.guess_extension(
            resp.headers.get('content-type', '')
        )
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    return filename


def _http_get_download(session, link):
    # type: (PipSession, Link) -> Response
    target_url = link.url.split('#', 1)[0]
    resp = session.get(
        target_url,
        # We use Accept-Encoding: identity here because requests
        # defaults to accepting compressed responses. This breaks in
        # a variety of ways depending on how the server is configured.
        # - Some servers will notice that the file isn't a compressible
        #   file and will leave the file alone and with an empty
        #   Content-Encoding
        # - Some servers will notice that the file is already
        #   compressed and will leave the file alone and will add a
        #   Content-Encoding: gzip header
        # - Some servers won't notice anything at all and will take
        #   a file that's already been compressed and compress it again
        #   and set the Content-Encoding: gzip header
        # By setting this to request only the identity encoding We're
        # hoping to eliminate the third case. Hopefully there does not
        # exist a server which when given a file will notice it is
        # already compressed and that you're not asking for a
        # compressed file and will then decompress it before sending
        # because if that's the case I don't think it'll ever be
        # possible to make this work.
        headers={"Accept-Encoding": "identity"},
        stream=True,
    )
    resp.raise_for_status()
    return resp


class Download(object):
    def __init__(
        self,
        response,  # type: Response
        filename,  # type: str
        chunks,  # type: Iterable[bytes]
    ):
        # type: (...) -> None
        self.response = response
        self.filename = filename
        self.chunks = chunks


class Downloader(object):
    def __init__(
        self,
        session,  # type: PipSession
        progress_bar,  # type: str
    ):
        # type: (...) -> None
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, link):
        # type: (Link) -> Download
        try:
            resp = _http_get_download(self._session, link)
        except requests.HTTPError as e:
            logger.critical(
                "HTTP error %s while getting %s", e.response.status_code, link
            )
            raise

        return Download(
            resp,
            _get_http_response_filename(resp, link),
            _prepare_download(resp, link, self._progress_bar),
        )
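Downloader composes the three helpers above: it fetches with Accept-Encoding: identity, picks a filename from Content-Disposition, and wraps the body in a (possibly progress-reporting) chunk iterator. A hedged sketch of driving it directly; the URL is hypothetical and this is pip-internal API, subject to change.

# Illustrative sketch only: pip-internal API, hypothetical URL.
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.network.session import PipSession

downloader = Downloader(PipSession(), progress_bar="on")
download = downloader(Link("https://files.example.org/pkg-1.0.tar.gz"))

# download.filename comes from Content-Disposition (or the URL); iterating
# the chunks renders the progress bar as a side effect.
with open(download.filename, "wb") as f:
    for chunk in download.chunks:
        f.write(chunk)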
421 sources/pip_20.1/_internal/network/session.py Normal file
@@ -0,0 +1,421 @@
"""PipSession and supporting code, containing all pip-specific
network request configuration and behavior.
"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

import email.utils
import json
import logging
import mimetypes
import os
import platform
import sys
import warnings

from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

from pip import __version__
from pip._internal.network.auth import MultiDomainBasicAuth
from pip._internal.network.cache import SafeFileCache
# Import ssl from compat so the initial import occurs in only one place.
from pip._internal.utils.compat import has_tls, ipaddress
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import (
    build_url_from_netloc,
    get_installed_version,
    parse_netloc,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import url_to_path

if MYPY_CHECK_RUNNING:
    from typing import (
        Iterator, List, Optional, Tuple, Union,
    )

    from pip._internal.models.link import Link

    SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


logger = logging.getLogger(__name__)


# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]  # type: List[SecureOrigin]


# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    'BUILD_BUILDID',
    # Jenkins
    'BUILD_ID',
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    'CI',
    # Explicit environment variable.
    'PIP_IS_CI',
)


def looks_like_ci():
    # type: () -> bool
    """
    Return whether it looks like pip is running under CI.
    """
    # We don't use the method of checking for a tty (e.g. using isatty())
    # because some CI systems mimic a tty (e.g. Travis CI). Thus that
    # method doesn't provide definitive information in either direction.
    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": __version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if has_tls():
        import _ssl as ssl
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI. Rather, it is a null or
    # inconclusive result. Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )


class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        super(InsecureHTTPAdapter, self).cert_verify(
            conn=conn, url=url, verify=False, cert=cert
        )


class InsecureCacheControlAdapter(CacheControlAdapter):

    def cert_verify(self, conn, url, verify, cert):
        super(InsecureCacheControlAdapter, self).cert_verify(
            conn=conn, url=url, verify=False, cert=cert
        )


class PipSession(requests.Session):

    timeout = None  # type: Optional[int]

    def __init__(self, *args, **kwargs):
        """
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        """
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        trusted_hosts = kwargs.pop("trusted_hosts", [])  # type: List[str]
        index_urls = kwargs.pop("index_urls", None)

        super(PipSession, self).__init__(*args, **kwargs)

        # Namespace the attribute with "pip_" just in case to prevent
        # possible conflicts with the base class.
        self.pip_trusted_origins = []  # type: List[Tuple[str, Optional[int]]]

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching so we'll use it for all http:// URLs.
        # If caching is disabled, we will also use it for
        # https:// hosts that we've marked as ignoring
        # TLS errors for (trusted-hosts).
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        # We want to _only_ cache responses on securely fetched origins or when
        # the host is specified as trusted. We do this because
        # we can't validate the response of an insecurely/untrusted fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        for host in trusted_hosts:
            self.add_trusted_host(host, suppress_logging=True)

    def add_trusted_host(self, host, source=None, suppress_logging=False):
        # type: (str, Optional[str], bool) -> None
        """
        :param host: It is okay to provide a host that has previously been
            added.
        :param source: An optional source string, for logging where the host
            string came from.
        """
        if not suppress_logging:
            msg = 'adding trusted host: {!r}'.format(host)
            if source is not None:
                msg += ' (from {})'.format(source)
            logger.info(msg)

        host_port = parse_netloc(host)
        if host_port not in self.pip_trusted_origins:
            self.pip_trusted_origins.append(host_port)

        self.mount(
            build_url_from_netloc(host) + '/',
            self._trusted_host_adapter
        )
        if not host_port[1]:
            # Mount wildcard ports for the same host.
            self.mount(
                build_url_from_netloc(host) + ':',
                self._trusted_host_adapter
            )

    def iter_secure_origins(self):
        # type: () -> Iterator[SecureOrigin]
        for secure_origin in SECURE_ORIGINS:
            yield secure_origin
        for host, port in self.pip_trusted_origins:
            yield ('*', host, '*' if port is None else port)

    def is_secure_origin(self, location):
        # type: (Link) -> bool
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin_protocol, origin_host, origin_port = (
            parsed.scheme, parsed.hostname, parsed.port,
        )

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        origin_protocol = origin_protocol.rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            secure_protocol, secure_host, secure_port = secure_origin
            if origin_protocol != secure_protocol and secure_protocol != "*":
                continue

            try:
                addr = ipaddress.ip_address(
                    None
                    if origin_host is None
                    else six.ensure_text(origin_host)
                )
                network = ipaddress.ip_network(
                    six.ensure_text(secure_host)
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (
                    origin_host and
                    origin_host.lower() != secure_host.lower() and
                    secure_host != "*"
                ):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches.
            if (
                origin_port != secure_port and
                secure_port != "*" and
                secure_port is not None
            ):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            origin_host,
            origin_host,
        )

        return False

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
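PipSession bundles all of the above: the retry policy, per-scheme adapters, trusted-host handling, the file:// adapter, and the auth handler. A minimal construction sketch, mirroring how pip's CLI builds its session; the cache path and mirror host are hypothetical.

# Illustrative sketch only: pip-internal API, hypothetical paths/hosts.
from pip._internal.network.session import PipSession

session = PipSession(
    retries=3,
    cache="/tmp/pip-http-cache",           # hypothetical cache directory
    trusted_hosts=["mirror.example.org"],  # plain-HTTP host, TLS unchecked
)
session.timeout = 15  # applied to every call via the request() override

resp = session.get("https://pypi.org/simple/")
resp.raise_for_status()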
48 sources/pip_20.1/_internal/network/utils.py Normal file
@@ -0,0 +1,48 @@
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Iterator


def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
    # type: (Response, int) -> Iterator[bytes]
    """Given a requests Response, provide the data chunks.
    """
    try:
        # Special case for urllib3.
        for chunk in response.raw.stream(
            chunk_size,
            # We use decode_content=False here because we don't
            # want urllib3 to mess with the raw bytes we get
            # from the server. If we decompress inside of
            # urllib3 then we cannot verify the checksum
            # because the checksum will be of the compressed
            # file. This breakage will only occur if the
            # server adds a Content-Encoding header, which
            # depends on how the server was configured:
            # - Some servers will notice that the file isn't a
            #   compressible file and will leave the file alone
            #   and with an empty Content-Encoding
            # - Some servers will notice that the file is
            #   already compressed and will leave the file
            #   alone and will add a Content-Encoding: gzip
            #   header
            # - Some servers won't notice anything at all and
            #   will take a file that's already been compressed
            #   and compress it again and set the
            #   Content-Encoding: gzip header
            #
            # By setting this not to decode automatically we
            # hope to eliminate problems with the second case.
            decode_content=False,
        ):
            yield chunk
    except AttributeError:
        # Standard file-like object.
        while True:
            chunk = response.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
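response_chunks is why pip can hash an artifact while downloading it: with decode_content=False the chunks are exactly the bytes the server sent, so the digest matches the published one. A small sketch with a hypothetical URL.

# Illustrative sketch only: hypothetical URL.
import hashlib

from pip._vendor import requests

from pip._internal.network.utils import response_chunks

resp = requests.get("https://files.example.org/pkg-1.0.tar.gz", stream=True)
sha = hashlib.sha256()
for chunk in response_chunks(resp, chunk_size=8192):
    sha.update(chunk)  # hashes the wire bytes, not a decompressed stream
print(sha.hexdigest())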
44 sources/pip_20.1/_internal/network/xmlrpc.py Normal file
@@ -0,0 +1,44 @@
"""xmlrpclib.Transport implementation
"""

# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False

import logging

from pip._vendor import requests
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
#       why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse

logger = logging.getLogger(__name__)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
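The transport routes XML-RPC calls through a PipSession so proxies, retries, and auth all apply. This is how `pip search` talked to PyPI's XML-RPC endpoint at the time; a sketch (note that PyPI has since disabled that search API server-side).

# Illustrative sketch only: the PyPI XML-RPC search API is now retired.
from pip._vendor.six.moves import xmlrpc_client

from pip._internal.network.session import PipSession
from pip._internal.network.xmlrpc import PipXmlrpcTransport

index_url = "https://pypi.org/pypi"
transport = PipXmlrpcTransport(index_url, PipSession())
pypi = xmlrpc_client.ServerProxy(index_url, transport)
hits = pypi.search({"name": "requests", "summary": "requests"}, "or")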
0 sources/pip_20.1/_internal/operations/__init__.py Normal file
0 sources/pip_20.1/_internal/operations/build/__init__.py Normal file
40 sources/pip_20.1/_internal/operations/build/metadata.py Normal file
@@ -0,0 +1,40 @@
"""Metadata generation logic for source distributions.
"""

import logging
import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.build_env import BuildEnvironment
    from pip._vendor.pep517.wrappers import Pep517HookCaller

logger = logging.getLogger(__name__)


def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)
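generate_metadata needs a live build environment and a PEP 517 hook caller, and its TempDirectory(globally_managed=True) requires pip's global temp-dir manager to be active. A hedged sketch; the source directory and build backend are hypothetical, and BuildEnvironment() is constructed bare here purely for illustration.

# Illustrative sketch only: pip-internal API plus the vendored pep517.
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import BuildEnvironment
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.utils.temp_dir import global_tempdir_manager

with global_tempdir_manager():  # required by globally_managed TempDirectory
    backend = Pep517HookCaller("/src/mypkg", "setuptools.build_meta")
    distinfo = generate_metadata(build_env=BuildEnvironment(), backend=backend)
    print(distinfo)  # path to the generated .dist-info directory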
77 sources/pip_20.1/_internal/operations/build/metadata_legacy.py Normal file
@@ -0,0 +1,77 @@
"""Metadata generation logic for legacy source distributions.
"""

import logging
import os

from pip._internal.exceptions import InstallationError
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.build_env import BuildEnvironment

logger = logging.getLogger(__name__)


def _find_egg_info(directory):
    # type: (str) -> str
    """Find an .egg-info subdirectory in `directory`.
    """
    filenames = [
        f for f in os.listdir(directory) if f.endswith(".egg-info")
    ]

    if not filenames:
        raise InstallationError(
            "No .egg-info directory found in {}".format(directory)
        )

    if len(filenames) > 1:
        raise InstallationError(
            "More than one .egg-info directory found in {}".format(
                directory
            )
        )

    return os.path.join(directory, filenames[0])


def generate_metadata(
    build_env,  # type: BuildEnvironment
    setup_py_path,  # type: str
    source_dir,  # type: str
    isolated,  # type: bool
    details,  # type: str
):
    # type: (...) -> str
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        'Running setup.py (path:%s) egg_info for package %s',
        setup_py_path, details,
    )

    egg_info_dir = TempDirectory(
        kind="pip-egg-info", globally_managed=True
    ).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        call_subprocess(
            args,
            cwd=source_dir,
            command_desc='python setup.py egg_info',
        )

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)
46 sources/pip_20.1/_internal/operations/build/wheel.py Normal file
@@ -0,0 +1,46 @@
import logging
import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional
    from pip._vendor.pep517.wrappers import Pep517HookCaller

logger = logging.getLogger(__name__)


def build_wheel_pep517(
    name,  # type: str
    backend,  # type: Pep517HookCaller
    metadata_directory,  # type: str
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    if build_options:
        # PEP 517 does not support --build-options
        logger.error('Cannot build wheel for %s using PEP 517 when '
                     '--build-option is present' % (name,))
        return None
    try:
        logger.debug('Destination directory: %s', tempd)

        runner = runner_with_spinner_message(
            'Building wheel for {} (PEP 517)'.format(name)
        )
        with backend.subprocess_runner(runner):
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        logger.error('Failed building wheel for %s', name)
        return None
    return os.path.join(tempd, wheel_name)
115 sources/pip_20.1/_internal/operations/build/wheel_legacy.py Normal file
@@ -0,0 +1,115 @@
import logging
import os.path

from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.setuptools_build import (
    make_setuptools_bdist_wheel_args,
)
from pip._internal.utils.subprocess import (
    LOG_DIVIDER,
    call_subprocess,
    format_command_args,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Text

logger = logging.getLogger(__name__)


def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> str
    """Format command information for logging."""
    command_desc = format_command_args(command_args)
    text = 'Command arguments: {}\n'.format(command_desc)

    if not command_output:
        text += 'Command output: None'
    elif logger.getEffectiveLevel() > logging.DEBUG:
        text += 'Command output: [use --verbose to show]'
    else:
        if not command_output.endswith('\n'):
            command_output += '\n'
        text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)

    return text


def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    name,  # type: str
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> Optional[str]
    """Return the path to the wheel in the temporary build directory."""
    # Sort for determinism.
    names = sorted(names)
    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])


def build_wheel_legacy(
    name,  # type: str
    setup_py_path,  # type: str
    source_dir,  # type: str
    global_options,  # type: List[str]
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    wheel_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    spin_message = 'Building wheel for {} (setup.py)'.format(name)
    with open_spinner(spin_message) as spinner:
        logger.debug('Destination directory: %s', tempd)

        try:
            output = call_subprocess(
                wheel_args,
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error('Failed building wheel for %s', name)
            return None

        names = os.listdir(tempd)
        wheel_path = get_legacy_build_wheel_path(
            names=names,
            temp_dir=tempd,
            name=name,
            command_args=wheel_args,
            command_output=output,
        )
        return wheel_path
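build_wheel_legacy shells out to `setup.py bdist_wheel` and then resolves the produced file with get_legacy_build_wheel_path. A sketch of a direct call; all paths are hypothetical, and the destination directory must already exist.

# Illustrative sketch only: pip-internal API, hypothetical paths.
from pip._internal.operations.build.wheel_legacy import build_wheel_legacy

wheel_path = build_wheel_legacy(
    name="mypkg",
    setup_py_path="/src/mypkg/setup.py",
    source_dir="/src/mypkg",
    global_options=[],
    build_options=[],
    tempd="/tmp/pip-wheel-build",  # must exist; the built wheel lands here
)
if wheel_path is None:
    print("wheel build failed; see the log output above")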
163 sources/pip_20.1/_internal/operations/check.py Normal file
@@ -0,0 +1,163 @@
"""Validation of dependencies of packages
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

import logging
from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.distributions import (
    make_distribution_for_install_requirement,
)
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement
    from typing import (
        Any, Callable, Dict, Optional, Set, Tuple, List
    )

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    if should_ignore is None:
        def should_ignore(name):
            return False

    missing = {}
    conflicting = {}

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install.
    """

    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_pkg_resources_distribution()

        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[str], PackageSet) -> Set[str]
    packages_affected = set(would_be_installed)

    for package_name in package_set:
        if package_name in packages_affected:
            continue

        for req in package_set[package_name].requires:
            if canonicalize_name(req.name) in packages_affected:
                packages_affected.add(package_name)
                break

    return packages_affected
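These helpers are essentially the whole of `pip check`: build a PackageSet from the installed distributions, then report missing and conflicting dependencies. A sketch of calling them directly; note the tuple shapes match the Missing and Conflicting aliases above.

# Illustrative sketch only: pip-internal API.
from pip._internal.operations.check import (
    check_package_set,
    create_package_set_from_installed,
)

package_set, parse_problems = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)

for project, deps in missing.items():
    for dep_name, req in deps:
        print("{} requires {}, which is not installed".format(project, req))
for project, conflicts in conflicting.items():
    for dep_name, installed_version, req in conflicts:
        print("{} requires {}, but {} {} is installed".format(
            project, req, dep_name, installed_version))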
272 sources/pip_20.1/_internal/operations/freeze.py Normal file
@@ -0,0 +1,272 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import collections
import logging
import os

from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.direct_url_helpers import (
    direct_url_as_pep440_direct_reference,
    dist_get_direct_url,
)
from pip._internal.utils.misc import (
    dist_is_editable,
    get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
    )
    from pip._internal.cache import WheelCache
    from pip._vendor.pkg_resources import (
        Distribution, Requirement
    )

    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]


logger = logging.getLogger(__name__)


def freeze(
    requirement=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    local_only=None,  # type: Optional[bool]
    user_only=None,  # type: Optional[bool]
    paths=None,  # type: Optional[List[str]]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    find_links = find_links or []

    for link in find_links:
        yield '-f {}'.format(link)
    installations = {}  # type: Dict[str, FrozenRequirement]
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only,
                                            paths=paths):
        try:
            req = FrozenRequirement.from_dist(dist)
        except RequirementParseError as exc:
            # We include dist rather than dist.project_name because the
            # dist string includes more information, like the version and
            # location. We also include the exception message to aid
            # troubleshooting.
            logger.warning(
                'Could not generate requirement for distribution %r: %s',
                dist, exc
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()  # type: Set[str]
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(
                            line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub('', line).strip(),
                                    line_req.name
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            yield str(installations[
                                line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()


def get_requirement_info(dist):
    # type: (Distribution) -> RequirementInfo
    """
    Compute and return values (req, editable, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist_is_editable(dist):
        return (None, False, [])

    location = os.path.normcase(os.path.abspath(dist.location))

    from pip._internal.vcs import vcs, RemoteNotFoundError
    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        req = dist.as_requirement()
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r', req,
            location,
        )
        comments = [
            '# Editable install with no version control ({})'.format(req)
        ]
        return (location, True, comments)

    try:
        req = vcs_backend.get_src_requirement(location, dist.project_name)
    except RemoteNotFoundError:
        req = dist.as_requirement()
        comments = [
            '# Editable {} install with no remote ({})'.format(
                type(vcs_backend).__name__, req,
            )
        ]
        return (location, True, comments)

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            vcs_backend.name,
        )
        return (None, True, [])

    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
    else:
        if req is not None:
            return (req, True, [])

    logger.warning(
        'Could not determine repository location of %s', location
    )
    comments = ['## !! Could not determine repository location']

    return (None, False, comments)
|
||||||
|
|
||||||
|
|
||||||
|
class FrozenRequirement(object):
|
||||||
|
def __init__(self, name, req, editable, comments=()):
|
||||||
|
# type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
|
||||||
|
self.name = name
|
||||||
|
self.canonical_name = canonicalize_name(name)
|
||||||
|
self.req = req
|
||||||
|
self.editable = editable
|
||||||
|
self.comments = comments
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dist(cls, dist):
|
||||||
|
# type: (Distribution) -> FrozenRequirement
|
||||||
|
# TODO `get_requirement_info` is taking care of editable requirements.
|
||||||
|
# TODO This should be refactored when we will add detection of
|
||||||
|
# editable that provide .dist-info metadata.
|
||||||
|
req, editable, comments = get_requirement_info(dist)
|
||||||
|
if req is None and not editable:
|
||||||
|
# if PEP 610 metadata is present, attempt to use it
|
||||||
|
direct_url = dist_get_direct_url(dist)
|
||||||
|
if direct_url:
|
||||||
|
req = direct_url_as_pep440_direct_reference(
|
||||||
|
direct_url, dist.project_name
|
||||||
|
)
|
||||||
|
comments = []
|
||||||
|
if req is None:
|
||||||
|
# name==version requirement
|
||||||
|
req = dist.as_requirement()
|
||||||
|
|
||||||
|
return cls(dist.project_name, req, editable, comments=comments)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
req = self.req
|
||||||
|
if self.editable:
|
||||||
|
req = '-e {}'.format(req)
|
||||||
|
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
|
|
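
# --- Illustrative sketch, not part of the vendored pip source. It shows how
# the internal freeze() generator above is typically consumed: each yielded
# line is a requirements-file option, a line echoed from an input requirements
# file, or a FrozenRequirement rendered by __str__ ("-e <url>" for editables,
# "name==version" otherwise). freeze() is an internal, unstable pip API; this
# is only a usage illustration.
#
#   for line in freeze(local_only=True):
#       print(line)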
@ -0,0 +1,2 @@
"""For modules related to installing packages.
"""
@ -0,0 +1,52 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging

from pip._internal.utils.logging import indent_log
from pip._internal.utils.setuptools_build import make_setuptools_develop_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.build_env import BuildEnvironment


logger = logging.getLogger(__name__)


def install_editable(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    prefix,  # type: Optional[str]
    home,  # type: Optional[str]
    use_user_site,  # type: bool
    name,  # type: str
    setup_py_path,  # type: str
    isolated,  # type: bool
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
):
    # type: (...) -> None
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info('Running setup.py develop for %s', name)

    args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        install_options=install_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    with indent_log():
        with build_env:
            call_subprocess(
                args,
                cwd=unpacked_source_directory,
            )
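
# --- Illustrative note, not part of the vendored pip source. The argument
# list built by make_setuptools_develop_args above amounts to a classic
# "setup.py develop" invocation, conceptually similar to (path hypothetical):
#
#   python /build/mypkg/setup.py develop
#
# with flags such as --prefix, --home or --user chosen from the options passed
# in; call_subprocess then runs it inside the prepared build environment.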
142 sources/pip_20.1/_internal/operations/install/legacy.py Normal file
@ -0,0 +1,142 @@
"""Legacy installation process, i.e. `setup.py install`.
"""

import logging
import os
import sys
from distutils.util import change_root

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_install_args
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.build_env import BuildEnvironment
    from pip._internal.models.scheme import Scheme


logger = logging.getLogger(__name__)


class LegacyInstallFailure(Exception):
    def __init__(self):
        # type: () -> None
        self.parent = sys.exc_info()


def install(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
    setup_py_path,  # type: str
    isolated,  # type: bool
    req_name,  # type: str
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
    req_description,  # type: str
):
    # type: (...) -> bool

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        try:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = make_setuptools_install_args(
                setup_py_path,
                global_options=global_options,
                install_options=install_options,
                record_filename=record_filename,
                root=root,
                prefix=prefix,
                header_dir=header_dir,
                home=home,
                use_user_site=use_user_site,
                no_user_config=isolated,
                pycompile=pycompile,
            )

            runner = runner_with_spinner_message(
                "Running setup.py install for {}".format(req_name)
            )
            with indent_log(), build_env:
                runner(
                    cmd=install_args,
                    cwd=unpacked_source_directory,
                )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                # Signal to the caller that we didn't install the new package
                return False

        except Exception:
            # Signal to the caller that we didn't install the new package
            raise LegacyInstallFailure

        # At this point, we have successfully installed the requirement.

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        deprecated(
            reason=(
                "{} did not indicate that it installed an "
                ".egg-info directory. Only setup.py projects "
                "generating .egg-info directories are supported."
            ).format(req_description),
            replacement=(
                "for maintainers: updating the setup.py of {0}. "
                "For users: contact the maintainers of {0} to let "
                "them know to update their setup.py.".format(
                    req_name
                )
            ),
            gone_in="20.2",
            issue=6998,
        )
        # FIXME: put the record somewhere
        return True

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir)
        )
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')

    return True
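
# --- Illustrative sketch, not part of the vendored pip source: the
# prepend_root helper above relies on distutils.util.change_root, which grafts
# an absolute path onto an alternative root. On POSIX, for example:
#
#   >>> from distutils.util import change_root
#   >>> change_root('/tmp/stage', '/usr/lib/python3/dist-packages/pkg')
#   '/tmp/stage/usr/lib/python3/dist-packages/pkg'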
631 sources/pip_20.1/_internal/operations/install/wheel.py Normal file
@ -0,0 +1,631 @@
"""Support for installing and building the "wheel" binary package format.
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import collections
import compileall
import contextlib
import csv
import logging
import os.path
import re
import shutil
import stat
import sys
import warnings
from base64 import urlsafe_b64encode
from itertools import starmap
from zipfile import ZipFile

from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.six import StringIO

from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_major_minor_version
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import current_umask, unpack_file
from pip._internal.utils.wheel import parse_wheel

if MYPY_CHECK_RUNNING:
    from email.message import Message
    from typing import (
        Dict, List, Optional, Sequence, Tuple, Any,
        Iterable, Iterator, Callable, Set,
    )

    from pip._internal.models.scheme import Scheme
    from pip._internal.utils.filesystem import NamedTemporaryFileResult

    InstalledCSVRow = Tuple[str, ...]


logger = logging.getLogger(__name__)


def normpath(src, p):
    # type: (str, str) -> str
    return os.path.relpath(src, p).replace(os.path.sep, '/')


def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    h, length = hash_file(path, blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    # unicode/str python2 issues
    return (digest, str(length))  # type: ignore
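
# --- Illustrative sketch, not part of the vendored pip source: rehash() above
# produces the digest format used for RECORD files by PEP 376/427, namely the
# urlsafe base64 of the raw sha256 digest with '=' padding stripped:
#
#   >>> import hashlib
#   >>> from base64 import urlsafe_b64encode
#   >>> raw = hashlib.sha256(b'hello').digest()
#   >>> 'sha256=' + urlsafe_b64encode(raw).decode('latin1').rstrip('=')
#   'sha256=LPJNul-wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ'
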
def csv_io_kwargs(mode):
    # type: (str) -> Dict[str, Any]
    """Return keyword arguments to properly open a CSV file
    in the given mode.
    """
    if sys.version_info.major < 3:
        return {'mode': '{}b'.format(mode)}
    else:
        return {'mode': mode, 'newline': ''}


def fix_script(path):
    # type: (str) -> Optional[bool]
    """Replace #!python with #!/path/to/python
    Return True if file was changed.
    """
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True
    return None


def wheel_root_is_purelib(metadata):
    # type: (Message) -> bool
    return metadata.get("Root-Is-Purelib", "").lower() == "true"


def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _split_ep(s):
        # type: (pkg_resources.EntryPoint) -> Tuple[str, str]
        """get the string representation of EntryPoint,
        remove space and split on '='
        """
        split_parts = str(s).replace(" ", "").split("=")
        return split_parts[0], split_parts[1]

    # convert the EntryPoint objects into strings with module:function
    console = dict(_split_ep(v) for v in console.values())
    gui = dict(_split_ep(v) for v in gui.values())
    return console, gui
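
# --- Illustrative sketch, not part of the vendored pip source: the INI-style
# entry_points.txt that get_entrypoints() parses, with hypothetical names.
# An input file containing:
#
#   [console_scripts]
#   mytool = mypkg.cli:main
#
# yields console == {'mytool': 'mypkg.cli:main'} and gui == {}.
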
def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.
    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group scripts by the path they were installed in
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
        grouped_by_dir[parent_dir].add(script_name)

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(i).rstrip(os.sep) for i in
        os.environ.get("PATH", "").split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    #     This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }  # type: Dict[str, Set[str]]
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts = sorted(dir_scripts)  # type: List[str]
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Add a note if any directory starts with ~
    warn_for_tilde = any(
        i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
    )
    if warn_for_tilde:
        tilde_warning_msg = (
            "NOTE: The current PATH contains path(s) starting with `~`, "
            "which may not be expanded by all applications."
        )
        msg_lines.append(tilde_warning_msg)

    # Returns the formatted multiline message
    return "\n".join(msg_lines)


def sorted_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
    """Return the given rows of a RECORD file in sorted order.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details).  For the rows
    passed to this function, the size can be an integer as an int or string,
    or the empty string.
    """
    # Normally, there should only be one row per path, in which case the
    # second and third elements don't come into play when sorting.
    # However, in cases in the wild where a path might happen to occur twice,
    # we don't want the sort operation to trigger an error (but still want
    # determinism).  Since the third element can be an int or string, we
    # coerce each element to a string to avoid a TypeError in this case.
    # For additional background, see--
    # https://github.com/pypa/pip/issues/5868
    return sorted(outrows, key=lambda row: tuple(str(x) for x in row))


def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: Set[str]
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """
    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        # Make a copy because we are mutating the row.
        row = list(row)
        old_path = row[0]
        new_path = installed.pop(old_path, old_path)
        row[0] = new_path
        if new_path in changed:
            digest, length = rehash(new_path)
            row[1] = digest
            row[2] = length
        installed_rows.append(tuple(row))
    for f in generated:
        digest, length = rehash(f)
        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
    for f in installed:
        installed_rows.append((installed[f], '', ''))
    return installed_rows


class MissingCallableSuffix(Exception):
    pass


def _raise_for_invalid_entrypoint(specification):
    # type: (str) -> None
    entry = get_export_entry(specification)
    if entry is not None and entry.suffix is None:
        raise MissingCallableSuffix(str(entry))


class PipScriptMaker(ScriptMaker):
    def make(self, specification, options=None):
        # type: (str, Dict[str, Any]) -> List[str]
        _raise_for_invalid_entrypoint(specification)
        return super(PipScriptMaker, self).make(specification, options)


def install_unpacked_wheel(
    name,  # type: str
    wheeldir,  # type: str
    wheel_zip,  # type: ZipFile
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
):
    # type: (...) -> None
    """Install a wheel.

    :param name: Name of the project to install
    :param wheeldir: Base directory of the unpacked wheel
    :param wheel_zip: open ZipFile for wheel being installed
    :param scheme: Distutils scheme dictating the install directories
    :param req_description: String used in place of the requirement, for
        logging
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    :raises UnsupportedWheel:
        * when the directory holds an unpacked wheel with incompatible
          Wheel-Version
        * when the .dist-info dir does not match the wheel
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #       installation.

    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    info_dir, metadata = parse_wheel(wheel_zip, name)

    if wheel_root_is_purelib(metadata):
        lib_dir = scheme.purelib
    else:
        lib_dir = scheme.platlib

    subdirs = os.listdir(source)
    data_dirs = [s for s in subdirs if s.endswith('.data')]

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        # type: (str, str, bool) -> None
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(
            source,  # type: str
            dest,  # type: str
            is_base,  # type: bool
            fixer=None,  # type: Optional[Callable[[str], Any]]
            filter=None  # type: Optional[Callable[[str], bool]]
    ):
        # type: (...) -> None
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir == '':
                subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Get the defined entry points
    ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # type: (str) -> bool
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = getattr(scheme, subdir)
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    scripts_to_generate = []

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append(
                'pip{} = {}'.format(sys.version_info[0], pip_script)
            )

        scripts_to_generate.append(
            'pip{} = {}'.format(get_major_minor_version(), pip_script)
        )
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append(
                'easy_install = ' + easy_install_script
            )

        scripts_to_generate.append(
            'easy_install-{} = {}'.format(
                get_major_minor_version(), easy_install_script
            )
        )
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))

    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))

    generated_console_scripts = []  # type: List[str]

    try:
        generated_console_scripts = maker.make_multiple(scripts_to_generate)
        generated.extend(generated_console_scripts)

        generated.extend(
            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
        )
    except MissingCallableSuffix as e:
        entry = e.args[0]
        raise InstallationError(
            "Invalid script entry point: {} for req: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information.".format(entry, req_description)
        )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 & ~current_umask()

    @contextlib.contextmanager
    def _generate_file(path, **kwargs):
        # type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
        os.chmod(f.name, generated_file_mode)
        replace(f.name, path)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b'pip\n')
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, 'RECORD')
    with open(record_path, **csv_io_kwargs('r')) as record_file:
        rows = get_csv_rows_for_installed(
            csv.reader(record_file),
            installed=installed,
            changed=changed,
            generated=generated,
            lib_dir=lib_dir)
    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
        writer = csv.writer(record_file)
        writer.writerows(sorted_outrows(rows))  # sort to simplify testing


def install_wheel(
    name,  # type: str
    wheel_path,  # type: str
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    _temp_dir_for_testing=None,  # type: Optional[str]
    direct_url=None,  # type: Optional[DirectUrl]
):
    # type: (...) -> None
    with TempDirectory(
        path=_temp_dir_for_testing, kind="unpacked-wheel"
    ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z:
        unpack_file(wheel_path, unpacked_dir.path)
        install_unpacked_wheel(
            name=name,
            wheeldir=unpacked_dir.path,
            wheel_zip=z,
            scheme=scheme,
            req_description=req_description,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
        )
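
# --- Illustrative sketch, not part of the vendored pip source: how pip's
# installer code drives install_wheel() above. The scheme object and paths
# are hypothetical; Scheme carries the purelib/platlib/scripts/headers/data
# install directories.
#
#   install_wheel(
#       name='mypkg',
#       wheel_path='/tmp/mypkg-1.0-py3-none-any.whl',
#       scheme=scheme,  # a pip._internal.models.scheme.Scheme instance
#       req_description='mypkg from /tmp/mypkg-1.0-py3-none-any.whl',
#   )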
568 sources/pip_20.1/_internal/operations/prepare.py Normal file
@ -0,0 +1,568 @@
"""Prepares a distribution for installation
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import logging
import mimetypes
import os
import shutil

from pip._vendor import requests
from pip._vendor.six import PY2

from pip._internal.distributions import (
    make_distribution_for_install_requirement,
)
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.exceptions import (
    DirectoryUrlHashUnsupported,
    HashMismatch,
    HashUnpinned,
    InstallationError,
    PreviousBuildDirError,
    VcsHashUnsupported,
)
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    display_path,
    hide_url,
    path_to_display,
    rmtree,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (
        Callable, List, Optional, Tuple,
    )

    from mypy_extensions import TypedDict

    from pip._internal.distributions import AbstractDistribution
    from pip._internal.index.package_finder import PackageFinder
    from pip._internal.models.link import Link
    from pip._internal.network.download import Downloader
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.req.req_tracker import RequirementTracker
    from pip._internal.utils.hashes import Hashes

    if PY2:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'ignore': Callable[[str, List[str]], List[str]],
                'symlinks': bool,
            },
            total=False,
        )
    else:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'copy_function': Callable[[str, str], None],
                'ignore': Callable[[str, List[str]], List[str]],
                'ignore_dangling_symlinks': bool,
                'symlinks': bool,
            },
            total=False,
        )

logger = logging.getLogger(__name__)


def _get_prepared_distribution(
    req,  # type: InstallRequirement
    req_tracker,  # type: RequirementTracker
    finder,  # type: PackageFinder
    build_isolation  # type: bool
):
    # type: (...) -> AbstractDistribution
    """Prepare a distribution for installation.
    """
    abstract_dist = make_distribution_for_install_requirement(req)
    with req_tracker.track(req):
        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
    return abstract_dist


def unpack_vcs_link(link, location):
    # type: (Link, str) -> None
    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
    assert vcs_backend is not None
    vcs_backend.unpack(location, url=hide_url(link.url))


class File(object):
    def __init__(self, path, content_type):
        # type: (str, str) -> None
        self.path = path
        self.content_type = content_type


def get_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> File
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    return File(from_path, content_type)


def _copy2_ignoring_special_files(src, dest):
    # type: (str, str) -> None
    """Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as e:
        # SpecialFileError may be raised due to either the source or
        # destination. If the destination was the cause then we would actually
        # care, but since the destination directory is deleted prior to
        # copy we ignore all of them assuming it is caused by the source.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(e),
            path_to_display(src),
            path_to_display(dest),
        )


def _copy_source_tree(source, target):
    # type: (str, str) -> None
    target_abspath = os.path.abspath(target)
    target_basename = os.path.basename(target_abspath)
    target_dirname = os.path.dirname(target_abspath)

    def ignore(d, names):
        # type: (str, List[str]) -> List[str]
        skipped = []  # type: List[str]
        if d == source:
            # Pulling in those directories can potentially be very slow,
            # exclude the following directories if they appear in the top
            # level dir (and only it).
            # See discussion at https://github.com/pypa/pip/pull/6770
            skipped += ['.tox', '.nox']
        if os.path.abspath(d) == target_dirname:
            # Prevent an infinite recursion if the target is in source.
            # This can happen when TMPDIR is set to ${PWD}/...
            # and we copy PWD to TMPDIR.
            skipped += [target_basename]
        return skipped

    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs

    if not PY2:
        # Python 2 does not support copy_function, so we only ignore
        # errors on special file copy in Python 3.
        kwargs['copy_function'] = _copy2_ignoring_special_files

    shutil.copytree(source, target, **kwargs)


def get_file_url(
    link,  # type: Link
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> File
    """Get file and optionally check its hash.
    """
    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    return File(from_path, content_type)


def unpack_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(file.path, location, file.content_type)

    return file


def _download_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session"""
    download = downloader(link)

    file_path = os.path.join(temp_dir, download.filename)
    with open(file_path, 'wb') as content_file:
        for chunk in download.chunks:
            content_file.write(chunk)

    if hashes:
        hashes.check_against_path(file_path)

    return file_path, download.response.headers.get('content-type', '')


def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Optional[Hashes]) -> Optional[str]
    """ Check download_dir for previously downloaded file with correct hash
    If a correct file is found return its path else None
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path
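
# --- Illustrative sketch, not part of the vendored pip source: the hash
# re-check performed by _check_download_dir above. A Hashes object built from
# known-good digests either passes silently or raises HashMismatch, which the
# function turns into "re-download" by unlinking the cached file. The digest
# below is a hypothetical placeholder.
#
#   from pip._internal.utils.hashes import Hashes
#   good = Hashes({'sha256': ['<expected-hex-digest>']})
#   good.check_against_path(download_path)  # raises HashMismatch on mismatch
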
class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        build_isolation,  # type: bool
        req_tracker,  # type: RequirementTracker
        downloader,  # type: Downloader
        finder,  # type: PackageFinder
        require_hashes,  # type: bool
        use_user_site,  # type: bool
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self.downloader = downloader
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

    @property
    def _download_should_save(self):
        # type: () -> bool
        if not self.download_dir:
            return False

        if os.path.exists(self.download_dir):
            return True

        logger.critical('Could not find download directory')
        raise InstallationError(
            "Could not find or access download directory '{}'"
            .format(self.download_dir))

    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare a requirement that would be obtained from req.link
        """
        assert req.link
        link = req.link

        # TODO: Breakup into smaller functions
        if link.scheme == 'file':
            path = link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

        download_dir = self.download_dir
        if link.is_wheel and self.wheel_download_dir:
            # when doing 'pip wheel` we download wheels to a
            # dedicated dir.
            download_dir = self.wheel_download_dir

        if link.is_wheel:
            if download_dir:
                # When downloading, we only unpack wheels to get
                # metadata.
                autodelete_unpacked = True
            else:
                # When installing a wheel, we use the unpacked
                # wheel.
                autodelete_unpacked = False
        else:
            # We always delete unpacked sdists after pip runs.
            autodelete_unpacked = True

        with indent_log():
            # Since source_dir is only set for editable requirements.
            assert req.source_dir is None
            req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
            # If a checkout exists, it's unwise to keep going.  version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '{}' due to a"
                    " pre-existing build directory ({}). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    .format(req, req.source_dir)
                )

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if self.require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if link.is_vcs:
                    raise VcsHashUnsupported()
                elif link.is_existing_dir():
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded.  This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not self.require_hashes)
            if self.require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                local_file = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=hashes,
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_file:
                req.local_file_path = local_file.path

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_file:
                    download_location = os.path.join(
                        download_dir, link.filename
                    )
                    if not os.path.exists(download_location):
                        shutil.copy(local_file.path, download_location)
                        logger.info(
                            'Saved %s', display_path(download_location)
                        )

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    'The editable requirement {} cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.'.format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(self.use_user_site)
|
|
||||||
|
return abstract_dist
|
||||||
|
|
||||||
|
def prepare_installed_requirement(
|
||||||
|
self,
|
||||||
|
req, # type: InstallRequirement
|
||||||
|
skip_reason # type: str
|
||||||
|
):
|
||||||
|
# type: (...) -> AbstractDistribution
|
||||||
|
"""Prepare an already-installed requirement
|
||||||
|
"""
|
||||||
|
assert req.satisfied_by, "req should have been satisfied but isn't"
|
||||||
|
assert skip_reason is not None, (
|
||||||
|
"did not get skip reason skipped but req.satisfied_by "
|
||||||
|
"is set to {}".format(req.satisfied_by)
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
'Requirement %s: %s (%s)',
|
||||||
|
skip_reason, req, req.satisfied_by.version
|
||||||
|
)
|
||||||
|
with indent_log():
|
||||||
|
if self.require_hashes:
|
||||||
|
logger.debug(
|
||||||
|
'Since it is already installed, we are trusting this '
|
||||||
|
'package without checking its hash. To ensure a '
|
||||||
|
'completely repeatable environment, install into an '
|
||||||
|
'empty virtualenv.'
|
||||||
|
)
|
||||||
|
abstract_dist = InstalledDistribution(req)
|
||||||
|
|
||||||
|
return abstract_dist
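
# The wheel/sdist unpack-deletion rule above is easy to lose in the branching;
# the following condensed restatement is an illustrative sketch only and is
# not part of the vendored pip file.
def _sketch_autodelete_unpacked(link_is_wheel, download_dir):
    # Wheels are only unpacked to read metadata while downloading, so that
    # unpacked copy is disposable; a wheel being installed is used from its
    # unpacked form and must be kept. Unpacked sdists are always deleted
    # after pip runs.
    if link_is_wheel:
        return bool(download_dir)
    return True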

196 sources/pip_20.1/_internal/pyproject.py Normal file
@@ -0,0 +1,196 @@

from __future__ import absolute_import

import io
import os
import sys
from collections import namedtuple

from pip._vendor import six, toml
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

from pip._internal.exceptions import InstallationError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Optional, List


def _is_list_of_str(obj):
    # type: (Any) -> bool
    return (
        isinstance(obj, list) and
        all(isinstance(item, six.string_types) for item in obj)
    )


def make_pyproject_path(unpacked_source_directory):
    # type: (str) -> str
    path = os.path.join(unpacked_source_directory, 'pyproject.toml')

    # Python2 __file__ should not be unicode
    if six.PY2 and isinstance(path, six.text_type):
        path = path.encode(sys.getfilesystemencoding())

    return path


BuildSystemDetails = namedtuple('BuildSystemDetails', [
    'requires', 'backend', 'check', 'backend_path'
])


def load_pyproject_toml(
    use_pep517,  # type: Optional[bool]
    pyproject_toml,  # type: str
    setup_py,  # type: str
    req_name  # type: str
):
    # type: (...) -> Optional[BuildSystemDetails]
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
            means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
            error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
            directory paths to import the backend from (backend-path),
                relative to the project root.
        )
    """
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if has_pyproject:
        with io.open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = toml.load(f)
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsey because that means
    # the user explicitly requested --no-use-pep517. The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of {} "
                "in pyproject.toml".format(
                    build_system["build-backend"]
                )
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file.
    elif use_pep517 is None:
        use_pep517 = has_pyproject

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0", "wheel"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.
    error_template = (
        "{package} has a pyproject.toml file that does not comply "
        "with PEP 518: {reason}"
    )

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise InstallationError(
            error_template.format(package=req_name, reason=(
                "it has a 'build-system' table but not "
                "'build-system.requires' which is mandatory in the table"
            ))
        )

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InstallationError(error_template.format(
            package=req_name,
            reason="'build-system.requires' is not a list of strings.",
        ))

    # Each requirement must be valid as per PEP 508
    for requirement in requires:
        try:
            Requirement(requirement)
        except InvalidRequirement:
            raise InstallationError(
                error_template.format(
                    package=req_name,
                    reason=(
                        "'build-system.requires' contains an invalid "
                        "requirement: {!r}".format(requirement)
                    ),
                )
            )

    backend = build_system.get("build-backend")
    backend_path = build_system.get("backend-path", [])
    check = []  # type: List[str]
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend, or wheel
        # (which is needed by the backend) in their requirements. So we
        # make a note to check that those requirements are present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0", "wheel"]

    return BuildSystemDetails(requires, backend, check, backend_path)
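
# Illustration (not part of the vendored file): for a project whose
# pyproject.toml contains
#
#     [build-system]
#     requires = ["setuptools>=40.8.0", "wheel"]
#     build-backend = "setuptools.build_meta"
#
# a call such as load_pyproject_toml(None, pyproject_path, setup_py_path,
# "example-project") returns
#
#     BuildSystemDetails(
#         requires=["setuptools>=40.8.0", "wheel"],
#         backend="setuptools.build_meta",
#         check=[],
#         backend_path=[],
#     )
#
# The paths and project name here are hypothetical placeholders; `check` is
# only populated when no backend is declared, per the code above.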

92 sources/pip_20.1/_internal/req/__init__.py Normal file
@@ -0,0 +1,92 @@

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging

from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

from .req_file import parse_requirements
from .req_install import InstallRequirement
from .req_set import RequirementSet

if MYPY_CHECK_RUNNING:
    from typing import Any, List, Sequence

__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


class InstallationResult(object):
    def __init__(self, name):
        # type: (str) -> None
        self.name = name

    def __repr__(self):
        # type: () -> str
        return "InstallationResult(name={!r})".format(self.name)


def install_given_reqs(
    to_install,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    *args,  # type: Any
    **kwargs  # type: Any
):
    # type: (...) -> List[InstallationResult]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    installed = []

    with indent_log():
        for requirement in to_install:
            if requirement.should_reinstall:
                logger.info('Attempting uninstall: %s', requirement.name)
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.should_reinstall and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.should_reinstall and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()

            installed.append(InstallationResult(requirement.name))

    return installed
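
# A minimal standalone sketch (not part of the vendored file) of the
# uninstall-then-install transaction used by install_given_reqs() above:
# the pending uninstall is only rolled back when the replacement install
# fails, and only committed once it has succeeded. All parameters are
# hypothetical stand-ins for requirement.uninstall / requirement.install
# and the should_reinstall flag.
def _sketch_reinstall(uninstall, install, should_reinstall):
    pending = uninstall(auto_confirm=True) if should_reinstall else None
    try:
        install()
    except Exception:
        if pending is not None:
            pending.rollback()  # restore the previous version's files
        raise
    else:
        if pending is not None:
            pending.commit()  # failure window closed; remove them for real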

464 sources/pip_20.1/_internal/req/constructors.py Normal file
@@ -0,0 +1,464 @@

"""Backing implementation for InstallRequirement's various constructors
|
||||||
|
|
||||||
|
The idea here is that these formed a major chunk of InstallRequirement's size
|
||||||
|
so, moving them and support code dedicated to them outside of that class
|
||||||
|
helps creates for better understandability for the rest of the code.
|
||||||
|
|
||||||
|
These are meant to be used elsewhere within pip to create instances of
|
||||||
|
InstallRequirement.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# The following comment should be removed at some point in the future.
|
||||||
|
# mypy: strict-optional=False
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
from pip._vendor.packaging.markers import Marker
|
||||||
|
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
||||||
|
from pip._vendor.packaging.specifiers import Specifier
|
||||||
|
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
|
||||||
|
|
||||||
|
from pip._internal.exceptions import InstallationError
|
||||||
|
from pip._internal.models.index import PyPI, TestPyPI
|
||||||
|
from pip._internal.models.link import Link
|
||||||
|
from pip._internal.models.wheel import Wheel
|
||||||
|
from pip._internal.pyproject import make_pyproject_path
|
||||||
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
|
from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
|
||||||
|
from pip._internal.utils.misc import is_installable_dir, splitext
|
||||||
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||||
|
from pip._internal.utils.urls import path_to_url
|
||||||
|
from pip._internal.vcs import is_url, vcs
|
||||||
|
|
||||||
|
if MYPY_CHECK_RUNNING:
|
||||||
|
from typing import (
|
||||||
|
Any, Dict, Optional, Set, Tuple, Union,
|
||||||
|
)
|
||||||
|
from pip._internal.req.req_file import ParsedRequirement
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"install_req_from_editable", "install_req_from_line",
|
||||||
|
"parse_editable"
|
||||||
|
]
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
operators = Specifier._operators.keys()
|
||||||
|
|
||||||
|
|
||||||
|
def is_archive_file(name):
|
||||||
|
# type: (str) -> bool
|
||||||
|
"""Return True if `name` is a considered as an archive file."""
|
||||||
|
ext = splitext(name)[1].lower()
|
||||||
|
if ext in ARCHIVE_EXTENSIONS:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _strip_extras(path):
|
||||||
|
# type: (str) -> Tuple[str, Optional[str]]
|
||||||
|
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
|
||||||
|
extras = None
|
||||||
|
if m:
|
||||||
|
path_no_extras = m.group(1)
|
||||||
|
extras = m.group(2)
|
||||||
|
else:
|
||||||
|
path_no_extras = path
|
||||||
|
|
||||||
|
return path_no_extras, extras
|
||||||
|
|
||||||
|
|
||||||
|
def convert_extras(extras):
|
||||||
|
# type: (Optional[str]) -> Set[str]
|
||||||
|
if not extras:
|
||||||
|
return set()
|
||||||
|
return Requirement("placeholder" + extras.lower()).extras
|
||||||
|
|
||||||
|
|
||||||
|
def parse_editable(editable_req):
|
||||||
|
# type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
|
||||||
|
"""Parses an editable requirement into:
|
||||||
|
- a requirement name
|
||||||
|
- an URL
|
||||||
|
- extras
|
||||||
|
- editable options
|
||||||
|
Accepted requirements:
|
||||||
|
svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
|
||||||
|
.[some_extra]
|
||||||
|
"""
|
||||||
|
|
||||||
|
url = editable_req
|
||||||
|
|
||||||
|
# If a file path is specified with extras, strip off the extras.
|
||||||
|
url_no_extras, extras = _strip_extras(url)
|
||||||
|
|
||||||
|
if os.path.isdir(url_no_extras):
|
||||||
|
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
|
||||||
|
msg = (
|
||||||
|
'File "setup.py" not found. Directory cannot be installed '
|
||||||
|
'in editable mode: {}'.format(os.path.abspath(url_no_extras))
|
||||||
|
)
|
||||||
|
pyproject_path = make_pyproject_path(url_no_extras)
|
||||||
|
if os.path.isfile(pyproject_path):
|
||||||
|
msg += (
|
||||||
|
'\n(A "pyproject.toml" file was found, but editable '
|
||||||
|
'mode currently requires a setup.py based build.)'
|
||||||
|
)
|
||||||
|
raise InstallationError(msg)
|
||||||
|
|
||||||
|
# Treating it as code that has already been checked out
|
||||||
|
url_no_extras = path_to_url(url_no_extras)
|
||||||
|
|
||||||
|
if url_no_extras.lower().startswith('file:'):
|
||||||
|
package_name = Link(url_no_extras).egg_fragment
|
||||||
|
if extras:
|
||||||
|
return (
|
||||||
|
package_name,
|
||||||
|
url_no_extras,
|
||||||
|
Requirement("placeholder" + extras.lower()).extras,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return package_name, url_no_extras, None
|
||||||
|
|
||||||
|
for version_control in vcs:
|
||||||
|
if url.lower().startswith('{}:'.format(version_control)):
|
||||||
|
url = '{}+{}'.format(version_control, url)
|
||||||
|
break
|
||||||
|
|
||||||
|
if '+' not in url:
|
||||||
|
raise InstallationError(
|
||||||
|
'{} is not a valid editable requirement. '
|
||||||
|
'It should either be a path to a local project or a VCS URL '
|
||||||
|
'(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
|
||||||
|
)
|
||||||
|
|
||||||
|
vc_type = url.split('+', 1)[0].lower()
|
||||||
|
|
||||||
|
if not vcs.get_backend(vc_type):
|
||||||
|
backends = ", ".join([bends.name + '+URL' for bends in vcs.backends])
|
||||||
|
error_message = "For --editable={}, " \
|
||||||
|
"only {} are currently supported".format(
|
||||||
|
editable_req, backends)
|
||||||
|
raise InstallationError(error_message)
|
||||||
|
|
||||||
|
package_name = Link(url).egg_fragment
|
||||||
|
if not package_name:
|
||||||
|
raise InstallationError(
|
||||||
|
"Could not detect requirement name for '{}', please specify one "
|
||||||
|
"with #egg=your_package_name".format(editable_req)
|
||||||
|
)
|
||||||
|
return package_name, url, None
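
# Illustrative results (not part of the vendored file), following the parsing
# rules above; the URLs are hypothetical:
#
#     parse_editable("git+https://example.com/repo.git#egg=Foobar")
#         -> ("Foobar", "git+https://example.com/repo.git#egg=Foobar", None)
#     parse_editable(".[some_extra]")   # an existing local project directory
#         -> (None, "file:///<absolute-path>", {"some_extra"})
#     parse_editable("https://example.com/repo")   # no VCS prefix, no '+'
#         -> raises InstallationError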


def deduce_helpful_msg(req):
    # type: (str) -> str
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :param req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
                msg += (
                    "The argument you provided "
                    "({}) appears to be a"
                    " requirements file. If that is the"
                    " case, use the '-r' flag to install"
                    " the packages specified within it."
                ).format(req)
        except RequirementParseError:
            logger.debug(
                "Cannot parse '{}' as requirements file".format(req),
                exc_info=True
            )
    else:
        msg += " File '{}' does not exist.".format(req)
    return msg


class RequirementParts(object):
    def __init__(
        self,
        requirement,  # type: Optional[Requirement]
        link,  # type: Optional[Link]
        markers,  # type: Optional[Marker]
        extras,  # type: Set[str]
    ):
        self.requirement = requirement
        self.link = link
        self.markers = markers
        self.extras = extras


def parse_req_from_editable(editable_req):
    # type: (str) -> RequirementParts
    name, url, extras_override = parse_editable(editable_req)

    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError("Invalid requirement: '{}'".format(name))
    else:
        req = None

    link = Link(url)

    return RequirementParts(req, link, None, extras_override)


# ---- The actual constructors follow ----


def install_req_from_editable(
    editable_req,  # type: str
    comes_from=None,  # type: Optional[Union[InstallRequirement, str]]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    constraint=False  # type: bool
):
    # type: (...) -> InstallRequirement

    parts = parse_req_from_editable(editable_req)

    return InstallRequirement(
        parts.requirement,
        comes_from=comes_from,
        editable=True,
        link=parts.link,
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        extras=parts.extras,
    )


def _looks_like_path(name):
    # type: (str) -> bool
    """Checks whether the string "looks like" a path on the filesystem.

    This does not check whether the target actually exists, only judges from
    the appearance.

    Returns true if any of the following conditions is true:
    * a path separator is found (either os.path.sep or os.path.altsep);
    * a dot is found (which represents the current directory).
    """
    if os.path.sep in name:
        return True
    if os.path.altsep is not None and os.path.altsep in name:
        return True
    if name.startswith("."):
        return True
    return False


def _get_url_from_path(path, name):
    # type: (str, str) -> Optional[str]
    """
    First, it checks whether a provided path is an installable directory
    (e.g. it has a setup.py). If it is, returns the path.

    If false, check if the path is an archive file (such as a .whl).
    The function checks if the path is a file. If false, if the path has
    an @, it will treat it as a PEP 440 URL requirement and return the path.
    """
    if _looks_like_path(name) and os.path.isdir(path):
        if is_installable_dir(path):
            return path_to_url(path)
        raise InstallationError(
            "Directory {name!r} is not installable. Neither 'setup.py' "
            "nor 'pyproject.toml' found.".format(**locals())
        )
    if not is_archive_file(path):
        return None
    if os.path.isfile(path):
        return path_to_url(path)
    urlreq_parts = name.split('@', 1)
    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
        # If the path contains '@' and the part before it does not look
        # like a path, try to treat it as a PEP 440 URL req instead.
        return None
    logger.warning(
        'Requirement %r looks like a filename, but the '
        'file does not exist',
        name
    )
    return path_to_url(path)


def parse_req_from_line(name, line_source):
    # type: (str, Optional[str]) -> RequirementParts
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        url = _get_url_from_path(p, name)
        if url is not None:
            link = Link(url)

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = "{wheel.name}=={wheel.version}".format(**locals())
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    extras = convert_extras(extras_as_string)

    def with_source(text):
        # type: (str) -> str
        if not line_source:
            return text
        return '{} (from {})'.format(text, line_source)

    if req_as_string is not None:
        try:
            req = Requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif ('=' in req_as_string and
                  not any(op in req_as_string for op in operators)):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ''
            msg = with_source(
                'Invalid requirement: {!r}'.format(req_as_string)
            )
            if add_msg:
                msg += '\nHint: {}'.format(add_msg)
            raise InstallationError(msg)
    else:
        req = None

    return RequirementParts(req, link, markers, extras)
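
# Illustrative classifications (not part of the vendored file) produced by
# the logic above; the names, paths and URL are hypothetical:
#
#     "requests >= 2.0 ; python_version < '3.8'"
#         -> a Requirement plus an environment Marker
#     "./downloads/numpy-1.9.2-cp34-none-win32.whl"
#         -> a Link, with the requirement pinned from the wheel filename
#            ("numpy==1.9.2")
#     "https://example.com/pkg.tar.gz#egg=pkg"
#         -> a Link, requirement name taken from the #egg= fragment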


def install_req_from_line(
    name,  # type: str
    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    constraint=False,  # type: bool
    line_source=None,  # type: Optional[str]
):
    # type: (...) -> InstallRequirement
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    """
    parts = parse_req_from_line(name, line_source)

    return InstallRequirement(
        parts.requirement, comes_from, link=parts.link, markers=parts.markers,
        use_pep517=use_pep517, isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        constraint=constraint,
        extras=parts.extras,
    )


def install_req_from_req_string(
    req_string,  # type: str
    comes_from=None,  # type: Optional[InstallRequirement]
    isolated=False,  # type: bool
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> InstallRequirement
    try:
        req = Requirement(req_string)
    except InvalidRequirement:
        raise InstallationError("Invalid requirement: '{}'".format(req_string))

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (req.url and comes_from and comes_from.link and
            comes_from.link.netloc in domains_not_allowed):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "{} depends on {} ".format(comes_from.name, req)
        )

    return InstallRequirement(
        req, comes_from, isolated=isolated, use_pep517=use_pep517
    )


def install_req_from_parsed_requirement(
    parsed_req,  # type: ParsedRequirement
    isolated=False,  # type: bool
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> InstallRequirement
    if parsed_req.is_editable:
        req = install_req_from_editable(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            constraint=parsed_req.constraint,
            isolated=isolated,
        )

    else:
        req = install_req_from_line(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=parsed_req.options,
            constraint=parsed_req.constraint,
            line_source=parsed_req.line_source,
        )
    return req

582 sources/pip_20.1/_internal/req/req_file.py Normal file
@@ -0,0 +1,582 @@

"""
|
||||||
|
Requirements file parsing
|
||||||
|
"""
|
||||||
|
|
||||||
|
# The following comment should be removed at some point in the future.
|
||||||
|
# mypy: strict-optional=False
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shlex
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||||
|
|
||||||
|
from pip._internal.cli import cmdoptions
|
||||||
|
from pip._internal.exceptions import (
|
||||||
|
InstallationError,
|
||||||
|
RequirementsFileParseError,
|
||||||
|
)
|
||||||
|
from pip._internal.models.search_scope import SearchScope
|
||||||
|
from pip._internal.utils.encoding import auto_decode
|
||||||
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||||
|
from pip._internal.utils.urls import get_url_scheme
|
||||||
|
|
||||||
|
if MYPY_CHECK_RUNNING:
|
||||||
|
from optparse import Values
|
||||||
|
from typing import (
|
||||||
|
Any, Callable, Dict, Iterator, List, NoReturn, Optional, Text, Tuple,
|
||||||
|
)
|
||||||
|
|
||||||
|
from pip._internal.index.package_finder import PackageFinder
|
||||||
|
from pip._internal.network.session import PipSession
|
||||||
|
|
||||||
|
ReqFileLines = Iterator[Tuple[int, Text]]
|
||||||
|
|
||||||
|
LineParser = Callable[[Text], Tuple[str, Values]]
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['parse_requirements']
|
||||||
|
|
||||||
|
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
|
||||||
|
COMMENT_RE = re.compile(r'(^|\s+)#.*$')
|
||||||
|
|
||||||
|
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
||||||
|
# variable name consisting of only uppercase letters, digits or the '_'
|
||||||
|
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
||||||
|
# 2013 Edition.
|
||||||
|
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
|
||||||
|
|
||||||
|
SUPPORTED_OPTIONS = [
|
||||||
|
cmdoptions.index_url,
|
||||||
|
cmdoptions.extra_index_url,
|
||||||
|
cmdoptions.no_index,
|
||||||
|
cmdoptions.constraints,
|
||||||
|
cmdoptions.requirements,
|
||||||
|
cmdoptions.editable,
|
||||||
|
cmdoptions.find_links,
|
||||||
|
cmdoptions.no_binary,
|
||||||
|
cmdoptions.only_binary,
|
||||||
|
cmdoptions.require_hashes,
|
||||||
|
cmdoptions.pre,
|
||||||
|
cmdoptions.trusted_host,
|
||||||
|
cmdoptions.always_unzip, # Deprecated
|
||||||
|
] # type: List[Callable[..., optparse.Option]]
|
||||||
|
|
||||||
|
# options to be passed to requirements
|
||||||
|
SUPPORTED_OPTIONS_REQ = [
|
||||||
|
cmdoptions.install_options,
|
||||||
|
cmdoptions.global_options,
|
||||||
|
cmdoptions.hash,
|
||||||
|
] # type: List[Callable[..., optparse.Option]]
|
||||||
|
|
||||||
|
# the 'dest' string values
|
||||||
|
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
||||||
|
|
||||||
|
|
||||||
|
class ParsedRequirement(object):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
requirement, # type:str
|
||||||
|
is_editable, # type: bool
|
||||||
|
comes_from, # type: str
|
||||||
|
constraint, # type: bool
|
||||||
|
options=None, # type: Optional[Dict[str, Any]]
|
||||||
|
line_source=None, # type: Optional[str]
|
||||||
|
):
|
||||||
|
# type: (...) -> None
|
||||||
|
self.requirement = requirement
|
||||||
|
self.is_editable = is_editable
|
||||||
|
self.comes_from = comes_from
|
||||||
|
self.options = options
|
||||||
|
self.constraint = constraint
|
||||||
|
self.line_source = line_source
|
||||||
|
|
||||||
|
|
||||||
|
class ParsedLine(object):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
filename, # type: str
|
||||||
|
lineno, # type: int
|
||||||
|
comes_from, # type: str
|
||||||
|
args, # type: str
|
||||||
|
opts, # type: Values
|
||||||
|
constraint, # type: bool
|
||||||
|
):
|
||||||
|
# type: (...) -> None
|
||||||
|
self.filename = filename
|
||||||
|
self.lineno = lineno
|
||||||
|
self.comes_from = comes_from
|
||||||
|
self.opts = opts
|
||||||
|
self.constraint = constraint
|
||||||
|
|
||||||
|
if args:
|
||||||
|
self.is_requirement = True
|
||||||
|
self.is_editable = False
|
||||||
|
self.requirement = args
|
||||||
|
elif opts.editables:
|
||||||
|
self.is_requirement = True
|
||||||
|
self.is_editable = True
|
||||||
|
# We don't support multiple -e on one line
|
||||||
|
self.requirement = opts.editables[0]
|
||||||
|
else:
|
||||||
|
self.is_requirement = False
|
||||||
|
|
||||||
|
|
||||||
|
def parse_requirements(
|
||||||
|
filename, # type: str
|
||||||
|
session, # type: PipSession
|
||||||
|
finder=None, # type: Optional[PackageFinder]
|
||||||
|
comes_from=None, # type: Optional[str]
|
||||||
|
options=None, # type: Optional[optparse.Values]
|
||||||
|
constraint=False, # type: bool
|
||||||
|
):
|
||||||
|
# type: (...) -> Iterator[ParsedRequirement]
|
||||||
|
"""Parse a requirements file and yield InstallRequirement instances.
|
||||||
|
|
||||||
|
:param filename: Path or url of requirements file.
|
||||||
|
:param session: PipSession instance.
|
||||||
|
:param finder: Instance of pip.index.PackageFinder.
|
||||||
|
:param comes_from: Origin description of requirements.
|
||||||
|
:param options: cli options.
|
||||||
|
:param constraint: If true, parsing a constraint file rather than
|
||||||
|
requirements file.
|
||||||
|
"""
|
||||||
|
line_parser = get_line_parser(finder)
|
||||||
|
parser = RequirementsFileParser(session, line_parser, comes_from)
|
||||||
|
|
||||||
|
for parsed_line in parser.parse(filename, constraint):
|
||||||
|
parsed_req = handle_line(
|
||||||
|
parsed_line,
|
||||||
|
options=options,
|
||||||
|
finder=finder,
|
||||||
|
session=session
|
||||||
|
)
|
||||||
|
if parsed_req is not None:
|
||||||
|
yield parsed_req
|
||||||
|
|
||||||
|
|
||||||
|
def preprocess(content):
|
||||||
|
# type: (Text) -> ReqFileLines
|
||||||
|
"""Split, filter, and join lines, and return a line iterator
|
||||||
|
|
||||||
|
:param content: the content of the requirements file
|
||||||
|
"""
|
||||||
|
lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
|
||||||
|
lines_enum = join_lines(lines_enum)
|
||||||
|
lines_enum = ignore_comments(lines_enum)
|
||||||
|
lines_enum = expand_env_variables(lines_enum)
|
||||||
|
return lines_enum
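
# Worked illustration (not part of the vendored file) of the pipeline above:
# given file content such as
#
#     # a comment-only line is dropped
#     requests \
#         >=2.0
#
# join_lines() folds the backslash continuation into the first physical line,
# ignore_comments() removes the comment line, and preprocess() yields a single
# entry roughly equal to (2, 'requests     >=2.0'), keyed by the number of the
# line where the logical line started (internal whitespace is approximate).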


def handle_requirement_line(
    line,  # type: ParsedLine
    options=None,  # type: Optional[optparse.Values]
):
    # type: (...) -> ParsedRequirement

    # preserve for the nested code path
    line_comes_from = '{} {} (line {})'.format(
        '-c' if line.constraint else '-r', line.filename, line.lineno,
    )

    assert line.is_requirement

    if line.is_editable:
        # For editable requirements, we don't support per-requirement
        # options, so just return the parsed requirement.
        return ParsedRequirement(
            requirement=line.requirement,
            is_editable=line.is_editable,
            comes_from=line_comes_from,
            constraint=line.constraint,
        )
    else:
        if options:
            # Disable wheels if the user has specified build options
            cmdoptions.check_install_build_global(options, line.opts)

        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
                req_options[dest] = line.opts.__dict__[dest]

        line_source = 'line {} of {}'.format(line.lineno, line.filename)
        return ParsedRequirement(
            requirement=line.requirement,
            is_editable=line.is_editable,
            comes_from=line_comes_from,
            constraint=line.constraint,
            options=req_options,
            line_source=line_source,
        )


def handle_option_line(
    opts,  # type: Values
    filename,  # type: str
    lineno,  # type: int
    finder=None,  # type: Optional[PackageFinder]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
):
    # type: (...) -> None

    # percolate hash-checking option upward
    if opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        if opts.index_url:
            index_urls = [opts.index_url]
        if opts.no_index is True:
            index_urls = []
        if opts.extra_index_urls:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()

    if session:
        for host in opts.trusted_hosts or []:
            source = 'line {} of {}'.format(lineno, filename)
            session.add_trusted_host(host, source=source)


def handle_line(
    line,  # type: ParsedLine
    options=None,  # type: Optional[optparse.Values]
    finder=None,  # type: Optional[PackageFinder]
    session=None,  # type: Optional[PipSession]
):
    # type: (...) -> Optional[ParsedRequirement]
    """Handle a single parsed requirements line; this can result in
    creating/yielding requirements, or updating the finder.

    :param line: The parsed line to be processed.
    :param options: CLI options.
    :param finder: The finder - updated by non-requirement lines.
    :param session: The session - updated by non-requirement lines.

    Returns a ParsedRequirement object if the line is a requirement line,
    otherwise returns None.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.
    """

    if line.is_requirement:
        parsed_req = handle_requirement_line(line, options)
        return parsed_req
    else:
        handle_option_line(
            line.opts,
            line.filename,
            line.lineno,
            finder,
            options,
            session,
        )
        return None


class RequirementsFileParser(object):
    def __init__(
        self,
        session,  # type: PipSession
        line_parser,  # type: LineParser
        comes_from,  # type: str
    ):
        # type: (...) -> None
        self._session = session
        self._line_parser = line_parser
        self._comes_from = comes_from

    def parse(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        """Parse a given file, yielding parsed lines.
        """
        for line in self._parse_and_recurse(filename, constraint):
            yield line

    def _parse_and_recurse(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        for line in self._parse_file(filename, constraint):
            if (
                not line.is_requirement and
                (line.opts.requirements or line.opts.constraints)
            ):
                # parse a nested requirements file
                if line.opts.requirements:
                    req_path = line.opts.requirements[0]
                    nested_constraint = False
                else:
                    req_path = line.opts.constraints[0]
                    nested_constraint = True

                # original file is over http
                if SCHEME_RE.search(filename):
                    # do a url join so relative paths work
                    req_path = urllib_parse.urljoin(filename, req_path)
                # original file and nested file are paths
                elif not SCHEME_RE.search(req_path):
                    # do a join so relative paths work
                    req_path = os.path.join(
                        os.path.dirname(filename), req_path,
                    )

                for inner_line in self._parse_and_recurse(
                    req_path, nested_constraint,
                ):
                    yield inner_line
            else:
                yield line

    def _parse_file(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        _, content = get_file_content(
            filename, self._session, comes_from=self._comes_from
        )

        lines_enum = preprocess(content)

        for line_number, line in lines_enum:
            try:
                args_str, opts = self._line_parser(line)
            except OptionParsingError as e:
                # add offending line
                msg = 'Invalid requirement: {}\n{}'.format(line, e.msg)
                raise RequirementsFileParseError(msg)

            yield ParsedLine(
                filename,
                line_number,
                self._comes_from,
                args_str,
                opts,
                constraint,
            )


def get_line_parser(finder):
    # type: (Optional[PackageFinder]) -> LineParser
    def parse_line(line):
        # type: (Text) -> Tuple[str, Values]
        # Build new parser for each line since it accumulates appendable
        # options.
        parser = build_parser()
        defaults = parser.get_default_values()
        defaults.index_url = None
        if finder:
            defaults.format_control = finder.format_control

        args_str, options_str = break_args_options(line)
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        if sys.version_info < (2, 7, 3):
            # https://github.com/python/mypy/issues/1174
            options_str = options_str.encode('utf8')  # type: ignore

        # https://github.com/python/mypy/issues/1174
        opts, _ = parser.parse_args(
            shlex.split(options_str), defaults)  # type: ignore

        return args_str, opts

    return parse_line


def break_args_options(line):
    # type: (Text) -> Tuple[str, Text]
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)  # type: ignore


class OptionParsingError(Exception):
    def __init__(self, msg):
        # type: (str) -> None
        self.msg = msg


def build_parser():
    # type: () -> optparse.OptionParser
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # type: (Any, str) -> NoReturn
        raise OptionParsingError(msg)
    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser


def join_lines(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []  # type: List[Text]
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def expand_env_variables(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                continue

            line = line.replace(env_var, value)

        yield line_number, line
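
# Illustration (not part of the vendored file): with MY_TOKEN=abc123 set in
# the environment, the (hypothetical) requirements line
#
#     https://${MY_TOKEN}@example.com/simple
#
# expands to 'https://abc123@example.com/simple', while forms the regex does
# not match, such as $MY_TOKEN or ${my_token}, are left untouched.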


def get_file_content(url, session, comes_from=None):
    # type: (str, PipSession, Optional[str]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.
    Respects # -*- coding: declarations on the retrieved files.

    :param url: File path or url.
    :param session: PipSession instance.
    :param comes_from: Origin description of requirements.
    """
    scheme = get_url_scheme(url)

    if scheme in ['http', 'https']:
        # FIXME: catch some errors
        resp = session.get(url)
        resp.raise_for_status()
        return resp.url, resp.text

    elif scheme == 'file':
        if comes_from and comes_from.startswith('http'):
            raise InstallationError(
                'Requirements file {} references URL {}, '
                'which is local'.format(comes_from, url)
            )

        path = url.split(':', 1)[1]
        path = path.replace('\\', '/')
        match = _url_slash_drive_re.match(path)
        if match:
            path = match.group(1) + ':' + path.split('|', 1)[1]
        path = urllib_parse.unquote(path)
        if path.startswith('/'):
            path = '/' + path.lstrip('/')
        url = path

    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: {}'.format(exc)
        )
    return url, content


_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)

850 sources/pip_20.1/_internal/req/req_install.py Normal file
@@ -0,0 +1,850 @@

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging
import os
import shutil
import sys
import zipfile

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_legacy import \
    generate_metadata as generate_metadata_legacy
from pip._internal.operations.install.editable_legacy import \
    install_editable as install_editable_legacy
from pip._internal.operations.install.legacy import LegacyInstallFailure
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_installed_version,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, List, Optional, Sequence, Union,
    )
    from pip._internal.build_env import BuildEnvironment
    from pip._vendor.pkg_resources import Distribution
    from pip._vendor.packaging.specifiers import SpecifierSet
    from pip._vendor.packaging.markers import Marker


logger = logging.getLogger(__name__)


def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
|
||||||
|
|
||||||
|
|
||||||
|
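For orientation, the name derivation in `_get_dist` can be traced with plain path operations; the metadata directory below is hypothetical:

import os

dist_dir = '/tmp/build/requests-2.23.0.dist-info'.rstrip(os.sep)
base_dir, dist_dir_name = os.path.split(dist_dir)
dist_name = os.path.splitext(dist_dir_name)[0].split('-')[0]
print(base_dir)   # /tmp/build
print(dist_name)  # requests
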
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        install_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        hash_options=None,  # type: Optional[Dict[str, List[str]]]
        constraint=False,  # type: bool
        extras=()  # type: Iterable[str]
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir = None  # type: Optional[str]
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        # Set by the legacy resolver when the requirement has been downloaded
        # TODO: This introduces a strong coupling between the resolver and the
        #       requirement (the coupling was previously between the resolver
        #       and the requirement set). This should be refactored to allow
        #       the requirement to decide for itself when it has been
        #       successfully downloaded - but that is more tricky to get right,
        #       so we are making the change in stages.
        self.successfully_downloaded = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

    def __str__(self):
        # type: () -> str
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from  # type: Optional[str]
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from {})'.format(comes_from)
        return s

    def __repr__(self):
        # type: () -> str
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)

    def format_debug(self):
        # type: () -> str
        """An un-tested helper for getting state, for debugging.
        """
        attributes = vars(self)
        names = sorted(attributes)

        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
        )
        return '<{name} object: {{{state}}}>'.format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        # type: () -> Optional[str]
        if self.req is None:
            return None
        return six.ensure_str(pkg_resources.safe_name(self.req.name))

    @property
    def specifier(self):
        # type: () -> SpecifierSet
        return self.req.specifier

    @property
    def is_pinned(self):
        # type: () -> bool
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

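The `is_pinned` test above only accepts a single `==` or `===` specifier. A standalone sketch of the same check, assuming the standalone `packaging` distribution (the same code pip vendors here):

from packaging.requirements import Requirement

def is_pinned(req):
    specifiers = req.specifier
    return (len(specifiers) == 1 and
            next(iter(specifiers)).operator in {'==', '==='})

print(is_pinned(Requirement('some-package==1.2')))   # True
print(is_pinned(Requirement('some-package>1.2')))    # False
print(is_pinned(Requirement('some-package>=1,<2')))  # False (two specifiers)
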
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        return get_installed_version(self.name)

    def match_markers(self, extras_requested=None):
        # type: (Optional[Iterable[str]]) -> bool
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    @property
    def has_hash_options(self):
        # type: () -> bool
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

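The marker evaluation used by `match_markers` can be reproduced directly; a sketch assuming the standalone `packaging` distribution:

from packaging.markers import Marker

marker = Marker('python_version >= "3.4" and extra == "socks"')
# Evaluating with an empty extra (as match_markers does by default) fails
# the extra clause; evaluating with the requested extra succeeds.
print(marker.evaluate({'extra': ''}))       # False
print(marker.evaluate({'extra': 'socks'}))  # True (on Python >= 3.4)
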
    def from_path(self):
        # type: () -> Optional[str]
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(self, build_dir, autodelete):
        # type: (str, bool) -> str
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )

    def warn_on_mismatching_name(self):
        # type: () -> None
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        try:
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            return
        except pkg_resources.VersionConflict:
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable and self.satisfied_by:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None

    # Things valid for wheels
    @property
    def is_wheel(self):
        # type: () -> bool
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self):
        # type: () -> str
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    @property
    def pyproject_toml_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

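For reference, the tuple unpacked above corresponds to a project's `[build-system]` table; the project values below are hypothetical:

# Given a pyproject.toml containing:
#
#   [build-system]
#   requires = ["setuptools>=40.8.0", "wheel"]
#   build-backend = "setuptools.build_meta"
#
# load_pyproject_toml() hands back roughly:
requires = ["setuptools>=40.8.0", "wheel"]  # -> self.pyproject_requires
backend = "setuptools.build_meta"           # -> Pep517HookCaller backend
check = []                                  # -> self.requirements_to_check
backend_path = None                         # set only for in-tree backends
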
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self):
        # type: () -> Any
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self):
        # type: () -> Distribution
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self):
        # type: () -> None
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(self, parent_dir, autodelete=False):
        # type: (str, bool) -> None
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir, autodelete
            )

    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, \
            "bad url: {self.link.url!r}".format(**locals())
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in {}): {}'.format(
                    self.link, vc_type))

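The `<vcs>+<real-url>` convention split above works on plain strings; the URL below is illustrative:

link_url = 'git+https://github.com/pypa/pip.git@20.1#egg=pip'
vc_type, url = link_url.split('+', 1)
print(vc_type)  # git
print(url)      # https://github.com/pypa/pip.git@20.1#egg=pip
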
    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        try:
            dist = pkg_resources.get_distribution(self.req.name)
        except pkg_resources.DistributionNotFound:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        else:
            logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name {name!r} doesn't start with prefix {prefix!r}"
                .format(**locals())
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

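The `external_attr` line in `archive()` stores Unix permission bits in the high 16 bits of the zip entry attributes; `0x1ED` is `0o755`. A standalone sketch:

import zipfile

zipdir = zipfile.ZipInfo('pkg/')
zipdir.external_attr = 0o755 << 16            # same value as 0x1ED << 16
print(oct(zipdir.external_attr >> 16))        # 0o755 (rwxr-xr-x)
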
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            self.install_succeeded = True
            raise

        self.install_succeeded = success
202 sources/pip_20.1/_internal/req/req_set.py Normal file
@@ -0,0 +1,202 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InstallationError
from pip._internal.models.wheel import Wheel
from pip._internal.utils import compatibility_tags
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Dict, Iterable, List, Optional, Tuple
    from pip._internal.req.req_install import InstallRequirement


logger = logging.getLogger(__name__)


class RequirementSet(object):

    def __init__(self, check_supported_wheels=True):
        # type: (bool) -> None
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.check_supported_wheels = check_supported_wheels

        self.unnamed_requirements = []  # type: List[InstallRequirement]

    def __str__(self):
        # type: () -> str
        requirements = sorted(
            (req for req in self.requirements.values() if not req.comes_from),
            key=lambda req: canonicalize_name(req.name),
        )
        return ' '.join(str(req.req) for req in requirements)

    def __repr__(self):
        # type: () -> str
        requirements = sorted(
            self.requirements.values(),
            key=lambda req: canonicalize_name(req.name),
        )

        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
        return format_string.format(
            classname=self.__class__.__name__,
            count=len(requirements),
            reqs=', '.join(str(req.req) for req in requirements),
        )

    def add_unnamed_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        assert not install_req.name
        self.unnamed_requirements.append(install_req)

    def add_named_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        assert install_req.name

        project_name = canonicalize_name(install_req.name)
        self.requirements[project_name] = install_req

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                install_req.name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            tags = compatibility_tags.get_supported()
            if (self.check_supported_wheels and not wheel.supported(tags)):
                raise InstallationError(
                    "{} is not a supported wheel on this platform.".format(
                        wheel.filename)
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not install_req.name:
            self.add_unnamed_requirement(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(install_req.name)
        except KeyError:
            existing_req = None

        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: {} (already in {}, name={!r})"
                .format(install_req, existing_req, install_req.name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.add_named_requirement(install_req)
            # We'd want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            raise InstallationError(
                "Could not satisfy constraints for '{}': "
                "installation from path or url cannot be "
                "constrained to a version".format(install_req.name)
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

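The `(to_scan, added)` pairs returned by `add_requirement` are easiest to read case by case; this summary is a reading aid, not pip code:

# markers don't match              -> ([], None)
# unnamed requirement              -> ([install_req], None)
# new named requirement            -> ([install_req], install_req)
# duplicate / already-tracked req  -> ([], existing_req)
# constraint promoted to install   -> ([existing_req], existing_req)
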
    def has_requirement(self, name):
        # type: (str) -> bool
        project_name = canonicalize_name(name)

        return (
            project_name in self.requirements and
            not self.requirements[project_name].constraint
        )

    def get_requirement(self, name):
        # type: (str) -> InstallRequirement
        project_name = canonicalize_name(name)

        if project_name in self.requirements:
            return self.requirements[project_name]

        raise KeyError("No project with the name {name!r}".format(**locals()))

    @property
    def all_requirements(self):
        # type: () -> List[InstallRequirement]
        return self.unnamed_requirements + list(self.requirements.values())
151 sources/pip_20.1/_internal/req/req_tracker.py Normal file
@@ -0,0 +1,151 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import contextlib
import errno
import hashlib
import logging
import os

from pip._vendor import contextlib2

from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from types import TracebackType
    from typing import Dict, Iterator, Optional, Set, Type, Union
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.models.link import Link

logger = logging.getLogger(__name__)


@contextlib.contextmanager
def update_env_context_manager(**changes):
    # type: (str) -> Iterator[None]
    target = os.environ

    # Save values from the target and change them.
    non_existent_marker = object()
    saved_values = {}  # type: Dict[str, Union[object, str]]
    for name, new_value in changes.items():
        try:
            saved_values[name] = target[name]
        except KeyError:
            saved_values[name] = non_existent_marker
        target[name] = new_value

    try:
        yield
    finally:
        # Restore original values in the target.
        for name, original_value in saved_values.items():
            if original_value is non_existent_marker:
                del target[name]
            else:
                assert isinstance(original_value, str)  # for mypy
                target[name] = original_value

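A minimal usage sketch of the context manager above (the path is illustrative):

import os

with update_env_context_manager(PIP_REQ_TRACKER='/tmp/pip-tracker'):
    print(os.environ['PIP_REQ_TRACKER'])  # /tmp/pip-tracker
# On exit the variable is removed again (or its prior value is restored).
print('PIP_REQ_TRACKER' in os.environ)    # False, if it was unset before
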
@contextlib.contextmanager
def get_requirement_tracker():
    # type: () -> Iterator[RequirementTracker]
    root = os.environ.get('PIP_REQ_TRACKER')
    with contextlib2.ExitStack() as ctx:
        if root is None:
            root = ctx.enter_context(
                TempDirectory(kind='req-tracker')
            ).path
            ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with RequirementTracker(root) as tracker:
            yield tracker


class RequirementTracker(object):

    def __init__(self, root):
        # type: (str) -> None
        self._root = root
        self._entries = set()  # type: Set[InstallRequirement]
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self):
        # type: () -> RequirementTracker
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        """Add an InstallRequirement to build tracking.
        """

        # Get the file to write information about this requirement.
        entry_path = self._entry_path(req.link)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except IOError as e:
            # if the error is anything other than "file does not exist", raise.
            if e.errno != errno.ENOENT:
                raise
        else:
            message = '{} is already being built: {}'.format(
                req.link, contents)
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert req not in self._entries

        # Start tracking this requirement.
        with open(entry_path, 'w') as fp:
            fp.write(str(req))
        self._entries.add(req)

        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        """Remove an InstallRequirement from build tracking.
        """

        # Delete the created file and the corresponding entries.
        os.unlink(self._entry_path(req.link))
        self._entries.remove(req)

        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        for req in set(self._entries):
            self.remove(req)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        self.add(req)
        yield
        self.remove(req)
649 sources/pip_20.1/_internal/req/req_uninstall.py Normal file
@@ -0,0 +1,649 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile,
    ask,
    dist_in_usersite,
    dist_is_local,
    egg_link_path,
    is_local,
    normalize_path,
    renames,
    rmtree,
)
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
    )
    from pip._vendor.pkg_resources import Distribution

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    # type: (Distribution, str, bool) -> List[str]
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove


def _unique(fn):
    # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]
    @functools.wraps(fn)
    def unique(*args, **kw):
        # type: (Any, Any) -> Iterator[Any]
        seen = set()  # type: Set[Any]
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique

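The `_unique` decorator deduplicates whatever a generator yields while preserving order; a toy illustration:

@_unique
def numbers():
    for n in [1, 2, 2, 3, 1]:
        yield n

print(list(numbers()))  # [1, 2, 3]
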
@_unique
def uninstallation_paths(dist):
    # type: (Distribution) -> Iterator[str]
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
            path = os.path.join(dn, base + '.pyo')
            yield path


def compact(paths):
    # type: (Iterable[str]) -> Set[str]
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()  # type: Set[str]
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths

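On a POSIX system (where `os.path.sep` is `/`), `compact` keeps only the highest-level covering paths; the paths below are illustrative:

paths = {'/a/path/', '/a/path/to/a/file.txt', '/a/other.txt'}
print(compact(paths))  # {'/a/path/', '/a/other.txt'}
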
def compress_for_rename(paths):
    # type: (Iterable[str]) -> Set[str]
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk.
    """
    case_map = dict((os.path.normcase(p), p) for p in paths)
    remaining = set(case_map)
    unchecked = sorted(set(os.path.split(p)[0]
                           for p in case_map.values()), key=len)
    wildcards = set()  # type: Set[str]

    def norm_join(*a):
        # type: (str) -> str
        return os.path.normcase(os.path.join(*a))

    for root in unchecked:
        if any(os.path.normcase(root).startswith(w)
               for w in wildcards):
            # This directory has already been handled.
            continue

        all_files = set()  # type: Set[str]
        all_subdirs = set()  # type: Set[str]
        for dirname, subdirs, files in os.walk(root):
            all_subdirs.update(norm_join(root, dirname, d)
                               for d in subdirs)
            all_files.update(norm_join(root, dirname, f)
                             for f in files)
        # If all the files we found are in our remaining set of files to
        # remove, then remove them from the latter set and add a wildcard
        # for the directory.
        if not (all_files - remaining):
            remaining.difference_update(all_files)
            wildcards.add(root + os.sep)

    return set(map(case_map.__getitem__, remaining)) | wildcards


def compress_for_output_listing(paths):
    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = set(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    # probably this one https://github.com/python/mypy/issues/390
    _normcased_files = set(map(os.path.normcase, files))  # type: ignore

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip

class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""
    def __init__(self):
        # type: () -> None
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}  # type: Dict[str, TempDirectory]
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []  # type: List[Tuple[str, str]]

    def _get_directory_stash(self, path):
        # type: (str) -> str
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)  # type: TempDirectory
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        # type: (str) -> str
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        # type: (str) -> str
        """Stashes the directory or file and returns its new location.
        Handle symlinks as files to avoid modifying the symlink targets.
        """
        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
        if path_is_dir:
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if (path_is_dir and os.path.isdir(new_path)):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

def commit(self):
|
||||||
|
# type: () -> None
|
||||||
|
"""Commits the uninstall by removing stashed files."""
|
||||||
|
for _, save_dir in self._save_dirs.items():
|
||||||
|
save_dir.cleanup()
|
||||||
|
self._moves = []
|
||||||
|
self._save_dirs = {}
|
||||||
|
|
||||||
|
def rollback(self):
|
||||||
|
# type: () -> None
|
||||||
|
"""Undoes the uninstall by moving stashed files back."""
|
||||||
|
for p in self._moves:
|
||||||
|
logger.info("Moving to %s\n from %s", *p)
|
||||||
|
|
||||||
|
for new_path, path in self._moves:
|
||||||
|
try:
|
||||||
|
logger.debug('Replacing %s from %s', new_path, path)
|
||||||
|
if os.path.isfile(new_path) or os.path.islink(new_path):
|
||||||
|
os.unlink(new_path)
|
||||||
|
elif os.path.isdir(new_path):
|
||||||
|
rmtree(new_path)
|
||||||
|
renames(path, new_path)
|
||||||
|
except OSError as ex:
|
||||||
|
logger.error("Failed to restore %s", new_path)
|
||||||
|
logger.debug("Exception: %s", ex)
|
||||||
|
|
||||||
|
self.commit()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def can_rollback(self):
|
||||||
|
# type: () -> bool
|
||||||
|
return bool(self._moves)
|
||||||
|
|
||||||
|
|
||||||
|
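The stash/rollback round trip can be seen end to end with a small experiment. Again, StashedUninstallPathSet is pip-internal and imported here only for illustration; the file name below is made up.

import os
import tempfile

from pip._internal.req.req_uninstall import StashedUninstallPathSet

with tempfile.TemporaryDirectory() as root:
    target = os.path.join(root, "example.txt")
    with open(target, "w") as f:
        f.write("keep me\n")

    stasher = StashedUninstallPathSet()
    stashed_at = stasher.stash(target)   # renames the file into a temp dir
    assert not os.path.exists(target)
    assert os.path.exists(stashed_at)

    stasher.rollback()                   # undoes the rename
    assert os.path.exists(target)
    # A real uninstall would call stasher.commit() instead, deleting the
    # stashed copies for good.
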
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # type: (Distribution) -> None
        self.paths = set()  # type: Set[str]
        self._refuse = set()  # type: Set[str]
        self.pth = {}  # type: Dict[str, UninstallPthEntries]
        self.dist = dist
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        # type: (str) -> bool
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.
        """
        return is_local(path)

    def add(self, path):
        # type: (str) -> None
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        # type: (str, str) -> None
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> None
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        # type: (bool) -> bool
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # type: (str, Iterable[str]) -> None
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = set(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        # type: () -> None
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        # type: () -> None
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> UninstallPathSet
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: with two installs of
        # the same package, pip needs to uninstall the currently detected
        # version.
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.has_metadata('top_level.txt'):
            if dist.has_metadata('namespace_packages.txt'):
                namespaces = dist.get_metadata('namespace_packages.txt')
            else:
                namespaces = []
            for top_level_pkg in [
                    p for p
                    in dist.get_metadata('top_level.txt').splitlines()
                    if p and p not in namespaces]:
                path = os.path.join(dist.location, top_level_pkg)
                paths_to_remove.add(path)
                paths_to_remove.add(path + '.py')
                paths_to_remove.add(path + '.pyc')
                paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link {} does not match installed location of {} '
                '(at {})'.format(
                    link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove

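Putting the pieces together, pip itself drives this class roughly as sketched below: build the path set from an installed distribution, remove with stashing, then commit on success or roll back on failure. The distribution name is a placeholder, and these are internal APIs that can change between pip releases.

from pip._vendor import pkg_resources
from pip._internal.req.req_uninstall import UninstallPathSet

# "somepackage" stands in for an actually-installed distribution.
dist = pkg_resources.get_distribution("somepackage")
uninstalled = UninstallPathSet.from_dist(dist)
try:
    uninstalled.remove(auto_confirm=True)
    uninstalled.commit()    # drop stashed files; rollback no longer possible
except Exception:
    uninstalled.rollback()  # restore everything that was stashed
    raise
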
class UninstallPthEntries(object):
    def __init__(self, pth_file):
        # type: (str) -> None
        self.file = pth_file
        self.entries = set()  # type: Set[str]
        self._saved_lines = None  # type: Optional[List[bytes]]

    def add(self, entry):
        # type: (str) -> None
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        # os.path.splitdrive is used instead of os.path.isabs because isabs
        # treats non-absolute paths with drive letter markings like c:foo\bar
        # as absolute paths. It also does not recognize UNC paths if they don't
        # have more than "\\server\share". Valid examples: "\\server\share\" or
        # "\\server\share\folder". Python 2.7.8+ supports UNC in splitdrive.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        # type: () -> None
        logger.debug('Removing pth entries from %s:', self.file)

        # If the file doesn't exist, log a warning and return
        if not os.path.isfile(self.file):
            logger.warning(
                "Cannot remove entries from nonexistent file {}".format(
                    self.file)
            )
            return
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        # type: () -> bool
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True

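Finally, a short sketch of the .pth editing round trip. UninstallPthEntries is likewise internal, and the easy-install.pth contents below are fabricated for the example.

import os
import tempfile

from pip._internal.req.req_uninstall import UninstallPthEntries

with tempfile.TemporaryDirectory() as d:
    pth = os.path.join(d, "easy-install.pth")
    with open(pth, "w") as f:
        f.write("./demo-1.0-py3.8.egg\n./other-2.0-py3.8.egg\n")

    editor = UninstallPthEntries(pth)
    editor.add("./demo-1.0-py3.8.egg")  # queue this entry for removal
    editor.remove()                     # rewrite the file without it
    editor.rollback()                   # restore the saved original bytes
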
sources/pip_20.1/_internal/resolution/__init__.py (new empty file, 0 changes)