requires.txt is not mandatory for metadata generator

Merged: Marek Chrastina requested to merge extras into master
@@ -2,22 +2,30 @@
pipdeps
"""
import argparse
import itertools
import json
import distutils.version
import os
import platform
import pprint
import re
import subprocess
import sys
import urllib2
import tarfile
import tempfile
import urllib2
import zipfile
import wheel.metadata
import tabulate
import packaging.specifiers
import packaging.version
import pip._internal.utils.misc
import wheel.metadata
# https://www.python.org/dev/peps/pep-0508/#environment-markers
PY_VER = ".".join(map(str, sys.version_info[:2]))
SYS_PLAT = sys.platform
PLAT_PY_IMPL = platform.python_implementation()
def arg_parse():
"""
@@ -35,117 +43,293 @@ def arg_parse():
action='store_true',
help="upgrade upgradeable packages")
group.add_argument('-s', '--show',
nargs='+',
nargs='*',
help="show detailed info about upgradeable packages")
return parser.parse_args()
def get_pyver():
def upgrade_package(data):
"""
return running python version
pip install --upgrade "<package>==<versions>"
"""
return ".".join(map(str, sys.version_info[:3]))
to_upgrade = []
for package, version in data:
to_upgrade.append("%s==%s" % (package, version))
subprocess.check_call(
["pip", "install", "--upgrade", " ".join(to_upgrade)],
stderr=subprocess.STDOUT
)
def is_strict_version(version):
def get_json(url):
"""
Return true if version is strict, otherwise return false
Return url json
"""
try:
distutils.version.StrictVersion(version)
except ValueError:
return False
return True
return json.load(urllib2.urlopen(urllib2.Request(url)))
def version_conform_specifiers(version, specifiers):
def file_download(url):
"""
check if version conforms specifiers
Download file from url as temporary file
It returns file object
"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
rfile = urllib2.urlopen(url)
with tmp_file as output:
output.write(rfile.read())
return tmp_file
def merge_two_dicts(in_x, in_y):
"""
Return merge of two dictionaries
"""
out = in_x.copy()
out.update(in_y)
return out
def is_version(version):
"""
Return true if version satisfies the regex, otherwise return false
"""
if re.compile(r'^(\d+) \. (\d+) (\. (\d+))? (\. (\d+))?$', re.VERBOSE).search(version) or \
re.compile(r'^(\d+) \. (\d+) (\. (\d+))? (rc(\d+))?$', re.VERBOSE).search(version):
return True
return False
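[Editorial illustration, not part of the diff] A few calls showing which release strings the new is_version() accepts; the expected results follow from the two verbose regexes above and assume the function is in scope:

    print is_version('1.11.2')    # True  - plain numeric release (two to four components)
    print is_version('2.0rc1')    # True  - release candidate
    print is_version('1.0.dev1')  # False - dev releases are filtered out
    print is_version('2.0b1')     # False - betas are filtered out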
def is_in_specifiers(version, specifiers):
"""
Return true if version satisfies the specifiers, otherwise return false
"""
if not specifiers:
return True
elif version is None:
return True
else:
ver = packaging.version.Version(version)
spec = packaging.specifiers.SpecifierSet(",".join(specifiers))
if spec.contains(ver):
return True
return False
# https://github.com/pypa/packaging/pull/92
ver = packaging.version.LegacyVersion(version)
specifiers = [
packaging.specifiers.LegacySpecifier(s.strip()) for s in specifiers if s.strip()]
return all(s.contains(ver) for s in specifiers)
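[Editorial illustration, not part of the diff] A usage sketch for the rewritten is_in_specifiers(), which now evaluates versions with LegacySpecifier/LegacyVersion per the linked packaging pull request; assumes the function above is in scope:

    print is_in_specifiers('1.5', ['>=1.0', '<2.0'])  # True
    print is_in_specifiers('2.1', ['>=1.0', '<2.0'])  # False
    print is_in_specifiers('1.5', [])                 # True - no constraints means anything passes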
def upgrade_package(package, versions):
def is_in_conditions(condition):
"""
pip install --upgrade "<package><versions>"
Return true if the condition is satisfied by sys_platform, python_version and
platform_python_implementation, otherwise return false
"""
subprocess.check_call(
["pip", "install", "--upgrade", "%s==%s" % (package, "".join(versions))],
stderr=subprocess.STDOUT
)
if not condition:
return True
# pylint: disable=eval-used
return eval(
condition.replace("sys_platform", '"%s"' % SYS_PLAT) \
.replace("python_version", '"%s"' % PY_VER) \
.replace("platform_python_implementation", '"%s"' % PLAT_PY_IMPL))
def get_pip_list():
def is_in_extra(extra, req_extra):
"""
pip list
Return true if the extra is satisfied, otherwise return false
"""
outdated_packages = subprocess.check_output(["pip", "list"])
return [line.split()[0] for line in outdated_packages.strip().split("\n")[2:]]
if extra is None or extra in req_extra:
return True
return False
def file_download(url):
def specifiers_intersection(specifiers):
"""
Download file from url as temporary file
It returns file object
Return the intersection of specifiers; return None if they cannot be satisfied together
"""
tmp_file = tempfile.NamedTemporaryFile(delete=False)
rfile = urllib2.urlopen(url)
with tmp_file as output:
output.write(rfile.read())
return tmp_file
if not specifiers:
return []
specifiers = [packaging.specifiers.LegacySpecifier(s.strip()) for s in specifiers if s.strip()]
left_boarder = [s for s in specifiers if s.operator in ['>', '>=']]
if left_boarder:
max_left = sorted([s.version for s in left_boarder],
key=packaging.specifiers.LegacyVersion, reverse=True)[0]
max_left_op = [s.operator for s in left_boarder if s.version == max_left]
if '>' in max_left_op:
max_left_op = '>'
else:
max_left_op = '>='
right_boarder = [s for s in specifiers if s.operator in ['<', '<=']]
if right_boarder:
min_right = sorted([s.version for s in right_boarder],
key=packaging.specifiers.LegacyVersion)[0]
min_right_op = [s.operator for s in right_boarder if s.version == min_right]
if '<' in min_right_op:
min_right_op = '<'
else:
min_right_op = '<='
equals = [s for s in specifiers if s.operator in ['==']]
if equals:
cmp_v = list(set([s.version for s in equals]))[0]
if all([packaging.version.LegacyVersion(cmp_v) == packaging.version.LegacyVersion(item) \
for item in list(set([s.version for s in equals]))]):
equals = cmp_v
else:
return None
notequals = [s for s in specifiers if s.operator in ['!=']]
notequals = list(set([s.version for s in notequals]))
boarders = []
if left_boarder and right_boarder:
if packaging.version.LegacyVersion(max_left) > packaging.version.LegacyVersion(min_right):
return None
elif packaging.version.LegacyVersion(max_left) == \
packaging.version.LegacyVersion(min_right):
if max_left_op in ['>='] and min_right_op in ['<=']:
max_left_op = '=='
right_boarder = None
else:
return None
if left_boarder:
boarders.append("%s%s" % (max_left_op, max_left))
if right_boarder:
boarders.append("%s%s" % (min_right_op, min_right))
if boarders and notequals:
for item in notequals:
if is_in_specifiers(item, boarders):
boarders.append("!=%s" % item)
elif not boarders and notequals:
for item in notequals:
boarders.append("!=%s" % item)
if boarders and equals:
if is_in_specifiers(equals, boarders):
return ["==%s" % equals]
return None
elif not boarders and equals:
return ["==%s" % equals]
return boarders
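[Editorial illustration, not part of the diff] Expected results of specifiers_intersection() for a few inputs, traced from the border/equals/not-equals logic above; assumes the function is in scope:

    print specifiers_intersection(['>=1.0', '>=1.2', '<2.0', '!=1.5'])  # ['>=1.2', '<2.0', '!=1.5']
    print specifiers_intersection(['==1.4', '>=1.0', '<2.0'])           # ['==1.4']
    print specifiers_intersection(['>=2.0', '<1.0'])                    # None - contradictory bounds
    print specifiers_intersection([])                                   # []   - nothing to intersect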
def get_jsonpipdeptree():
def select_upkgs(data, rkey):
"""
pipdeptree --json-tree
Return data packages having requested key
"""
pipdeptree = subprocess.check_output(
["pipdeptree", "--json-tree"],
stderr=subprocess.STDOUT
result = []
for pkg, pkg_data in data.iteritems():
if rkey in pkg_data.keys():
result.append(pkg)
return result
def print_list(data):
"""
Print upgradeable versions
"""
upkgs = select_upkgs(data, 'upgradeable_version')
if not upkgs:
print "There is nothing to upgrade."
return 0
tab_data = []
for pkg in sorted(upkgs):
tab_data.append([pkg, data[pkg]['installed_version'], data[pkg]['upgradeable_version']])
print tabulate.tabulate(
tab_data,
['package', 'installed_version', 'upgradeable_version']
)
return json.loads(pipdeptree.strip())
return 1
def get_json(url):
def get_pkg_data():
"""
Return url json
Return package data
"""
return json.load(urllib2.urlopen(urllib2.Request(url)))
packages_data = {}
# pylint: disable=protected-access
for pkg in pip._internal.utils.misc.get_installed_distributions():
pkg_name, pkg_ver, _pkg_extra = pkginfo(str(pkg))
rev = {'installed_version': pkg_ver,
'requires': [pkginfo(str(dep), repair=True) for dep in pkg.requires()]}
packages_data[pkg_name] = rev
packages_data = insert_extras(packages_data)
packages_data = insert_availables(packages_data)
packages_data = insert_news(packages_data)
def json_search(jsonpipdeptree, package, key):
while True:
new_packages_data = new_packages(packages_data)
if not new_packages_data:
break
new_packages_data = insert_availables(new_packages_data)
new_packages_data = insert_news(new_packages_data)
packages_data = merge_two_dicts(packages_data, new_packages_data)
check_new_extras(packages_data)
return packages_data
def pkginfo(data, req_extra=None, repair=False):
"""
find package dependencies in json tree
Return parsed pkginfo
"""
if isinstance(jsonpipdeptree, dict):
keys = jsonpipdeptree.keys()
if 'package_name' in keys and key in keys:
if re.search(r'^%s$' % package, jsonpipdeptree['package_name'], re.IGNORECASE):
yield jsonpipdeptree[key]
for child_val in json_search(jsonpipdeptree['dependencies'], package, key):
yield child_val
elif isinstance(jsonpipdeptree, list):
for item in jsonpipdeptree:
for item_val in json_search(item, package, key):
yield item_val
extra_match = re.compile(
r"""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""").search(data)
if extra_match:
groupdict = extra_match.groupdict()
condition = groupdict['condition']
extra = groupdict['extra']
package = groupdict['package']
if condition.endswith(' and '):
condition = condition[:-5]
mysearch = re.compile(r'(extra == .*)').search(condition)
if mysearch:
extra = mysearch.group(1)
condition = condition.replace(extra, '')
if not condition:
condition = None
extra = re.compile(r'extra == (.*)').search(extra).group(1).replace('"', "")
else:
condition, extra = None, None
package = data
if not is_in_conditions(condition):
return None
pkg_name, pkg_extra, pkg_ver = re.compile(
r'([\w\.\-]*)(\[\w*\])?(.*)').search(package).groups()
if pkg_extra:
pkg_extra = pkg_extra.replace("[", "").replace("]", "").lower()
pkg_ver = pkg_ver.replace("(", "").replace(")", "").strip()
if not pkg_ver:
pkg_ver = []
else:
if repair:
try:
pkg_ver = re.compile(r'^(\d.*)$').search(pkg_ver).group(1)
except AttributeError:
pass
pkg_ver = pkg_ver.split(",")
if not is_in_extra(extra, req_extra):
return None
return (pkg_name.lower(), pkg_ver, pkg_extra)
def get_highest_version(package, data):
def insert_extras(data):
"""
Return upgradeable version if possible, otherwise return installed version
Insert extras
"""
try:
version = data[package]['upgradeable_version']
except KeyError:
version = data[package]['installed_version']
return version
for key in data.keys():
extra = []
for pkg, pkg_data in data.iteritems():
for dep in pkg_data['requires']:
if dep[0] == key:
if dep[2]:
extra.append(dep[2])
data[key]['extras'] = extra
if extra:
# pylint: disable=protected-access
for pkg in pip._internal.utils.misc.get_installed_distributions():
pkg_name, _pkg_ver, _pkg_extra = pkginfo(str(pkg))
if pkg_name == key:
data[key]['requires'] += [pkginfo(str(dep), repair=True, req_extra=extra) \
for dep in pkg.requires(extras=extra)]
return data
def find_available_vers(package_name, pyver):
def insert_availables(data):
"""
Return descending list of available strict version
Insert available versions
"""
for pkg, pkg_data in data.iteritems():
if 'available_version' in pkg_data.keys():
continue
try:
data[pkg]['available_version'] = get_available_vers(pkg)
except urllib2.HTTPError:
data[pkg]['available_version'] = []
return data
def get_available_vers(package):
"""
Return descending list of public available strict version
"""
versions = []
try:
data = get_json("https://pypi.python.org/pypi/%s/json" % (package_name,))
data = get_json("https://pypi.python.org/pypi/%s/json" % (package))
except urllib2.HTTPError, err:
print "%s %s" % (err, err.url)
raise urllib2.HTTPError(err.url, err.code, None, err.hdrs, err.fp)
@@ -154,41 +338,108 @@ def find_available_vers(package_name, pyver):
requires_python = []
for item in data["releases"][release]:
if item['requires_python'] is not None:
requires_python.append(item['requires_python'])
if is_strict_version(release) and version_conform_specifiers(pyver, requires_python):
for reqpyt in item['requires_python'].split(","):
requires_python.append(reqpyt.strip())
if requires_python:
requires_python = list(set(requires_python))
if is_version(release) and is_in_specifiers(PY_VER, requires_python):
versions.append(release)
return sorted(versions, key=distutils.version.StrictVersion, reverse=True)
return sorted(versions, key=packaging.specifiers.LegacyVersion, reverse=True)
def get_newer_vers(available_version, required_version, installed_version=None):
def select_news(available_version, installed_version=None):
"""
Return list of newer versions which conforms pipdeptree dependencies, otherwise return none.
Select versions newer than installed version, if it is known
"""
if required_version is None:
result = [aver for aver in list(available_version)]
return sorted(result, key=distutils.version.StrictVersion, reverse=True)
if [rver for rver in required_version if re.search(r'(^==.*|^\d.*)', rver) is not None]:
return None
result = []
av_version = list(available_version)
while True:
if installed_version is None:
return sorted(available_version, key=packaging.specifiers.LegacyVersion, reverse=True)
iver = packaging.version.Version(installed_version)
return sorted([aver for aver in available_version if packaging.version.Version(aver) > iver],
key=packaging.specifiers.LegacyVersion, reverse=True)
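[Editorial illustration, not part of the diff] select_news() keeps only versions strictly newer than the installed one and sorts them descending; when no installed version is known it returns everything, still sorted descending:

    print select_news(['1.0', '1.2', '2.0'], installed_version='1.1')  # ['2.0', '1.2']
    print select_news(['1.0', '1.2'])                                  # ['1.2', '1.0']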
def insert_news(data):
"""
Insert new versions
"""
for pkg, pkg_data in data.iteritems():
if 'new_version' in pkg_data.keys():
continue
try:
version = av_version.pop(0)
except IndexError:
break
aver = packaging.version.Version(version)
rver = packaging.specifiers.SpecifierSet(",".join(required_version))
if rver.contains(aver):
if installed_version is not None:
iver = packaging.version.Version(installed_version)
if aver == iver:
break
elif aver > iver:
result.append(version)
else:
result.append(version)
if result:
return sorted(result, key=distutils.version.StrictVersion, reverse=True)
return None
new_version = select_news(pkg_data['available_version'], pkg_data['installed_version'])
except KeyError:
new_version = select_news(pkg_data['available_version'])
if new_version:
res = {}
for version in new_version:
content = parse_metadata(get_metadata(pkg, version), pkg_data['extras'])
res[version] = content
if res:
pkg_data['new_version'] = res
return data
def new_packages(data):
"""
Return new packages as dictionary
"""
out = {}
arr = []
pkg_list = data.keys()
for pkg, pkg_data in data.iteritems():
try:
for _ver, ver_data in pkg_data['new_version'].iteritems():
for dep in ver_data:
if dep[0] not in pkg_list:
arr.append(dep)
except KeyError:
pass
for item in list(set([i[0] for i in arr])):
extras = []
for pkg, _req, extra in arr:
if pkg == item and extra is not None:
extras.append(extra)
out[item] = {'extras': extras}
return out
def check_new_extras(data):
"""
Check if there are new extras
"""
extra_pkgs = []
pkg_list = data.keys()
for pkg, pkg_data in data.iteritems():
try:
for _ver, ver_data in pkg_data['new_version'].iteritems():
for dep in ver_data:
if dep[0] in pkg_list and dep[2] is not None:
extra_pkgs.append(dep)
except KeyError:
pass
for pkg, _req, extra in extra_pkgs:
if extra not in data[pkg]['extras']:
raise Exception('There are new extras!')
def check_extras(data):
"""
Check if there are extras in upgradeable packages
"""
for package in select_upkgs(data, 'upgradeable_version'):
if data[package]['extras']:
raise Exception('There are extras in upgradeable packages!')
def check_co_branches(data):
"""
Check if there are branches with intersecting packages
"""
co_branches = []
package_branches = get_branches(data)
for branch in package_branches:
for pkg, reqs in package_branches.iteritems():
if pkg == branch:
continue
if len(package_branches[branch]+reqs) != len(list(set(package_branches[branch]+reqs))):
co_branches.append(branch)
co_branches = list(set(co_branches))
if co_branches:
raise Exception('There are branches with intersection of packages!')
def write_metadata(tmp_file):
"""
@@ -223,7 +474,7 @@ def get_metadata(package, version):
if item['packagetype'] == 'sdist':
tmp_file = file_download(item['url'])
write_metadata(tmp_file)
if os.path.isfile('/tmp/requires.txt') and os.path.isfile('/tmp/PKG-INFO'):
if os.path.isfile('/tmp/PKG-INFO'):
metadata = [
line.decode('utf-8') \
for line in wheel.metadata.pkginfo_to_metadata('/tmp', '/tmp/PKG-INFO') \
@@ -249,7 +500,7 @@ def get_metadata(package, version):
break
return metadata
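[Editorial illustration, not part of the diff] The hunk above carries the change named in the MR title: for sdists, get_metadata() now only insists on /tmp/PKG-INFO instead of requiring /tmp/requires.txt as well, because wheel's pkginfo_to_metadata() treats requires.txt as optional input. A minimal sketch of that behaviour, assuming an sdist has been unpacked so that PKG-INFO (and possibly requires.txt) sits under /tmp, and assuming pkginfo_to_metadata() returns an email.message.Message as it does in the wheel versions this script uses:

    import os
    import wheel.metadata

    # requires.txt is no longer a precondition; PKG-INFO alone is enough.
    if os.path.isfile('/tmp/PKG-INFO'):
        msg = wheel.metadata.pkginfo_to_metadata('/tmp', '/tmp/PKG-INFO')
        # If /tmp/requires.txt exists it is still read and turned into
        # Requires-Dist lines; if not, only the PKG-INFO fields are kept.
        for line in str(msg).splitlines():
            if line.startswith('Requires-Dist:'):
                print line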
def parse_metadata(metadata, pyver):
def parse_metadata(metadata, extra):
"""
Return dependencies parsed from metadata
"""
@@ -257,157 +508,537 @@ def parse_metadata(metadata, pyver):
if 'Metadata-Version' in line.decode('utf-8'):
metadata_version = line.replace('Metadata-Version:', '').strip()
break
if packaging.version.Version(metadata_version) >= packaging.version.Version('2.0'):
out = []
for dep in [
line.replace('Requires-Dist:', '').strip() \
for line in metadata if re.search(r'^Requires-Dist:', line)]:
if ';' in dep:
dep = dep.split(';')
if 'python_version' in dep[1]:
if packaging.specifiers.SpecifierSet(
dep[1].replace('python_version', '').replace('"', '').strip()) \
.contains(packaging.version.Version(pyver)):
dep = dep[0]
else:
continue
else:
continue
dep = dep.split()
try:
pkg = re.search(r'(.*)(\[.*\])', dep[0]).group(1)
except AttributeError:
pkg = dep[0]
try:
pkg = re.search(r'(^[\w\.\-]*)(.*)', dep[0]).group(1)
dep.append(re.search(r'(^[\w\.\-]*)(.*)', dep[0]).group(2))
except AttributeError:
pkg = dep[0]
try:
ver = dep[1].replace('(', '').replace(')', '').replace(';', '')
except IndexError:
ver = None
out.append((pkg, ver))
arr = []
if metadata_version and \
packaging.version.Version(metadata_version) >= packaging.version.Version('2.0'):
arr = []
lines = [line.replace('Requires-Dist:', '').strip() \
for line in metadata if re.search(r'^Requires-Dist:', line)]
for line in lines:
data = pkginfo(str(line), req_extra=extra, repair=True)
if data:
arr.append(pkginfo(str(line), req_extra=extra, repair=True))
return arr
def pvector(package, data):
"""
Return vector of package versions
"""
out = []
if 'new_version' not in data[package].keys():
out.append((package, data[package]['installed_version']))
else:
if 'upgradeable_version' in data[package].keys():
out.append((package, data[package]['upgradeable_version']))
else:
if 'installed_version' in data[package].keys():
out.append((package, data[package]['installed_version']))
for ver in sorted(data[package]['new_version'].keys(),
key=packaging.specifiers.LegacyVersion):
if 'upgradeable_version' in data[package].keys():
if packaging.specifiers.LegacyVersion(ver) > \
packaging.specifiers.LegacyVersion(data[package]['upgradeable_version']):
out.append((package, ver))
else:
out.append((package, ver))
return out
def find_new_dependencies(package, version, package_list, pyver):
def single_multi(data):
"""
Return package dependencies parsed from pypi json
Return list of packages with new versions and list of packages without new versions
"""
content = parse_metadata(get_metadata(package, version), pyver)
for pkg, ver in content:
try:
if pkg in package_list:
yield (pkg, ver)
pkg_list, single, multi = [], [], []
for pkg, pkg_data in data.iteritems():
if 'requires' in pkg_data.keys():
pkg_list.append(pkg)
for pkg in pkg_list:
vec = pvector(pkg, data)
if len(vec) == 1:
single.append(*vec)
elif len(vec) > 1:
multi.append(vec)
single = list(set([item[0] for item in single]))
multi = list(set([item[0] for pkg_data in multi for item in pkg_data]))
return single, multi
def move_incompatible(data, to_delete):
"""
Move new version to incompatible
"""
if not to_delete:
return data
for package, version in to_delete:
if 'incompatible_version' not in data[package].keys():
data[package]['incompatible_version'] = {}
data[package]['incompatible_version'][version] = data[package]['new_version'][version]
del data[package]['new_version'][version]
if not data[package]['new_version']:
del data[package]['new_version']
return data
def get_compatible(versions, reqs, inverse=False):
"""
Return compatible versions
"""
specifiers = specifiers_intersection([i for i in itertools.chain(*[req[1] for req in reqs])])
if inverse:
v_versions = [version for version in versions if not is_in_specifiers(version, specifiers)]
else:
v_versions = [version for version in versions if is_in_specifiers(version, specifiers)]
return sorted(v_versions, key=packaging.specifiers.LegacyVersion, reverse=True)
def del_hards(data):
"""
Return a list of packages and their versions that do not satisfy the
requirements of packages without a new version
"""
package_no_news, package_with_news = single_multi(data)
deps = []
for package in package_no_news:
if 'requires' in data[package].keys():
if 'upgradeable_version' in data[package].keys():
uver = data[package]['upgradeable_version']
deps += [dep for dep in data[package]['new_version'][uver] if dep[1] or dep[2]]
else:
try:
for child in find_new_dependencies(
pkg,
get_newer_vers(find_available_vers(pkg, pyver), ver, None)[0],
package_list,
pyver
):
yield child
except TypeError:
pass
except AttributeError:
pass
deps += [dep for dep in data[package]['requires'] if dep[1] or dep[2]]
hard_requirements = {}
for item in list(set([pkg[0] for pkg in deps])):
reqs, extras = [], []
for pkg, req, extra in deps:
if pkg == item:
reqs += req
if extra:
extras += extra
hard_requirements[item] = {'installed_version': data[item]['installed_version'],
'requirements': list(set(reqs)),
'extras': list(set(extras))}
to_delete = []
for pkg in package_with_news+not_installed(data):
for ver, ver_data in data[pkg]['new_version'].iteritems():
for dep, req, extra in ver_data:
if dep in hard_requirements.keys():
if specifiers_intersection(req+hard_requirements[dep]['requirements']) is None:
to_delete.append((pkg, ver))
return to_delete
def del_no_news(data):
"""
Return a list of packages and their versions that do not satisfy packages without a new version
"""
to_delete = []
package_no_news, package_with_news = single_multi(data)
for package in package_with_news+not_installed(data):
reqs = []
for pkg, pkg_data in data.iteritems():
if pkg != package and 'requires' in pkg_data.keys() and pkg in package_no_news:
if 'upgradeable_version' in pkg_data.keys():
uver = pkg_data['upgradeable_version']
reqs += [item for item in pkg_data['new_version'][uver] if item[0] == package]
else:
reqs += [item for item in pkg_data['requires'] if item[0] == package]
versions = [pkg[1] for pkg in pvector(package, data)]
incver = get_compatible(versions, reqs, inverse=True)
for version in versions:
if version in incver:
to_delete.append((package, version))
return to_delete
def del_one_ver(data):
"""
If all package requirements lead to one specific version, return a list of those packages
"""
to_delete = []
_package_no_news, package_with_news = single_multi(data)
for package in package_with_news+not_installed(data):
reqs = []
for pkg, pkg_data in data.iteritems():
uver = None
if pkg != package and 'requires' in pkg_data.keys():
if 'upgradeable_version' in pkg_data.keys():
uver = pkg_data['upgradeable_version']
add_reqs(reqs, pkg_data['requires'], pkg=package)
else:
add_reqs(reqs, pkg_data['requires'], pkg=package)
if 'new_version' in pkg_data.keys():
for ver, ver_data in pkg_data['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
add_reqs(reqs, ver_data, pkg=package)
specifiers = specifiers_intersection(
[i for i in itertools.chain(*[req[1] for req in reqs])])
if specifiers:
if len(specifiers) == 1 and '==' in specifiers[0]:
versions = [pkg[1] for pkg in pvector(package, data)]
versions.remove(specifiers[0].replace('==', ''))
for version in versions:
to_delete.append((package, version))
return to_delete
def get_deps(data, package):
"""
Return package deep requirements
"""
try:
content = data[package]
except KeyError:
content = []
for pkg in content:
yield pkg
for child in get_deps(data, pkg):
yield child
def not_installed(data):
"""
Return not installed packages
"""
not_i = []
for pkg, pkg_data in data.iteritems():
if 'requires' not in pkg_data.keys():
not_i.append(pkg)
return not_i
def get_no_news_req(data):
"""
Return requirements of packages without new versions
"""
reqs = {}
package_no_news, _package_with_news = single_multi(data)
for package in package_no_news:
version = pvector(package, data)[0][1]
reqs = save_version(reqs, data, package, version)
return reqs
def depless_vers(res):
def save_version(r_data, p_data, pkg, ver):
"""
If there is no dependencies or versionless dependencies, return the upgradeable version,
otherwise return None
Save the highest package version
"""
depless = []
for ver, deps in res.iteritems():
if not deps:
depless.append(ver)
if 'installed_version' in p_data[pkg].keys() and p_data[pkg]['installed_version'] == ver:
r_data[pkg] = p_data[pkg]['requires']
else:
r_data[pkg] = p_data[pkg]['new_version'][ver]
return r_data
def add_reqs(reqs, data, pkg=None, addpkg=None):
"""
Append requirements
"""
for dep, req, extra in data:
if pkg and dep != pkg:
continue
if addpkg:
reqs.append(addpkg)
else:
if not [dep for dep in deps if dep[1] is not None]:
depless.append(ver)
if depless:
depless = sorted(depless, key=distutils.version.StrictVersion, reverse=True)[0]
reqs.append((dep, req, extra))
def save_ic(out, package, incompatible=None, compatible=None):
"""
Save compatible/incompatible version
"""
if package not in out.keys():
out[package] = {'incompatible': [], 'compatible': None}
if incompatible:
out[package]['incompatible'].append(incompatible)
if compatible:
out[package]['compatible'] = compatible
return out
def get_reqs(data, package=None):
"""
Get requirements
"""
if package:
reqs = []
for pkg, pkg_data in data.iteritems():
uver = None
if pkg != package and 'requires' in data[pkg].keys():
if 'upgradeable_version' in data[pkg].keys():
uver = data[pkg]['upgradeable_version']
add_reqs(reqs, data[pkg]['new_version'][uver], pkg=package, addpkg=pkg)
else:
add_reqs(reqs, data[pkg]['requires'], pkg=package, addpkg=pkg)
if 'new_version' in data[pkg].keys():
for ver, ver_data in data[pkg]['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
add_reqs(reqs, ver_data, pkg=package, addpkg=pkg)
else:
depless = None
return depless
def collect_packages(package_list, jsonpipdeptree, pyver=None):
"""
Collect data about packages as dictionary
"""
result = {}
for package in package_list:
installed_version = "".join(list(set(
[_ for _ in json_search(jsonpipdeptree, package, 'installed_version')])))
required_version = []
for dep in list(set(
[_ for _ in json_search(jsonpipdeptree, package, 'required_version')]
)):
if 'Any' not in dep:
required_version.append(dep)
try:
available_version = find_available_vers(package, pyver)
except urllib2.HTTPError:
available_version = [installed_version]
newer_version = get_newer_vers(available_version, required_version, installed_version)
rev = {'installed_version': installed_version,
'required_version': required_version,
'available_version': available_version}
if newer_version is not None:
res = {}
for version in newer_version:
res[version] = [
_ for _ in find_new_dependencies(package, version, package_list, pyver)]
rev['newer_version'] = res
out = {}
for pkg, pkg_data in data.iteritems():
reqs = []
uver = None
if 'upgradeable_version' in pkg_data.keys():
uver = pkg_data['upgradeable_version']
add_reqs(reqs, pkg_data['new_version'][uver])
elif 'requires' in pkg_data.keys():
add_reqs(reqs, pkg_data['requires'])
if 'new_version' in pkg_data.keys():
for ver, ver_data in pkg_data['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
add_reqs(reqs, ver_data)
out[pkg] = list(set([req[0] for req in reqs]))
return out
depless = depless_vers(res)
if depless:
rev['upgradeable_version'] = depless
def phase_one(data):
"""
Partial resolve upgrades
"""
out, no_requires, only_no_news_requires, no_requires_deps = {}, {}, {}, {}
package_no_news, package_with_news = single_multi(data)
for pkg in package_with_news:
dep_in_no_news_vers = []
dep_vers = []
uver = None
if 'upgradeable_version' in data[pkg].keys():
uver = data[pkg]['upgradeable_version']
if 'new_version' in data[pkg].keys():
for ver, ver_data in data[pkg]['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
if ver_data:
reqs = [dep[0] for dep in ver_data]
reqs_not_in_no_news = [dep for dep in reqs if dep not in package_no_news]
if not reqs_not_in_no_news:
dep_in_no_news_vers.append(ver)
else:
dep_vers.append(ver)
if dep_vers:
no_requires[pkg] = dep_vers
if dep_in_no_news_vers:
only_no_news_requires[pkg] = dep_in_no_news_vers
result[package] = rev
return result
for package, version in no_requires.iteritems():
reqs = []
for pkg, pkg_data in data.iteritems():
uver = None
if pkg != package and 'requires' in pkg_data.keys():
if 'upgradeable_version' in pkg_data.keys():
uver = pkg_data['upgradeable_version']
add_reqs(reqs, pkg_data['new_version'][uver], pkg=package, addpkg=pkg)
else:
add_reqs(reqs, pkg_data['requires'], pkg=package, addpkg=pkg)
if 'new_version' in pkg_data.keys():
for ver, ver_data in pkg_data['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
add_reqs(reqs, ver_data, pkg=package, addpkg=pkg)
if reqs:
no_requires_deps[package] = list(set(reqs))
else:
out = save_ic(out, package,
compatible=sorted(no_requires[package],
key=packaging.specifiers.LegacyVersion,
reverse=True)[0])
def check_deps(deps, packages_data):
for package, dep in no_requires_deps.iteritems():
if all([pkg in package_no_news for pkg in dep]):
reqs = []
for pkg in dep:
if 'upgradeable_version' in data[pkg].keys():
uver = data[pkg]['upgradeable_version']
add_reqs(reqs, data[pkg]['new_version'][uver], pkg=package)
else:
add_reqs(reqs, data[pkg]['requires'], pkg=package)
compatible = get_compatible(no_requires[package], reqs)
for version in no_requires[package]:
if version not in compatible:
out = save_ic(out, package, incompatible=version)
if compatible:
out = save_ic(out, package, compatible=compatible[0])
for package, versions in only_no_news_requires.iteritems():
reqs = []
for pkg, pkg_data in data.iteritems():
uver = None
if pkg != package and 'requires' in data[pkg].keys():
if 'upgradeable_version' in data[pkg].keys():
uver = data[pkg]['upgradeable_version']
add_reqs(reqs, data[pkg]['new_version'][uver], pkg=package, addpkg=pkg)
else:
add_reqs(reqs, data[pkg]['requires'], pkg=package, addpkg=pkg)
if 'new_version' in data[pkg].keys():
for ver, ver_data in data[pkg]['new_version'].iteritems():
if uver:
if packaging.specifiers.LegacyVersion(ver) <= \
packaging.specifiers.LegacyVersion(uver):
continue
add_reqs(reqs, ver_data, pkg=package, addpkg=pkg)
if all([item in package_no_news for item in list(set(reqs))]):
out = save_ic(out, package,
compatible=sorted(versions,
key=packaging.specifiers.LegacyVersion,
reverse=True)[0])
return out
def get_branches(data):
"""
Return true, if all package dependencies conforms
Return branches
"""
ndeps = []
for item in deps:
if item[1] is not None:
ndeps.append(
version_conform_specifiers(
get_highest_version(item[0], packages_data),
packages_data[item[0]]['required_version']+[item[1]]
)
)
return all(ndeps)
branches = []
package_reqs = {}
_package_no_news, package_with_news = single_multi(data)
package_with_news = package_with_news+not_installed(data)
package_info = get_reqs(data)
for package in package_with_news:
package_reqs[package] = list(set([i for i in get_deps(package_info, package)]))
for package in package_with_news:
res = []
for pkg, deps in package_reqs.iteritems():
if pkg == package:
continue
if package in deps:
res.append(pkg)
if not res:
branches.append(package)
package_info = {}
for branch in branches:
package_info[branch] = [i for i in package_reqs[branch] if i in package_with_news]
return package_info
def select_pkgs(packages_data, rkey):
def cross_packages(data):
"""
Return data packages having requested key
Return cross packages
"""
result = {}
for pkg, pkg_data in packages_data.iteritems():
if rkey in pkg_data.keys():
result[pkg] = pkg_data
return result
cross_branches = []
out, pkg_reqs = {}, {}
package_branches = get_branches(data)
_package_no_news, package_with_news = single_multi(data)
package_with_news = package_with_news+not_installed(data)
for package in package_with_news:
res = []
for pkg, reqs in package_branches.iteritems():
if package in reqs:
res.append(pkg)
if len(res) > 1:
cross_branches.append(package)
for package in package_with_news:
if package not in cross_branches:
version = pvector(package, data)[0][1]
pkg_reqs = save_version(pkg_reqs, data, package, version)
merged_reqs = merge_two_dicts(pkg_reqs, get_no_news_req(data))
for package in cross_branches:
reqs = []
for pkg, pkg_data in merged_reqs.iteritems():
add_reqs(reqs, pkg_data, pkg=package)
compatible = get_compatible([pkg[1] for pkg in pvector(package, data)], reqs)
if compatible:
out = save_ic(out, package, compatible=compatible[0])
return out
def print_list(upgradeable_pkgs):
def ibranch(data, fix=False):
"""
Provides list option
Return upgradeable versions of independent branch
"""
if upgradeable_pkgs:
data = []
for pkg, pkg_data in sorted(upgradeable_pkgs.iteritems(), key=lambda x: x[0].lower()):
data.append([pkg, pkg_data['installed_version'], pkg_data['upgradeable_version']])
print tabulate.tabulate(
data,
['package', 'installed_version', 'upgradeable_version']
)
sys.exit(1)
else:
print "There is nothing to upgrade."
sys.exit(0)
co_branches = []
out, final_result = {}, {}
no_news_req = get_no_news_req(data)
package_branches = get_branches(data)
_package_no_news, package_with_news = single_multi(data)
for branch in package_branches:
for pkg, reqs in package_branches.iteritems():
if pkg == branch:
continue
if len(package_branches[branch]+reqs) != len(list(set(package_branches[branch]+reqs))):
co_branches.append(branch)
co_branches = list(set(co_branches))
for branch in package_branches:
if branch in co_branches:
continue
if fix:
version = pvector(branch, data)[0][1]
pkg_reqs = save_version({}, data, branch, version)
merged_reqs = merge_two_dicts(pkg_reqs, no_news_req)
packages = [pvector(pkg, data)[:2] \
for pkg in package_branches[branch] if pkg in package_with_news]
else:
merged_reqs = no_news_req.copy()
packages = [pvector(branch, data)]+[pvector(pkg, data) \
for pkg in package_branches[branch] if pkg in package_with_news]
for comb in list(itertools.product(*packages)):
pkg_reqs = {}
for package, version in comb:
pkg_reqs = save_version(pkg_reqs, data, package, version)
merged_reqs = merge_two_dicts(merged_reqs, pkg_reqs)
for package, version in comb:
reqs = []
for pkg, pkg_data in merged_reqs.iteritems():
add_reqs(reqs, pkg_data, pkg=package)
specifiers = specifiers_intersection(
[i for i in itertools.chain(*[req[1] for req in reqs])])
final_result[comb] = is_in_specifiers(version, specifiers)
for comb in final_result:
if final_result[comb]:
sumary = 0
for package, version in comb:
sumary += pvector(package, data).index((package, version))
final_result[comb] = sumary
high = 0
for comb, summary in final_result.iteritems():
if summary > high:
high = summary
if high > 0:
res = []
for comb, summary in final_result.iteritems():
if summary == high:
res.append(comb)
for package, version in res[0]:
if data[package]['installed_version'] != version:
out = save_ic(out, package, compatible=version)
return out
def p_upgrade(data, pkg, compatible=None, incompatible=None):
"""
Partial upgrade
"""
if compatible:
data[pkg]['upgradeable_version'] = compatible
if incompatible:
for version in incompatible:
if compatible and version not in compatible:
data = move_incompatible(data, [(pkg, version)])
elif not compatible:
data = move_incompatible(data, [(pkg, version)])
return data
def first_loop(data):
"""
Upgrade loop
"""
while True:
to_delete_hards = del_hards(data)
data = move_incompatible(data, to_delete_hards)
to_delete_no_news = del_no_news(data)
data = move_incompatible(data, to_delete_no_news)
to_delete_one_ver = del_one_ver(data)
data = move_incompatible(data, to_delete_one_ver)
phase_one_packages = phase_one(data)
for pkg, pkg_data in phase_one_packages.iteritems():
data = p_upgrade(data, pkg, compatible=pkg_data['compatible'],
incompatible=pkg_data['incompatible'])
cross_pkgs = cross_packages(data)
for pkg, pkg_data in cross_pkgs.iteritems():
data = p_upgrade(data, pkg, compatible=pkg_data['compatible'],
incompatible=pkg_data['incompatible'])
i_branch = ibranch(data, fix=True)
for pkg, pkg_data in i_branch.iteritems():
data = p_upgrade(data, pkg, compatible=pkg_data['compatible'])
if all([not to_delete_hards, not to_delete_no_news, not to_delete_one_ver,
not phase_one_packages, not cross_pkgs, not i_branch]):
break
return data
def main():
"""
@@ -415,38 +1046,32 @@ def main():
"""
os.environ["PYTHONWARNINGS"] = "ignore:DEPRECATION"
arguments = arg_parse()
pyver = get_pyver()
pkglist = get_pip_list()
jsonpipdeptree = get_jsonpipdeptree()
packages_data = collect_packages(pkglist, jsonpipdeptree, pyver=pyver)
for pkg, pkg_data in sorted(
select_pkgs(packages_data, 'newer_version').iteritems(), key=lambda x: x[0].lower()
):
pkg_keys = pkg_data.keys()
if 'newer_version' in pkg_keys and 'upgradeable_version' not in pkg_keys:
for ver, deps in sorted(
pkg_data['newer_version'].iteritems(),
key=lambda x: distutils.version.StrictVersion(x[0]),
reverse=True
):
ndeps = check_deps(deps, packages_data)
if ndeps:
packages_data[pkg]['upgradeable_version'] = ver
break
upgradeable_pkgs = select_pkgs(packages_data, 'upgradeable_version')
packages_data = get_pkg_data()
packages_data = first_loop(packages_data)
i_branch = ibranch(packages_data)
for package, data in i_branch.iteritems():
if data['compatible']:
packages_data[package]['upgradeable_version'] = data['compatible']
check_co_branches(packages_data)
check_extras(packages_data)
if arguments.list:
print_list(upgradeable_pkgs)
if arguments.show:
for pkg in arguments.show:
sys.exit(print_list(packages_data))
if arguments.show is not None:
if arguments.show:
pkgs = arguments.show
else:
pkgs = packages_data
for pkg in pkgs:
pprint.pprint({pkg: packages_data[pkg]})
sys.exit(0)
if arguments.upgrade:
if 'pip' in upgradeable_pkgs.keys():
upgrade_package('pip', upgradeable_pkgs['pip']['upgradeable_version'])
del upgradeable_pkgs['pip']
for pkg, pkg_data in sorted(upgradeable_pkgs.iteritems(), key=lambda x: x[0].lower()):
upgrade_package(pkg, pkg_data['upgradeable_version'])
to_upgrade = []
for pkg in sorted(select_upkgs(packages_data, 'upgradeable_version')):
to_upgrade.append((pkg, packages_data[pkg]['upgradeable_version']))
upgrade_package(to_upgrade)
print "Done."
if __name__ == "__main__":