
Case vanished module; all deps are taken from metadata; hotfix extras

Merged: Marek Chrastina requested to merge fix into master
1 file  +111 −40
@@ -13,6 +13,7 @@ import urllib2
 import tarfile
 import tempfile
 import zipfile
 
+import wheel.metadata
 import tabulate
 import packaging.specifiers
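Note (not part of the diff): the newly imported wheel.metadata is what the new get_metadata() further down uses to turn an sdist's PKG-INFO plus requires.txt into METADATA-style Requires-Dist lines. A minimal sketch of that conversion, assuming a wheel release that still ships wheel.metadata.pkginfo_to_metadata() with the call signature used in this MR; the sample files below are invented:

```python
import os
import tempfile

import wheel.metadata

# Fabricated egg-info contents standing in for what write_metadata() extracts
# from a downloaded sdist.
egg_info = tempfile.mkdtemp()
with open(os.path.join(egg_info, 'PKG-INFO'), 'w') as pkg_info:
    pkg_info.write("Metadata-Version: 1.1\nName: example\nVersion: 1.0\n")
with open(os.path.join(egg_info, 'requires.txt'), 'w') as requires:
    requires.write("six>=1.9\n\n[extra]\nmock\n")

# pkginfo_to_metadata() folds requires.txt into the PKG-INFO headers and returns
# an email.message.Message whose text contains Requires-Dist / Provides-Extra
# lines, i.e. the format parse_metadata() below expects.
message = wheel.metadata.pkginfo_to_metadata(egg_info, os.path.join(egg_info, 'PKG-INFO'))
print(message.as_string())
```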
@@ -143,7 +144,11 @@ def find_available_vers(package_name, pyver):
     Return descending list of available strict version
     """
     versions = []
-    data = get_json("https://pypi.python.org/pypi/%s/json" % (package_name,))
+    try:
+        data = get_json("https://pypi.python.org/pypi/%s/json" % (package_name,))
+    except urllib2.HTTPError, err:
+        print "%s %s" % (err, err.url)
+        raise urllib2.HTTPError(err.url, err.code, None, err.hdrs, err.fp)
     releases = data["releases"].keys()
     for release in releases:
         requires_python = []
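A side note on where that urllib2.HTTPError comes from: get_json() is not part of this diff, so the helper below is only a hypothetical sketch of what it presumably does (Python 2). The new except clause above logs the failing URL and re-raises the error so that collect_packages() (last hunk) can fall back to the installed version.

```python
import json
import urllib2


def get_json(url):
    # Hypothetical stand-in for the script's real helper: urllib2.urlopen()
    # raises urllib2.HTTPError on a 404, e.g. for a module that has vanished
    # from PyPI, which is exactly the case this MR handles.
    response = urllib2.urlopen(url)
    return json.loads(response.read())
```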
@@ -158,6 +163,9 @@ def get_newer_vers(available_version, required_version, installed_version=None):
     """
     Return list of newer versions which conforms pipdeptree dependencies, otherwise return none.
     """
+    if required_version is None:
+        result = [aver for aver in list(available_version)]
+        return sorted(result, key=distutils.version.StrictVersion, reverse=True)
     if [rver for rver in required_version if re.search(r'(^==.*|^\d.*)', rver) is not None]:
         return None
     result = []
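The new branch means: when pipdeptree recorded no version requirement at all, every available release is a candidate, returned newest first. Sorting with distutils.version.StrictVersion as the key matters here because a plain string sort mis-orders multi-digit components; a small illustration:

```python
import distutils.version

available = ['1.2.0', '1.10.0', '1.9.1']

# Lexicographic descending sort puts '1.10.0' last ...
print(sorted(available, reverse=True))                                       # ['1.9.1', '1.2.0', '1.10.0']
# ... while StrictVersion compares the components numerically.
print(sorted(available, key=distutils.version.StrictVersion, reverse=True))  # ['1.10.0', '1.9.1', '1.2.0']
```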
@@ -182,60 +190,120 @@ def get_newer_vers(available_version, required_version, installed_version=None):
         return sorted(result, key=distutils.version.StrictVersion, reverse=True)
     return None
 
 
-def parse_requires_txt(package, version):
+def write_metadata(tmp_file):
     """
-    Return content of requires.txt until first [ appears
+    Write package metadata
     """
-    content = None
-    release_data = get_json("https://pypi.python.org/pypi/%s/%s/json" % (package, version,))
-    for item in release_data['releases'][version]:
+    try:
+        tar_file = tarfile.open(tmp_file.name, 'r')
+        for member in tar_file.getmembers():
+            if 'requires.txt' in member.name:
+                with open('/tmp/requires.txt', 'w') as tmpf:
+                    tmpf.write(tar_file.extractfile(member).read())
+            if 'PKG-INFO' in member.name:
+                with open('/tmp/PKG-INFO', 'w') as tmpf:
+                    tmpf.write(tar_file.extractfile(member).read())
+    except tarfile.ReadError:
+        zip_file = zipfile.ZipFile(tmp_file.name, 'r')
+        for member in zip_file.namelist():
+            if 'requires.txt' in member:
+                with open('/tmp/requires.txt', 'w') as tmpf:
+                    tmpf.write(zip_file.read(member))
+            if 'PKG-INFO' in member:
+                with open('/tmp/PKG-INFO', 'w') as tmpf:
+                    tmpf.write(zip_file.read(member))
+
+
+def get_metadata(package, version):
+    """
+    Return package metadata
+    """
+    for item in get_json("https://pypi.python.org/pypi/%s/%s/json" % (package, version,)) \
+            ['releases'][version]:
         if item['packagetype'] == 'sdist':
             tmp_file = file_download(item['url'])
-            try:
-                tar_file = tarfile.open(tmp_file.name, 'r')
-                for member in tar_file.getmembers():
-                    if 'requires.txt' in member.name:
-                        content = tar_file.extractfile(member)
-            except tarfile.ReadError:
-                zip_file = zipfile.ZipFile(tmp_file.name, 'r')
-                for member in zip_file.namelist():
-                    if 'requires.txt' in member:
-                        content = zip_file.read(member)
-            if content is not None:
-                par = []
-                for line in content:
-                    if '[' in line:
-                        break
-                    else:
-                        par.append(line.strip())
-                content = "\n".join(par)
-            os.unlink(tmp_file.name)
-    return content
+            write_metadata(tmp_file)
+            if os.path.isfile('/tmp/requires.txt') and os.path.isfile('/tmp/PKG-INFO'):
+                metadata = [
+                    line.decode('utf-8') \
+                    for line in wheel.metadata.pkginfo_to_metadata('/tmp', '/tmp/PKG-INFO') \
+                    .as_string().splitlines()]
+                os.unlink('/tmp/requires.txt')
+                os.unlink('/tmp/PKG-INFO')
+            os.unlink(tmp_file.name)
+            break
+        elif item['packagetype'] == 'bdist_wheel':
+            tmp_file = file_download(item['url'])
+            zip_file = zipfile.ZipFile(tmp_file.name, 'r')
+            for member in zip_file.namelist():
+                if 'METADATA' in member:
+                    metadata = [line.decode('utf-8') for line in zip_file.read(member).splitlines()]
+            os.unlink(tmp_file.name)
+            break
+    return metadata
+
+
+def parse_metadata(metadata, pyver):
+    """
+    Return dependencies parsed from metadata
+    """
+    for line in metadata:
+        if 'Metadata-Version' in line.decode('utf-8'):
+            metadata_version = line.replace('Metadata-Version:', '').strip()
+            break
+    if packaging.version.Version(metadata_version) >= packaging.version.Version('2.0'):
+        out = []
+        for dep in [
+                line.replace('Requires-Dist:', '').strip() \
+                for line in metadata if re.search(r'^Requires-Dist:', line)]:
+            if ';' in dep:
+                dep = dep.split(';')
+                if 'python_version' in dep[1]:
+                    if packaging.specifiers.SpecifierSet(
+                            dep[1].replace('python_version', '').replace('"', '').strip()) \
+                            .contains(packaging.version.Version(pyver)):
+                        dep = dep[0]
+                    else:
+                        continue
+                else:
+                    continue
+            dep = dep.split()
+            try:
+                pkg = re.search(r'(.*)(\[.*\])', dep[0]).group(1)
+            except AttributeError:
+                pkg = dep[0]
+            try:
+                pkg = re.search(r'(^[\w\.\-]*)(.*)', dep[0]).group(1)
+                dep.append(re.search(r'(^[\w\.\-]*)(.*)', dep[0]).group(2))
+            except AttributeError:
+                pkg = dep[0]
+            try:
+                ver = dep[1].replace('(', '').replace(')', '').replace(';', '')
+            except IndexError:
+                ver = None
+            out.append((pkg, ver))
+        return out
 
 
 def find_new_dependencies(package, version, package_list, pyver):
     """
     Return package dependencies parsed from pypi json
     """
-    content = parse_requires_txt(package, version)
-    if content is not None:
-        for line in content.split("\n"):
-            try:
-                pkg = re.search(r'^([a-zA-Z0-9_.-]+)', line).group(0)
-                dep = line.replace(pkg, '').strip()
-                if not dep:
-                    dep = None
-                if pkg in package_list:
-                    yield (pkg, dep)
-                else:
-                    for child in find_new_dependencies(
-                            pkg,
-                            get_newer_vers(find_available_vers(pkg, pyver), dep, None)[0],
-                            package_list,
-                            pyver
-                    ):
-                        yield child
-            except AttributeError:
-                pass
+    content = parse_metadata(get_metadata(package, version), pyver)
+    for pkg, ver in content:
+        try:
+            if pkg in package_list:
+                yield (pkg, ver)
+            else:
+                try:
+                    for child in find_new_dependencies(
+                            pkg,
+                            get_newer_vers(find_available_vers(pkg, pyver), ver, None)[0],
+                            package_list,
+                            pyver
+                    ):
+                        yield child
+                except TypeError:
+                    pass
+        except AttributeError:
+            pass
 
 
 def depless_vers(res):
     """
@@ -269,7 +337,10 @@ def collect_packages(package_list, jsonpipdeptree, pyver=None):
         )):
             if 'Any' not in dep:
                 required_version.append(dep)
-        available_version = find_available_vers(package, pyver)
+        try:
+            available_version = find_available_vers(package, pyver)
+        except urllib2.HTTPError:
+            available_version = [installed_version]
         newer_version = get_newer_vers(available_version, required_version, installed_version)
         rev = {'installed_version': installed_version,
                'required_version': required_version,
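This last hunk is the actual "vanished module" case from the MR title: when PyPI no longer serves JSON for a package, find_available_vers() re-raises the HTTPError (second hunk) and collect_packages() falls back to treating the installed version as the only available one, so get_newer_vers() simply finds nothing newer instead of the script crashing. A reduced sketch of the same fallback pattern; the wrapper name is hypothetical and find_available_vers is the script's own function:

```python
import urllib2


def available_or_installed(package, pyver, installed_version):
    # Hypothetical helper mirroring the fallback added to collect_packages():
    # a package that has disappeared from PyPI is pinned to what is installed.
    try:
        return find_available_vers(package, pyver)
    except urllib2.HTTPError:
        return [installed_version]
```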