| #!/usr/bin/env python |
| """ |
| |
| Utility for building Buildroot packages for existing PyPI packages |
| |
| Any package built by scanpypi should be manually checked for |
| errors. |
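|
| Illustrative invocation (the package names below are only examples; the
| output directory defaults to ./package):
|     ./scanpypi requests flask -o package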
| """ |
| from __future__ import print_function |
| from __future__ import absolute_import |
| import argparse |
| import json |
| import six.moves.urllib.request |
| import six.moves.urllib.error |
| import six.moves.urllib.parse |
| import sys |
| import os |
| import shutil |
| import tarfile |
| import zipfile |
| import errno |
| import hashlib |
| import re |
| import textwrap |
| import tempfile |
| import imp |
| from functools import wraps |
| from six.moves import map |
| from six.moves import zip |
| from six.moves import input |
| if six.PY2: |
| import StringIO |
| else: |
| import io |
| |
| BUF_SIZE = 65536 |
| |
| try: |
| import spdx_lookup as liclookup |
| except ImportError: |
| # spdx_lookup is not installed |
| print('spdx_lookup module is not installed. This can lead to '
| 'inaccurate license detection. Please install it via\n'
| 'pip install spdx_lookup')
| liclookup = None |
| |
| |
| def setup_decorator(func, method): |
| """ |
| Decorator for distutils.core.setup and setuptools.setup.
| Stores the keyword arguments with which setup is called in a dict and
| adds a 'method' key that is either 'setuptools' or 'distutils'.
| |
| Keyword arguments: |
| func -- either setuptools.setup or distutils.core.setup |
| method -- either 'setuptools' or 'distutils' |
| """ |
| |
| @wraps(func) |
| def closure(*args, **kwargs): |
| # Any python package calls its setup function to be installed.
| # The 'name' argument of this setup function is the package's name
| BuildrootPackage.setup_args[kwargs['name']] = kwargs |
| BuildrootPackage.setup_args[kwargs['name']]['method'] = method |
| return closure |
| |
| # monkey patch |
| import setuptools # noqa E402 |
| setuptools.setup = setup_decorator(setuptools.setup, 'setuptools') |
| import distutils # noqa E402 |
| distutils.core.setup = setup_decorator(distutils.core.setup, 'distutils')
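|
| # Sketch of what the decorator records (hypothetical package name and
| # values): once a package's setup.py has been executed through load_setup
| # below, the recorded keyword arguments can be read back as e.g.
| #   BuildrootPackage.setup_args['example-pkg'] == {
| #       'name': 'example-pkg',
| #       'install_requires': ['six>=1.10'],
| #       ...,
| #       'method': 'setuptools',
| #   }
| # Note that the real setup() is never called; only its arguments are kept.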
| |
| |
| def find_file_upper_case(filenames, path='./'): |
| """ |
| List generator:
| Recursively finds files whose uppercased name matches one of the
| specified filenames. Yields paths starting with the path argument.
| |
| Keyword arguments: |
| filenames -- List of filenames to be found |
| path -- Path to the directory to search |
| """ |
| for root, dirs, files in os.walk(path): |
| for file in files: |
| if file.upper() in filenames: |
| yield (os.path.join(root, file)) |
| |
| |
| def pkg_buildroot_name(pkg_name): |
| """ |
| Returns the Buildroot package name for the PyPI package pkg_name. |
| Removes all characters that are not alphanumeric, '_' or '-',
| lowercases the name and adds a 'python-' prefix.
| |
| Keyword arguments: |
| pkg_name -- String to rename |
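|
| Illustrative example (hypothetical package name):
| >>> pkg_buildroot_name('Flask-RESTful')
| 'python-flask-restful'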
| """ |
| name = re.sub(r'[^\w-]', '', pkg_name.lower())
| prefix = 'python-' |
| pattern = re.compile('^(?!' + prefix + ')(.+?)$') |
| name = pattern.sub(r'python-\1', name) |
| return name |
| |
| |
| class DownloadFailed(Exception): |
| pass |
| |
| |
| class BuildrootPackage(): |
| """This class's methods are not meant to be used individually please |
| use them in the correct order: |
| |
| __init__ |
| |
| download_package |
| |
| extract_package |
| |
| load_module |
| |
| get_requirements |
| |
| create_package_mk |
| |
| create_hash_file |
| |
| create_config_in |
| |
| """ |
| setup_args = {} |
| |
| def __init__(self, real_name, pkg_folder): |
| self.real_name = real_name |
| self.buildroot_name = pkg_buildroot_name(self.real_name) |
| self.pkg_dir = os.path.join(pkg_folder, self.buildroot_name) |
| self.mk_name = self.buildroot_name.upper().replace('-', '_') |
| self.as_string = None |
| self.md5_sum = None |
| self.metadata = None |
| self.metadata_name = None |
| self.metadata_url = None |
| self.pkg_req = None |
| self.setup_metadata = None |
| self.tmp_extract = None |
| self.used_url = None |
| self.filename = None |
| self.url = None |
| self.version = None |
| self.license_files = [] |
| |
| def fetch_package_info(self): |
| """ |
| Fetch a package's metadata from the python package index |
| """ |
| self.metadata_url = 'https://pypi.org/pypi/{pkg}/json'.format( |
| pkg=self.real_name) |
| try: |
| pkg_json = six.moves.urllib.request.urlopen(self.metadata_url).read().decode() |
| except six.moves.urllib.error.HTTPError as error: |
| print('ERROR:', error.getcode(), error.msg, file=sys.stderr) |
| print('ERROR: Could not find package {pkg}.\n' |
| 'Check that the package name is correct on the python package index:\n'
| 'https://pypi.org/'
| .format(pkg=self.real_name)) |
| raise |
| except six.moves.urllib.error.URLError: |
| print('ERROR: Could not find package {pkg}.\n' |
| 'Check that the package name is correct on the python package index:\n'
| 'https://pypi.org/'
| .format(pkg=self.real_name)) |
| raise |
| self.metadata = json.loads(pkg_json) |
| self.version = self.metadata['info']['version'] |
| self.metadata_name = self.metadata['info']['name'] |
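|
| # The pypi JSON answer is expected to look roughly like this (trimmed,
| # values are purely illustrative; only the keys used in this script
| # matter):
| #   {
| #     "info": {"name": "example", "version": "1.0", "license": "MIT",
| #              "summary": "...", "home_page": "...", "classifiers": [...],
| #              "download_url": "..."},
| #     "urls": [{"packagetype": "sdist",
| #               "url": "https://.../example-1.0.tar.gz",
| #               "filename": "example-1.0.tar.gz",
| #               "digests": {"md5": "...", "sha256": "..."}}]
| #   }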
| |
| def download_package(self): |
| """ |
| Download a package using metadata from pypi |
| """ |
| download = None |
| try: |
| self.metadata['urls'][0]['filename'] |
| except IndexError: |
| print( |
| 'Non-conventional package, ', |
| 'please check carefully after creation') |
| self.metadata['urls'] = [{ |
| 'packagetype': 'sdist', |
| 'url': self.metadata['info']['download_url'], |
| 'digests': None}] |
| # In this case, we can't get the name of the downloaded file
| # from the pypi api, so we have to deduce it from the download URL.
| urlpath = six.moves.urllib.parse.urlparse( |
| self.metadata['info']['download_url']).path |
| # urlparse().path gives something like
| # /path/to/file-version.tar.gz |
| # We use basename to remove /path/to |
| self.metadata['urls'][0]['filename'] = os.path.basename(urlpath) |
| for download_url in self.metadata['urls']: |
| if 'bdist' in download_url['packagetype']: |
| continue |
| try: |
| print('Downloading package {pkg} from {url}...'.format( |
| pkg=self.real_name, url=download_url['url'])) |
| download = six.moves.urllib.request.urlopen(download_url['url']) |
| except six.moves.urllib.error.HTTPError as http_error: |
| download = http_error |
| else: |
| self.used_url = download_url |
| self.as_string = download.read() |
| if not download_url['digests'] or not download_url['digests']['md5']:
| break |
| self.md5_sum = hashlib.md5(self.as_string).hexdigest() |
| if self.md5_sum == download_url['digests']['md5']: |
| break |
| |
| if download is None: |
| raise DownloadFailed('Failed to download package {pkg}: ' |
| 'No source archive available' |
| .format(pkg=self.real_name)) |
| elif download.__class__ == six.moves.urllib.error.HTTPError: |
| raise download |
| |
| self.filename = self.used_url['filename'] |
| self.url = self.used_url['url'] |
| |
| def check_archive(self, members): |
| """ |
| Check archive content before extracting |
| |
| Keyword arguments: |
| members -- list of archive members |
| """ |
| # Protect against https://github.com/snyk/zip-slip-vulnerability |
| # Older python versions do not validate that the extracted files are |
| # inside the target directory. Detect and error out on evil paths |
| evil = [e for e in members if os.path.relpath(e).startswith(('/', '..'))] |
| if evil: |
| print('ERROR: Refusing to extract {} with suspicious members {}'.format( |
| self.filename, evil)) |
| sys.exit(1) |
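|
| # For instance, a crafted archive member such as '../../etc/passwd'
| # resolves to a relative path starting with '..', so the check above
| # rejects the whole archive before anything is extracted.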
| |
| def extract_package(self, tmp_path): |
| """ |
| Extract the package contents into a directory
| |
| Keyword arguments: |
| tmp_path -- directory where you want the package to be extracted |
| """ |
| if six.PY2: |
| as_file = StringIO.StringIO(self.as_string) |
| else: |
| as_file = io.BytesIO(self.as_string) |
| if self.filename[-3:] == 'zip': |
| with zipfile.ZipFile(as_file) as as_zipfile: |
| tmp_pkg = os.path.join(tmp_path, self.buildroot_name) |
| try: |
| os.makedirs(tmp_pkg) |
| except OSError as exception: |
| if exception.errno != errno.EEXIST: |
| print("ERROR: ", exception.strerror, file=sys.stderr) |
| return |
| print('WARNING:', exception.strerror, file=sys.stderr) |
| print('Removing {pkg}...'.format(pkg=tmp_pkg)) |
| shutil.rmtree(tmp_pkg) |
| os.makedirs(tmp_pkg) |
| self.check_archive(as_zipfile.namelist()) |
| as_zipfile.extractall(tmp_pkg) |
| pkg_filename = self.filename.split(".zip")[0] |
| else: |
| with tarfile.open(fileobj=as_file) as as_tarfile: |
| tmp_pkg = os.path.join(tmp_path, self.buildroot_name) |
| try: |
| os.makedirs(tmp_pkg) |
| except OSError as exception: |
| if exception.errno != errno.EEXIST: |
| print("ERROR: ", exception.strerror, file=sys.stderr) |
| return |
| print('WARNING:', exception.strerror, file=sys.stderr) |
| print('Removing {pkg}...'.format(pkg=tmp_pkg)) |
| shutil.rmtree(tmp_pkg) |
| os.makedirs(tmp_pkg) |
| self.check_archive(as_tarfile.getnames()) |
| as_tarfile.extractall(tmp_pkg) |
| pkg_filename = self.filename.split(".tar")[0] |
| |
| tmp_extract = '{folder}/{name}' |
| self.tmp_extract = tmp_extract.format( |
| folder=tmp_pkg, |
| name=pkg_filename) |
| |
| def load_setup(self): |
| """ |
| Loads the package's setup.py and stores its metadata
| """ |
| current_dir = os.getcwd() |
| os.chdir(self.tmp_extract) |
| sys.path.append(self.tmp_extract) |
| s_file, s_path, s_desc = imp.find_module('setup', [self.tmp_extract]) |
| setup = imp.load_module('setup', s_file, s_path, s_desc) |
| try: |
| self.setup_metadata = self.setup_args[self.metadata_name] |
| except KeyError: |
| # This means setup was not called, which most likely means that it is
| # called through the if __name__ == '__main__' directive.
| # In this case, we can only pray that it is called through a |
| # function called main() in setup.py. |
| setup.main() # Will raise AttributeError if not found |
| self.setup_metadata = self.setup_args[self.metadata_name] |
| # Here we must remove the module the hard way.
| # We must do this because of a very specific case: if a package calls
| # setup from __main__ but does not come with a 'main()' function,
| # reloading reuses the stale 'setup' module still in sys.modules, so
| # setup.main() would call the main function of a previous package...
| sys.modules.pop('setup', None) |
| del setup |
| os.chdir(current_dir) |
| sys.path.remove(self.tmp_extract) |
| |
| def get_requirements(self, pkg_folder): |
| """ |
| Retrieve dependencies from the metadata found in the setup.py script of |
| a pypi package. |
| |
| Keyword Arguments: |
| pkg_folder -- location of the already created packages |
| """ |
| if 'install_requires' not in self.setup_metadata: |
| self.pkg_req = None |
| return set() |
| self.pkg_req = self.setup_metadata['install_requires'] |
| self.pkg_req = [re.sub(r'([-.\w]+).*', r'\1', req)
| for req in self.pkg_req] |
| |
| # get rid of commented lines and also strip the package strings |
| self.pkg_req = [item.strip() for item in self.pkg_req |
| if len(item) > 0 and item[0] != '#'] |
| |
| req_not_found = self.pkg_req |
| self.pkg_req = list(map(pkg_buildroot_name, self.pkg_req)) |
| pkg_tuples = list(zip(req_not_found, self.pkg_req)) |
| # pkg_tuples is a list of tuples that looks like
| # ('werkzeug','python-werkzeug') because we need both names when checking
| # whether dependencies already exist or are already in the download list
| req_not_found = set( |
| pkg[0] for pkg in pkg_tuples |
| if not os.path.isdir(os.path.join(pkg_folder, pkg[1]))
| ) |
| return req_not_found |
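|
| # Illustrative normalization (hypothetical requirement): an
| # install_requires entry such as 'requests>=2.20' is reduced to 'requests'
| # by the regex above and then mapped to 'python-requests'; it is reported
| # as missing only if that directory does not already exist under the
| # output package folder.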
| |
| def __create_mk_header(self): |
| """ |
| Create the header of the <package_name>.mk file |
| """ |
| header = ['#' * 80 + '\n'] |
| header.append('#\n') |
| header.append('# {name}\n'.format(name=self.buildroot_name)) |
| header.append('#\n') |
| header.append('#' * 80 + '\n') |
| header.append('\n') |
| return header |
| |
| def __create_mk_download_info(self): |
| """ |
| Create the lines referring to the download information of the
| <package_name>.mk file |
| """ |
| lines = [] |
| version_line = '{name}_VERSION = {version}\n'.format( |
| name=self.mk_name, |
| version=self.version) |
| lines.append(version_line) |
| |
| targz = self.filename.replace( |
| self.version, |
| '$({name}_VERSION)'.format(name=self.mk_name)) |
| targz_line = '{name}_SOURCE = {filename}\n'.format( |
| name=self.mk_name, |
| filename=targz) |
| lines.append(targz_line) |
| |
| if self.filename not in self.url: |
| # Sometimes the filename is in the url, sometimes it's not |
| site_url = self.url |
| else: |
| site_url = self.url[:self.url.find(self.filename)] |
| site_line = '{name}_SITE = {url}'.format(name=self.mk_name, |
| url=site_url) |
| site_line = site_line.rstrip('/') + '\n' |
| lines.append(site_line) |
| return lines |
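|
| # For a hypothetical package 'example' at version 1.0 whose sdist is
| # example-1.0.tar.gz, the generated lines would look like:
| #   PYTHON_EXAMPLE_VERSION = 1.0
| #   PYTHON_EXAMPLE_SOURCE = example-$(PYTHON_EXAMPLE_VERSION).tar.gz
| #   PYTHON_EXAMPLE_SITE = <download URL with the filename stripped>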
| |
| def __create_mk_setup(self): |
| """ |
| Create the line referring to the package's setup method in the
| <package_name>.mk file
| |
| There are two things you can use to make an installer |
| for a python package: distutils or setuptools |
| distutils comes with python but does not support dependencies. |
| distutils is mostly still there for backward support. |
| setuptools is what smart people use, |
| but it is not shipped with python :( |
| """ |
| lines = [] |
| setup_type_line = '{name}_SETUP_TYPE = {method}\n'.format( |
| name=self.mk_name, |
| method=self.setup_metadata['method']) |
| lines.append(setup_type_line) |
| return lines |
| |
| def __get_license_names(self, license_files): |
| """ |
| Try to determine the related license name. |
| |
| There are two possibilities. Either the script tries to |
| get license name from package's metadata or, if spdx_lookup |
| package is available, the script compares license files with |
| SPDX database. |
| """ |
| license_line = '' |
| if liclookup is None: |
| license_dict = { |
| 'Apache Software License': 'Apache-2.0', |
| 'BSD License': 'FIXME: please specify the exact BSD version', |
| 'European Union Public Licence 1.0': 'EUPL-1.0', |
| 'European Union Public Licence 1.1': 'EUPL-1.1', |
| "GNU General Public License": "GPL", |
| "GNU General Public License v2": "GPL-2.0", |
| "GNU General Public License v2 or later": "GPL-2.0+", |
| "GNU General Public License v3": "GPL-3.0", |
| "GNU General Public License v3 or later": "GPL-3.0+", |
| "GNU Lesser General Public License v2": "LGPL-2.1", |
| "GNU Lesser General Public License v2 or later": "LGPL-2.1+", |
| "GNU Lesser General Public License v3": "LGPL-3.0", |
| "GNU Lesser General Public License v3 or later": "LGPL-3.0+", |
| "GNU Library or Lesser General Public License": "LGPL-2.0", |
| "ISC License": "ISC", |
| "MIT License": "MIT", |
| "Mozilla Public License 1.0": "MPL-1.0", |
| "Mozilla Public License 1.1": "MPL-1.1", |
| "Mozilla Public License 2.0": "MPL-2.0", |
| "Zope Public License": "ZPL" |
| } |
| regexp = re.compile(r'^License :* *.* *:+ (.*)( \(.*\))?$')
| classifiers_licenses = [regexp.sub(r"\1", lic) |
| for lic in self.metadata['info']['classifiers'] |
| if regexp.match(lic)] |
| licenses = [license_dict[x] if x in license_dict else x for x in classifiers_licenses] |
| if not len(licenses):
| licenses = [self.metadata['info']['license']]
| print('WARNING: License has been set to "{license}". It is most'
| ' likely wrong, please change it if need be'.format(
| license=', '.join(licenses)))
| license_line = '{name}_LICENSE = {license}\n'.format( |
| name=self.mk_name, |
| license=', '.join(licenses)) |
| else: |
| license_names = [] |
| for license_file in license_files: |
| with open(license_file) as lic_file: |
| match = liclookup.match(lic_file.read()) |
| if match is not None and match.confidence >= 90.0: |
| license_names.append(match.license.id) |
| else: |
| license_names.append("FIXME: license id couldn't be detected") |
| |
| if len(license_names) > 0: |
| license_line = ('{name}_LICENSE =' |
| ' {names}\n'.format( |
| name=self.mk_name, |
| names=', '.join(license_names))) |
| |
| return license_line |
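|
| # Illustrative mapping when spdx_lookup is missing (hypothetical package
| # name): a classifier such as 'License :: OSI Approved :: MIT License' is
| # reduced to 'MIT License' by the regex above, translated to 'MIT' through
| # license_dict, and written out as 'PYTHON_EXAMPLE_LICENSE = MIT'.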
| |
| def __create_mk_license(self): |
| """ |
| Create the lines referring to the package's license information in the
| <package_name>.mk file |
| |
| The license files are found by searching the package (case insensitive)
| for files named license, license.txt etc. If more than one license file
| is found, they are all listed in <package_name>_LICENSE_FILES.
| """ |
| lines = [] |
| |
| filenames = ['LICENCE', 'LICENSE', 'LICENSE.RST', 'LICENSE.TXT', |
| 'COPYING', 'COPYING.TXT'] |
| self.license_files = list(find_file_upper_case(filenames, self.tmp_extract)) |
| |
| lines.append(self.__get_license_names(self.license_files)) |
| |
| license_files = [license.replace(self.tmp_extract, '')[1:] |
| for license in self.license_files] |
| if len(license_files) > 0: |
| if len(license_files) > 1: |
| print('More than one file found for license:', |
| ', '.join(license_files)) |
| license_file_line = ('{name}_LICENSE_FILES =' |
| ' {files}\n'.format( |
| name=self.mk_name, |
| files=' '.join(license_files))) |
| lines.append(license_file_line) |
| else: |
| print('WARNING: No license file found,' |
| ' please specify it manually afterwards') |
| license_file_line = '# No license file found\n'
| lines.append(license_file_line)
| |
| return lines |
| |
| def __create_mk_requirements(self): |
| """ |
| Create the lines referring to the dependencies of the
| <package_name>.mk file
| """ |
| lines = [] |
| dependencies_line = ('{name}_DEPENDENCIES =' |
| ' {reqs}\n'.format( |
| name=self.mk_name, |
| reqs=' '.join(self.pkg_req))) |
| lines.append(dependencies_line) |
| return lines |
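|
| # With a hypothetical requirement list ['python-six'] this produces:
| #   PYTHON_EXAMPLE_DEPENDENCIES = python-six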
| |
| def create_package_mk(self): |
| """ |
| Create the lines corresponding to the <package_name>.mk file |
| """ |
| pkg_mk = '{name}.mk'.format(name=self.buildroot_name) |
| path_to_mk = os.path.join(self.pkg_dir, pkg_mk) |
| print('Creating {file}...'.format(file=path_to_mk)) |
| lines = self.__create_mk_header() |
| lines += self.__create_mk_download_info() |
| lines += self.__create_mk_setup() |
| lines += self.__create_mk_license() |
| |
| lines.append('\n') |
| lines.append('$(eval $(python-package))') |
| lines.append('\n') |
| with open(path_to_mk, 'w') as mk_file: |
| mk_file.writelines(lines) |
| |
| def create_hash_file(self): |
| """ |
| Create the lines corresponding to the <package_name>.hash file
| """ |
| pkg_hash = '{name}.hash'.format(name=self.buildroot_name) |
| path_to_hash = os.path.join(self.pkg_dir, pkg_hash) |
| print('Creating {filename}...'.format(filename=path_to_hash)) |
| lines = [] |
| if self.used_url['digests'] and self.used_url['digests']['md5'] and self.used_url['digests']['sha256']:
| hash_header = '# md5, sha256 from {url}\n'.format( |
| url=self.metadata_url) |
| lines.append(hash_header) |
| hash_line = '{method}\t{digest} {filename}\n'.format( |
| method='md5', |
| digest=self.used_url['digests']['md5'], |
| filename=self.filename) |
| lines.append(hash_line) |
| hash_line = '{method}\t{digest} {filename}\n'.format( |
| method='sha256', |
| digest=self.used_url['digests']['sha256'], |
| filename=self.filename) |
| lines.append(hash_line) |
| |
| if self.license_files: |
| lines.append('# Locally computed sha256 checksums\n') |
| for license_file in self.license_files: |
| sha256 = hashlib.sha256() |
| with open(license_file, 'rb') as lic_f: |
| while True: |
| data = lic_f.read(BUF_SIZE) |
| if not data: |
| break |
| sha256.update(data) |
| hash_line = '{method}\t{digest} {filename}\n'.format( |
| method='sha256', |
| digest=sha256.hexdigest(), |
| filename=license_file.replace(self.tmp_extract, '')[1:]) |
| lines.append(hash_line) |
| |
| with open(path_to_hash, 'w') as hash_file: |
| hash_file.writelines(lines) |
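|
| # The generated .hash file roughly looks like this (hypothetical package,
| # digests elided):
| #   # md5, sha256 from https://pypi.org/pypi/example/json
| #   md5     <md5 hexdigest>  example-1.0.tar.gz
| #   sha256  <sha256 hexdigest>  example-1.0.tar.gz
| #   # Locally computed sha256 checksums
| #   sha256  <sha256 hexdigest>  LICENSE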
| |
| def create_config_in(self): |
| """ |
| Creates the Config.in file of a package |
| """ |
| path_to_config = os.path.join(self.pkg_dir, 'Config.in') |
| print('Creating {file}...'.format(file=path_to_config)) |
| lines = [] |
| config_line = 'config BR2_PACKAGE_{name}\n'.format( |
| name=self.mk_name) |
| lines.append(config_line) |
| |
| bool_line = '\tbool "{name}"\n'.format(name=self.buildroot_name) |
| lines.append(bool_line) |
| if self.pkg_req: |
| for dep in self.pkg_req: |
| dep_line = '\tselect BR2_PACKAGE_{req} # runtime\n'.format( |
| req=dep.upper().replace('-', '_')) |
| lines.append(dep_line) |
| |
| lines.append('\thelp\n') |
| |
| help_lines = textwrap.wrap(self.metadata['info']['summary'], 62, |
| initial_indent='\t ', |
| subsequent_indent='\t ') |
| |
| # make sure a help text is terminated with a full stop |
| if help_lines[-1][-1] != '.': |
| help_lines[-1] += '.' |
| |
| # \t + two spaces is 3 char long |
| help_lines.append('') |
| help_lines.append('\t ' + self.metadata['info']['home_page']) |
| help_lines = [x + '\n' for x in help_lines] |
| lines += help_lines |
| |
| with open(path_to_config, 'w') as config_file: |
| config_file.writelines(lines) |
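|
| # For a hypothetical package 'example' depending on 'six', the generated
| # Config.in would look like:
| #   config BR2_PACKAGE_PYTHON_EXAMPLE
| #           bool "python-example"
| #           select BR2_PACKAGE_PYTHON_SIX # runtime
| #           help
| #             Package summary taken from the pypi metadata.
| #
| #             https://example.invalid/home-page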
| |
| |
| def main(): |
| # Building the parser |
| parser = argparse.ArgumentParser( |
| description="Creates buildroot packages from the metadata of " |
| "an existing PyPI packages and include it " |
| "in menuconfig") |
| parser.add_argument("packages", |
| help="list of packages to be created", |
| nargs='+') |
| parser.add_argument("-o", "--output", |
| help=""" |
| Output directory for packages. |
| Default is ./package |
| """, |
| default='./package') |
| |
| args = parser.parse_args() |
| packages = list(set(args.packages)) |
| |
| # tmp_path is where we'll extract the files later |
| tmp_prefix = 'scanpypi-' |
| pkg_folder = args.output |
| tmp_path = tempfile.mkdtemp(prefix=tmp_prefix) |
| try: |
| for real_pkg_name in packages: |
| package = BuildrootPackage(real_pkg_name, pkg_folder) |
| print('buildroot package name for {}:'.format(package.real_name), |
| package.buildroot_name) |
| # First we download the package |
| # Most of the info we need can only be found inside the package |
| print('Package:', package.buildroot_name) |
| print('Fetching package', package.real_name) |
| try: |
| package.fetch_package_info() |
| except (six.moves.urllib.error.URLError, six.moves.urllib.error.HTTPError): |
| continue |
| if package.metadata_name.lower() == 'setuptools': |
| # setuptools imports itself, which does not work very well
| # with the monkey patch at the beginning
| print('Error: setuptools cannot be built using scanPyPI') |
| continue |
| |
| try: |
| package.download_package() |
| except six.moves.urllib.error.HTTPError as error: |
| print('Error: {code} {reason}'.format(code=error.code, |
| reason=error.reason)) |
| print('Error downloading package:', package.buildroot_name)
| print() |
| continue |
| |
| # extract the tarball |
| try: |
| package.extract_package(tmp_path) |
| except (tarfile.ReadError, zipfile.BadZipfile): |
| print('Error extracting package {}'.format(package.real_name)) |
| print() |
| continue |
| |
| # Load the package's install info from its setup.py
| try: |
| package.load_setup() |
| except ImportError as err: |
| if 'buildutils' in str(err):
| print('This package needs buildutils') |
| else: |
| raise |
| continue |
| except AttributeError as error: |
| print('Error: Could not install package {pkg}: {error}'.format( |
| pkg=package.real_name, error=error)) |
| continue |
| |
| # Package requirements are an argument of the setup function
| req_not_found = package.get_requirements(pkg_folder) |
| req_not_found = req_not_found.difference(packages) |
| |
| packages += req_not_found |
| if req_not_found: |
| print('Added packages \'{pkgs}\' as dependencies of {pkg}' |
| .format(pkgs=", ".join(req_not_found), |
| pkg=package.buildroot_name)) |
| print('Checking if package {name} already exists...'.format( |
| name=package.pkg_dir)) |
| try: |
| os.makedirs(package.pkg_dir) |
| except OSError as exception: |
| if exception.errno != errno.EEXIST: |
| print("ERROR: ", exception.message, file=sys.stderr) |
| continue |
| print('Error: Package {name} already exists' |
| .format(name=package.pkg_dir)) |
| del_pkg = input( |
| 'Do you want to delete the existing package? [y/N] ')
| if del_pkg.lower() == 'y': |
| shutil.rmtree(package.pkg_dir) |
| os.makedirs(package.pkg_dir) |
| else: |
| continue |
| package.create_package_mk() |
| |
| package.create_hash_file() |
| |
| package.create_config_in() |
| print() |
| # printing an empty line for visual comfort
| finally: |
| shutil.rmtree(tmp_path) |
| |
| |
| if __name__ == "__main__": |
| main() |