aos-cube
Advanced tools
| import click | ||
| from aos.util import popen, locale_to_unicode | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| from aos.managers.addon import AddonManager | ||
@click.group(short_help="Install components or tools")
@click.pass_context
def cli(ctx):
    # Group entry point only; the registered subcommands below
    # ('comp', 'pypkg') do the actual work.
    pass
'''
# Install command
# - To install component without specified version: install comp <component>
# - To install component with specified version: install comp <component=x.x.x>
# - To install component no matter what old version is installed: install comp -f <comp>
'''
@cli.command("comp", short_help="Install components",
             help="Install components.\n"
                  "\nIf 'COMPONENTS' argument provided with version info "
                  "(e.g. test=1.0.0), the specified version will be installed; "
                  "otherwise, the latest version will be installed.\n"
                  "\nIf '-L' option provided, local package file will be used; "
                  "otherwise, package source on remote server will be used.\n"
                  "\nIf '-f' option provided, the components will be installed "
                  "without asking for user confirm of removing old installed "
                  "version or dependency installation.")
@click.argument("components", required=False, nargs=-1, metavar="[COMPONENTS...]")
@click.option("-f", "--force", is_flag=True, help="Force to install components")
@click.option("-L", "--location", type=click.STRING, nargs=1,
              help="Specify the location of the package file to install")
def install_component(components, force, location):
    """ """
    # Install from a local package file (-L) or from the remote repo,
    # recording the effective command line for usage tracing first.
    manager = AddonManager()
    args = list(components) if components else []
    trace = ['comp']
    if force:
        trace.append('-f')
    if location:
        location = locale_to_unicode(location)
        trace.append('-L')
    trace.extend(args)
    set_op(op='install', content=' '.join(trace))
    manager.install(force, location, *args)
    set_op(result='success')
    report_op()
@cli.command("pypkg", short_help="Install pip packages")
@click.argument("pkgs", required=True, nargs=-1, metavar="[PACKAGES]...")
def install_pypkg(pkgs):
    """ Run pip process to install Python packages. """
    cmd = ["pip", "install", "--upgrade"]
    cmd_content = "install pypkg"
    for p in pkgs:
        cmd += [p]
        cmd_content += " %s" % p
    set_op(op='install', content=cmd_content)
    try:
        ret = popen(cmd)
        if ret != 0:
            # Retry once bypassing pip's local cache, a common cause of
            # spurious install failures.
            cmd.insert(3, "--no-cache-dir")
            # BUG FIX: the retry's return code was previously discarded,
            # so a failed retry was still reported as 'success'.
            ret = popen(cmd)
    except Exception as e:
        set_op(result='fail: ' + format(e))
        report_op()
        raise  # bare raise preserves the original traceback
    if ret != 0:
        set_op(result='fail: pip returned non-zero exit code.')
    else:
        set_op(result='success')
    report_op()
| import click, json | ||
| from aos.managers.addon import AddonManager | ||
| from serial.tools.list_ports import comports | ||
| from aos.usertrace.do_report import set_op, report_op | ||
@click.group(short_help="List components and devices")
@click.pass_context
def cli(ctx):
    # Group entry point only; see the 'devices' and 'comp' subcommands.
    pass
@cli.command("devices", short_help="List devices on serial ports")
def list_devices():
    """ List devices on serial ports """
    set_op(op='list', content="devices")
    # Serialize each serial-port object via its attribute dict, then print
    # the whole collection as a JSON array.
    devices = [json.loads(json.dumps(port.__dict__)) for port in comports()]
    print(json.dumps(devices, indent=4))
    set_op(result='success')
    report_op()
@cli.command("comp", short_help="List component information",
             help="List the information of the components (specified "
                  "by 'COMPONENTS' argument). If 'COMPONENTS' argument "
                  "is not provided, all components' information will be listed.\n"
                  "\nIf '-r' option provided, the information of the remote "
                  "components is listed (by default, the local ones are listed).\n"
                  "\nIf '-a' option provided, all components (including "
                  "board/mcu/arch type) will be listed (by default, "
                  "board/mcu/arch type are hidden).")
@click.argument("components", required=False, nargs=-1, metavar="[COMPONENTS...]")
@click.option("-r", "--remote", is_flag=True, help="List remote components")
@click.option("-a", "--all", is_flag=True, help="show all versions of all components")
def list_components(components, remote, all):
    # NOTE: the parameter is named 'all' (shadowing the builtin) because click
    # derives it from the '--all' option name; renaming would break binding.
    am = AddonManager()
    args = []
    if components:
        args += components
    # Record the effective command line for usage tracing.
    cmd_content = 'comp'
    if remote:
        cmd_content += ' -r'
    if all:
        cmd_content += ' -a'
    if args:
        cmd_content += ' ' + ' '.join(args)
    set_op(op='list', content=cmd_content)
    am.list(remote, all, *args)
    set_op(result='success')
    report_op()
| import os, sys | ||
| import click | ||
| from aos.util import error, simple_error, pqueryerr, locale_to_unicode, get_locale | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| from aos.constant import GEN_NEWPROJECT, NO_SDK_HINT | ||
# open command
@click.command("open", short_help="Open an example project",
               help="Open the example project of a specified "
                    "component on a specified board.")
@click.argument("compname", required=True, nargs=1, metavar="<component_name>")
@click.option("-b", "--board", required=True, help="Board for creating project")
@click.option("-d", "--projectdir", help="The project directory")
def cli(compname, board, projectdir):
    """ Open the example project of a specified component on a specified board.
    Show more ARGS with: $ aos open help
    """
    # Record the effective command line for usage tracing.
    args = [compname, '-b ' + board]
    if projectdir:
        projectdir = locale_to_unicode(projectdir)
        args += ['-d ' + projectdir]
    cmd_content = ' '.join(args)
    set_op(op='open', content=cmd_content)
    if "AOS_SDK_PATH" not in os.environ:
        # No SDK configured; error() presumably aborts or reports.
        error(NO_SDK_HINT)
    else:
        aos_sdk_path = os.environ["AOS_SDK_PATH"]
        aos_sdk_path = locale_to_unicode(os.path.abspath(aos_sdk_path))
        # Run the SDK's project-generator script with board/template args.
        # NOTE(review): template is '<compname>_app' but the project name is
        # '<compname>app' (no underscore) -- confirm this asymmetry is intended.
        new_project_args = ["-b%s" % board, "-t%s" % (compname + '_app'), compname + 'app']
        if projectdir:
            new_project_args += ["-d%s" % projectdir]
        gen_newproject = os.path.join(aos_sdk_path, GEN_NEWPROJECT)
        if os.path.isfile(gen_newproject):
            cmd = ["python", gen_newproject] + list(new_project_args)
            ret, err = pqueryerr(cmd)
            if ret != 0:
                simple_error("Failed to open example project, error: %s" % err.decode(get_locale()))
        else:
            error("No %s found for current release!" % gen_newproject)
        '''
        # run aos create project command
        try:
            cmd = ["aos", "create", "project", "-b " + board, "-d " + projectdir, "-t " + compname, "compname"]
            popen(cmd)
        except Exception as e:
            error("Failed to open example project for %s, error message: %s" % (compname, format(e)))
        '''
    set_op(result='success')
    report_op()
| import click | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| from aos.managers.addon import AddonManager | ||
| from aos.util import locale_to_unicode | ||
@click.command("pack", short_help="Pack and make a component package",
               help="Pack a component directory to make a package file.")
@click.argument("dir", required=True, nargs=1, type=click.Path(dir_okay=True))
def cli(dir):
    # Pack the given component directory into a package file, recording the
    # operation for usage tracing. ('dir' shadows the builtin, but click
    # derives the parameter name from the argument declaration.)
    dir = locale_to_unicode(dir)
    set_op(op='pack', content=dir)
    AddonManager().create(dir)
    set_op(result='success')
    report_op()
| import click | ||
| from aos.util import popen | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| from aos.managers.addon import AddonManager | ||
@click.group(short_help="Remove the installed component or package")
@click.pass_context
def cli(ctx):
    # Group entry point only; see the 'comp' and 'pypkg' subcommands.
    pass
@cli.command("comp", short_help="Remove components")
@click.argument("components", required=False, nargs=-1, metavar="[COMPONENTS...]")
@click.option("-a", "--all", is_flag=True, help="Uninstall all components")
def remove_component(components, all):
    # Uninstall the named components (or everything with -a), reporting the
    # operation and its outcome through usertrace.
    manager = AddonManager()
    args = list(components) if components else []
    trace = ['comp']
    if all:
        trace.append('-a')
    trace.extend(args)
    set_op(op='remove', content=' '.join(trace))
    ret = manager.uninstall(False, all, *args)
    set_op(result='success' if ret == 0 else 'fail')
    report_op()
@cli.command("pypkg", short_help="Uninstall pip package")
@click.argument("pkgs", required=True, nargs=-1, metavar="[PACKAGES...]")
@click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation of uninstall deletions.")
def remove_pypkg(pkgs, yes):
    """ Run pip process to uninstall Python packages. """
    cmd_content = "pypkg"
    cmd = ["pip", "uninstall"]
    if yes:
        cmd += ["-y"]
        cmd_content += " -y"
    for p in pkgs:
        cmd += [p]
        cmd_content += " %s" % p
    set_op(op='remove', content=cmd_content)
    try:
        ret = popen(cmd)
    except Exception as e:
        set_op(result='fail: ' + format(e))
        report_op()
        raise  # bare raise preserves the original traceback
    if ret != 0:
        set_op(result='fail: popen return none zero.')
        report_op()
        # BUG FIX: previously fell through and reported a second,
        # contradictory 'success' result after the failure report.
        return
    set_op(result='success')
    report_op()
| import os | ||
| import sys | ||
| import json | ||
| import shutil | ||
| import re | ||
| import zipfile | ||
# Import project-local modules; abort the whole tool with a diagnostic if
# the aos installation is broken (e.g. partially installed).
try:
    from aos.util import aos_input, cd_aos_root, get_host_os, debug
    from aos.managers.misc import aos_info, aos_warning, aos_error, aos_print
    from aos.managers import misc
    from aos.managers import repo
    from aos.managers import sqlitedb
    from aos.constant import OS_REPO, OS_CACHE
    from aos.managers import localpkg
    from aos.managers import rpmfile
    from aos.managers import queryserver
    from aos.managers.constant import *
except Exception as e:
    print("Exception when importing modules in addon: %s" % format(e))
    sys.exit(1)
# Column widths for the plain-text package tables printed below
# (see AddonManager.list and print_upgrade_list_header).
pkg_name_max_len = 30
pkg_version_max_len = 16
def update_configin():
    # TODO: regenerate the related Config.in files after a package is
    # installed or uninstalled. Currently a stub; callers already invoke
    # it after every (un)install so the hook point is in place.
    pass
def is_parent_dir(dir, dirs):
    """Return True if *dir* is a strict ancestor directory of any path in *dirs*.

    Trailing path separators on either side are ignored. Comparison is purely
    textual (no filesystem access, no normalization beyond the trailing sep).
    """
    # exclude the tailing '/' or '\'
    dir = str(dir)
    if dir.endswith(os.path.sep):
        dir = dir[:-1]
    # BUG FIX: compare against 'dir' + separator so that a sibling like
    # '/foo/barbaz' is not mistaken for a child of '/foo/bar'.
    prefix = dir + os.path.sep
    for d in dirs:
        d = str(d)
        # exclude the tailing '/' or '\'
        if d.endswith(os.path.sep):
            d = d[:-1]
        if d.startswith(prefix):
            return True
    return False
def print_upgrade_list_header():
    """Print the column header for the package-upgrade listing table."""
    aos_info("Package".ljust(pkg_name_max_len) +
             "Old version".ljust(pkg_version_max_len) +
             "New version")
    aos_info("-" * (pkg_name_max_len + pkg_version_max_len * 2))
class Cache():
    """Local download cache for component repository metadata.

    Downloads the repo metadata (primary/filelists/other sqlite databases plus
    the component-info file) into *cachedir* and wraps them in database helper
    objects. The cache directory and open databases are cleaned up on GC.
    """

    def __init__(self, repourl=None, cachedir=None):
        # Fall back to the project-wide defaults when not supplied.
        if not repourl:
            self.repourl = OS_REPO
        else:
            self.repourl = repourl
        if not cachedir:
            self.cachedir = OS_CACHE
        else:
            self.cachedir = cachedir
        # Database wrapper objects (populated by construct_*_db below).
        self.primary_db_file = None
        self.filelists_db_file = None
        self.other_db_file = None
        self.comp_info_db_file = None
        # Paths of the downloaded/decompressed metadata files (set in create).
        self.primary_db = None
        self.filelists_db = None
        self.other_db = None
        self.comp_info_db = None
        self.repo = None

    def __del__(self):
        # Best-effort cleanup: close any open databases, then drop the cache
        # directory. Failures are logged and ignored (interpreter shutdown).
        try:
            if self.primary_db_file:
                self.primary_db_file.close()
            if self.filelists_db_file:
                self.filelists_db_file.close()
            if self.other_db_file:
                self.other_db_file.close()
            if os.path.isdir(self.cachedir):
                # NOTE(review): a disabled code path previously shelled out to
                # 'rm -fr' because rmtree on Windows sometimes hit a
                # "folder busy" error; the dead branch was removed and rmtree
                # is used unconditionally now.
                shutil.rmtree(self.cachedir)
        except Exception as e:
            debug("Failure when cache.__del__: %s" % format(e))

    def create(self):
        """Create the cache dir and download/decompress the repo metadata."""
        if not os.path.isdir(self.cachedir):
            os.makedirs(self.cachedir)
        mdtypes = ["primary_db", "filelists_db", "other_db"]
        self.repo = repo.Repo(self.repourl, self.cachedir)
        r = self.repo
        r.getRepoFile()
        for mdtype in mdtypes:
            mdfile = r.getMDFile(mdtype)
            outfile = misc.decompress(mdfile)
            # Stores the decompressed path on self.primary_db / filelists_db /
            # other_db respectively.
            setattr(self, mdtype, outfile)
        self.comp_info_db = r.getCompInfoFile()

    def construct_repo_db(self):
        """Open the downloaded metadata files as sqlite database wrappers."""
        # TODO: Do we need construct all the databases on __init__?
        self.primary_db_file = sqlitedb.PrimaryDB(self.primary_db)
        self.filelists_db_file = sqlitedb.FilelistsDB(self.filelists_db)
        self.other_db_file = sqlitedb.OtherDB(self.other_db)

    def construct_comp_info_db(self):
        """Open the component-info file as a primary-DB style wrapper."""
        self.comp_info_db_file = sqlitedb.PrimaryDB(self.comp_info_db)
def get_pkg_full_name(name, version):
    """Build the on-server rpm package file name for *name* at *version*."""
    return name + '-' + version + '-' + pkg_rpm_postfix
| class AddonManager(): | ||
    def __init__(self):
        # Presumably toggled by tests to skip the destructive top-level dir
        # cleanup in uninstall -- confirm with test code.
        self.testing_env = False
        # Download the repo metadata and open the package databases up front.
        self.cache = Cache()
        self.cache.create()
        self.cache.construct_repo_db()
        self.localpkg = localpkg.LocalPkg()
        self.querysvr = queryserver.QueryServer()
        pass
    def __del__(self):
        # Nothing to release here; Cache cleans up after itself via its own
        # __del__ when this manager is collected.
        pass
| # Check if it's a valid wksp to do the add operations | ||
| def _is_wksp_valid(self): | ||
| if not self.localpkg.validate(): | ||
| return False | ||
| else: | ||
| return True | ||
| # Get full packge information, either remote or local | ||
    def _get_pkg_info(self, remote, names, pkgkey_required=False):
        """Return full package info for *names*, remote or local.

        Returns a list of package-info dicts, or None on failure
        (no server data, or invalid workspace for the local case).
        """
        pkgs_info = []
        # Get pkg dictionary
        if (remote):
            if QUERY_PKGLIST:
                osver = self.localpkg.getOsVersion()
                if not osver:
                    aos_error("Failed to fetch OS version information!")
                pkgs_info = self.querysvr.query_pkginfo(osver, names)
                if not pkgs_info:
                    aos_warning("No information found on server for "
                                "AliOS Things version %s!" % osver)
                    return None
                if pkgkey_required:
                    # NOTE(review): the query-server result is discarded here
                    # in favor of the cached primary DB -- presumably because
                    # only the DB rows carry the pkgKey; confirm intent.
                    pkgs_info = self.cache.primary_db_file.getPackagesWithVersion(names)
            else:
                pkgs_info = self.cache.primary_db_file.getPackagesWithVersion(names)
        else:
            if not self._is_wksp_valid():
                return None
            else:
                pkgs_info = self.localpkg.getPackagesWithVersion(True, names)
        return pkgs_info
| # Get version specific packge information, either remote or local | ||
| def _get_versioned_pkg_info(self, remote, name_ver): | ||
| ret = [] | ||
| if not name_ver: | ||
| return [] | ||
| pkgs_info = self._get_pkg_info(remote, list(name_ver.keys())) | ||
| if pkgs_info: | ||
| for p in pkgs_info: | ||
| if name_ver[p[pkg_name_in_dict]] == p[pkg_version_in_dict]: | ||
| ret.append(p) | ||
| return ret | ||
| def _get_versioned_pkg_info_with_pkgkey(self, remote, name_ver): | ||
| ret = [] | ||
| if not name_ver: | ||
| return [] | ||
| pkgs_info = self._get_pkg_info(remote, list(name_ver.keys()), pkgkey_required=True) | ||
| if pkgs_info: | ||
| for p in pkgs_info: | ||
| if name_ver[p[pkg_name_in_dict]] == p[pkg_version_in_dict]: | ||
| ret.append(p) | ||
| return ret | ||
| # Remove the duplicate ones and retain the latest version | ||
| def _refine_pkg_info_to_latest(self, pkgs_info): | ||
| pkgs_info_new = [] | ||
| if not pkgs_info: | ||
| return [] | ||
| # Remove duplddicated | ||
| for item in pkgs_info: | ||
| operation = "append" | ||
| name = item[pkg_name_in_dict] | ||
| version = item[pkg_version_in_dict] | ||
| for i in pkgs_info_new: | ||
| if name != i[pkg_name_in_dict]: | ||
| continue | ||
| else: | ||
| if misc.version_greater_than(version, i[pkg_version_in_dict]): | ||
| operation = "replace" | ||
| else: | ||
| operation = "None" | ||
| # If match found, than no need continue to check | ||
| break | ||
| # if no break, than need insert the new item | ||
| if operation == "replace": | ||
| pkgs_info_new.remove(i) | ||
| pkgs_info_new.append(item) | ||
| elif operation == "append": | ||
| pkgs_info_new.append(item) | ||
| # exclude board and mcu components | ||
| ''' | ||
| for i in pkgs_info_new: | ||
| name = i[pkg_name_in_dict] | ||
| if name.startswith("board_") or name.startswith("mcu_") or name.startswith("mcu_"): | ||
| pkgs_info_new.remove(i) | ||
| ''' | ||
| return pkgs_info_new | ||
| def _is_version_match(self, osver, pkgname, pkgver): | ||
| # TODO | ||
| vers = self.querysvr.query_ver(pkgname, osver) | ||
| if pkgver in vers: | ||
| return True | ||
| else: | ||
| return False | ||
| def list(self, remote, showall, *arg): | ||
| # List in below format: | ||
| ''' | ||
| print format as below: | ||
| Package Version | ||
| -------------------------------------- -------- | ||
| soupsieve 1.9 | ||
| tornado 5.0.2 | ||
| urllib3 1.22 | ||
| ''' | ||
| names = [] | ||
| if not self._is_wksp_valid(): | ||
| return -1 | ||
| for a in arg: | ||
| names.append(a) | ||
| pkgs_info = self._get_pkg_info(remote, names) | ||
| if not pkgs_info: | ||
| if remote: | ||
| aos_warning("No component for this version of AliOS Things(%s) "\ | ||
| "found on server." % self.localpkg.getOsVersion()) | ||
| return -1 | ||
| if not showall: | ||
| pkgs_info_new = self._refine_pkg_info_to_latest(pkgs_info) | ||
| pkgs_info = pkgs_info_new | ||
| # Print the package infomation | ||
| aos_print("Package"+" "*(pkg_name_max_len-len("package")+1)+"Version") | ||
| aos_print('-'*pkg_name_max_len + ' ' + '-'*pkg_version_max_len) | ||
| for item in pkgs_info: | ||
| name = item[pkg_name_in_dict] | ||
| # Do not list board or mcu components if not to show all | ||
| if not showall: | ||
| if name.startswith("board_") or \ | ||
| name.startswith("mcu_") or \ | ||
| name.startswith("arch_"): | ||
| continue | ||
| version = item[pkg_version_in_dict] | ||
| sys.stdout.write(name) | ||
| sys.stdout.write(" "*(pkg_name_max_len-len(str(name))+1)) | ||
| sys.stdout.write(version) | ||
| sys.stdout.write("\n") | ||
| sys.stdout.flush() | ||
| return 0 | ||
| # install specified components, and their dependencies (TODO) | ||
| def install(self, force, dir, *arg): | ||
| names = [] | ||
| if not self._is_wksp_valid(): | ||
| return | ||
| for i in arg: | ||
| names.append(i) | ||
| if not dir and not names: | ||
| aos_error("No component specified!") | ||
| if dir: # install from provided file | ||
| self._install_from_file(force, dir) | ||
| else: # install from repo | ||
| # find all local and remote kgs, used by multiple places | ||
| all_remote_pkgs = self._get_pkg_info(True, []) | ||
| if not all_remote_pkgs: | ||
| aos_error("Terminated!") | ||
| latest_remote_pkgs = self._refine_pkg_info_to_latest(all_remote_pkgs) | ||
| all_old_pkgs = self.localpkg.getPackagesWithVersion(True, []) | ||
| ret = 0 | ||
| if QUERY_DEP: | ||
| ret = self._query_dep_from_remote_and_install(force, names, all_remote_pkgs, | ||
| latest_remote_pkgs, all_old_pkgs) | ||
| else: | ||
| ret = self._query_dep_from_local_and_install(force, names, all_remote_pkgs, | ||
| latest_remote_pkgs, all_old_pkgs) | ||
| if ret != 0: | ||
| aos_error("Installation terminated!") | ||
| def _query_dep_from_remote_and_install(self, force, names, all_remote_pkgs, latest_remote_pkgs, all_old_pkgs): | ||
| old_pkgs = [] | ||
| pkg_info = [] | ||
| pkg_info_new = [] | ||
| installed = [] | ||
| pkg_info_latest = {} | ||
| pkg_to_install = {} | ||
| deps = [] | ||
| missing_deps = {} | ||
| ret = 0 | ||
| namelist = [] | ||
| localall = False | ||
| # used to find pkgname in [{"pkgname":"pkgverson"},{},] | ||
| nameinlist = lambda x,y:x | ||
| for n in names: | ||
| # Check local | ||
| if not localall: | ||
| if n.startswith('board_') or \ | ||
| n.startswith('mcu_') or \ | ||
| n.startswith('arch_'): | ||
| localall = True | ||
| if not '=' in n: | ||
| namelist.append(n) | ||
| else: | ||
| namelist.append(re.findall(r'(.+)=.+', n)[0]) | ||
| # find already installed pkgs | ||
| already = [] | ||
| for p in all_old_pkgs: | ||
| if p[pkg_name_in_dict] in namelist: | ||
| already += ["%s=%s" % (p[pkg_name_in_dict], p[pkg_version_in_dict])] | ||
| # If there is any installed version, warn user to take action | ||
| if already: | ||
| aos_warning("These components have been installed: " + | ||
| ', '.join(str(x) for x in already)) | ||
| if not force: | ||
| # TODO: handle newer version install in a more elegant way? | ||
| err, choice = aos_input("Do you want to replace? type 'Y[es]' or 'N[o]': ") | ||
| if err != 0: | ||
| aos_error("[Error] Something bad happended, let's exit!") | ||
| choice = choice.strip() | ||
| if not choice == "Yes" and not choice == "Y": | ||
| aos_info("Installation terminated!") | ||
| return 0 | ||
| else: | ||
| aos_warning("You have chosen to replace the installed version. " | ||
| "Please make sure you know what is happening!!!") | ||
| osver = self.localpkg.getOsVersion() | ||
| if not osver: | ||
| aos_error("Unknown AliOS Things version!") | ||
| return -1 | ||
| # making {"pkgname":"pkgver"} dictionary | ||
| for n in names: | ||
| pkgname = None | ||
| pkgver = None | ||
| # Get package name and version information | ||
| if "=" in n: # specified version | ||
| pkgname = re.findall(r'(.+)=.+', n)[0] | ||
| pkgver = re.findall(r'.+=(.+)', n)[0] | ||
| if not pkgname or not pkgver: | ||
| aos_warning("Invalid component name or version specified: %s!" % format(n)) | ||
| return -1 | ||
| if not self._is_version_match(osver, pkgname, pkgver): | ||
| aos_warning("No matching version of component %s=%s for OS version %s found." % (pkgname, pkgver, osver)) | ||
| return -1 | ||
| else: # latest version | ||
| vers = self.querysvr.query_ver(n, osver) | ||
| if not vers: | ||
| aos_warning("No matching version found for pkg %s" % n) | ||
| return -1 | ||
| pkgname = n | ||
| pkgver = vers[0] | ||
| for v in vers: | ||
| if misc.version_greater_than(v, pkgver): | ||
| pkgver = v | ||
| if pkgname not in pkg_to_install.keys(): | ||
| pkg_to_install[pkgname] = pkgver | ||
| else: | ||
| if misc.version_greater_than(pkgver, pkg_to_install[pkgname]): | ||
| pkg_to_install[pkgname] = pkgver | ||
| # fetch dependency | ||
| dep = self._get_dependency_from_remote(pkgname, pkgver) | ||
| if not dep: | ||
| continue | ||
| # add missing dependency to install list | ||
| for d in dep: | ||
| if not d[DEP_NAME] in pkg_to_install.keys() and \ | ||
| not d[DEP_NAME] in list((lambda x:x[DEP_NAME])(x) for x in deps): | ||
| deps.extend([d]) | ||
| sys.stdout.write("The following packages will be installed: ") | ||
| for k,v in pkg_to_install.items(): | ||
| sys.stdout.write("%s=%s " % (k, v)) | ||
| sys.stdout.write("\n") | ||
| sys.stdout.flush() | ||
| # let user confirm the install list | ||
| if not force: | ||
| err, choice = aos_input("Continue to process? type 'Y[es]' or 'N[o]': ") | ||
| choice = choice.strip() | ||
| if not choice == 'Y' and not choice == 'Yes': | ||
| aos_warning("Installation terminated!") | ||
| return -1 | ||
| # find out more dependency | ||
| new_deps = deps | ||
| deps_missing = [] # content: [{"pkgname":"pkgver"},{},] | ||
| deps_missing_names = [] # content: ["names", "name2"] | ||
| deps_upgrade = [] # content: [{"pkgname":"pkgver"},{},] | ||
| deps_upgrade_names = [] # content: ["names", "names"] | ||
| deps_downgrade = [] # content: [{"pkgname":"pkgver"},{},] | ||
| deps_downgrade_names = [] # content: ["names", "name2"] | ||
| while len(new_deps) != 0: | ||
| to_check = new_deps | ||
| new_deps = [] | ||
| for d in to_check: | ||
| state = 'missing' | ||
| dname = d[DEP_NAME] | ||
| dminver = d[DEP_VER_MIN] | ||
| dmaxver = d[DEP_VER_MAX] | ||
| # if maxver not provided, replace with latest ver | ||
| if dmaxver == '0.0.0': | ||
| for p in latest_remote_pkgs: | ||
| if p[pkg_name_in_dict] == dname: | ||
| dmaxver = p[pkg_version_in_dict] | ||
| break | ||
| # examine if it's a reasonable version number | ||
| for p in all_old_pkgs: | ||
| pn = p[pkg_name_in_dict] | ||
| pv = p[pkg_version_in_dict] | ||
| if dname == pn: | ||
| if misc.version_greater_than(pv, dmaxver): | ||
| state = 'downgrade' | ||
| elif misc.version_greater_than(dminver, pv): | ||
| state = 'upgrade' | ||
| else: | ||
| state = 'useful' | ||
| break | ||
| if state == 'useful': | ||
| to_add_ver = pv | ||
| else: | ||
| to_add_ver = dmaxver | ||
| to_add = dict(zip([dname], [to_add_ver])) | ||
| if state == 'missing': | ||
| deps_missing.extend([to_add]) | ||
| deps_missing_names.extend([dname]) | ||
| elif state == 'upgrade': | ||
| deps_upgrade.extend([to_add]) | ||
| deps_upgrade_names.extend([dname]) | ||
| elif state == 'downgrade': | ||
| deps_downgrade.extend([to_add]) | ||
| deps_downgrade_names.extend([dname]) | ||
| # remote result format: [{"name":"xxx", "min":"xxx", "max":xxx}, {},] | ||
| dep = self._get_dependency_from_remote(dname, to_add_ver) | ||
| # if not already in to_install list, add as new dependency | ||
| for d in dep: | ||
| if d[DEP_NAME] in namelist or \ | ||
| d[DEP_NAME] in deps_missing_names + deps_upgrade_names + deps_downgrade_names: | ||
| continue | ||
| else: | ||
| new_deps.extend([d]) | ||
| if not force: | ||
| hint = False | ||
| if deps_missing_names: | ||
| hint = True | ||
| aos_info("The follwoing dependency will be installed: %s" % | ||
| ', '.join(str(x) for x in deps_missing_names)) | ||
| if deps_upgrade_names: | ||
| hint = True | ||
| aos_info("The follwoing dependency will be upgraded: %s" % | ||
| ', '.join(str(x) for x in deps_upgrade_names)) | ||
| if deps_downgrade_names: | ||
| hint = True | ||
| aos_info("The follwoing dependency will be downgraded: %s" % | ||
| ', '.join(str(x) for x in deps_downgrade_names)) | ||
| if hint: | ||
| err, choice = aos_input("Do you want to continue? type 'Y[es]' or 'N[o]': ") | ||
| if err != 0: | ||
| aos_error("[Error] Something bad happended, let's exit!") | ||
| choice = choice.strip() | ||
| if not choice == "Yes" and not choice == "Y": | ||
| aos_info("Installation terminated!") | ||
| return 0 | ||
| else: | ||
| aos_warning("You have chosen to replace the installed version. " | ||
| "Dependency will be upgraded or downgraded automatically!!!") | ||
| # combine all to install into one dict | ||
| for i in deps_missing: | ||
| pkg_to_install.update(i) | ||
| for i in deps_upgrade: | ||
| pkg_to_install.update(i) | ||
| for i in deps_downgrade: | ||
| pkg_to_install.update(i) | ||
| #pkg_to_install = dict(set(pkg_to_install)) | ||
| # find already installed pkgs | ||
| already = [] | ||
| for p in all_old_pkgs: | ||
| if p[pkg_name_in_dict] in pkg_to_install.keys(): | ||
| already += "%s" % p[pkg_name_in_dict] | ||
| rpkg_info = self._get_pkg_info(True, list(pkg_to_install.keys())) | ||
| if not rpkg_info: | ||
| aos_error("Terminated!") | ||
| for pkgname, pkgver in pkg_to_install.items(): | ||
| # Remove old first if any | ||
| if pkgname in already: | ||
| if self._uninstall(False, False, rpkg_info, [pkgname]) != 0: | ||
| aos_warning("Failed to uninstall old %s, new version" | ||
| " is not installed" % pkgname) | ||
| ret -= 1 | ||
| continue | ||
| # Download and install rpm | ||
| rpmname = get_pkg_full_name(pkgname, pkgver) | ||
| rpm = self.cache.repo.getPackage(rpmname) | ||
| if not os.path.isfile(rpm): | ||
| aos_warning("Failed to download pakcage, %s is not installed!" % pkgname) | ||
| ret = -1 | ||
| continue | ||
| r = rpmfile.RPMFile(rpm) | ||
| r.install(self.localpkg.aos_src_root) | ||
| # TODO: Update the related Config.in files | ||
| update_configin() | ||
| installed.append(pkgname) | ||
| aos_info("Component %s=%s installed in %s" % | ||
| (pkgname, pkgver, self.localpkg.aos_src_root)) | ||
| aos_info("Component(s): '%s' installed" % ', '.join(str(x) for x in installed)) | ||
| not_installed = set(pkg_to_install.keys()) - set(installed) | ||
| if not_installed: | ||
| aos_warning("Not installed components: '%s'! Please redo " | ||
| "installation for them!" % ', '.join(str(x) for x in not_installed)) | ||
| return ret | ||
| def _query_dep_from_local_and_install(self, force, names, all_remote_pkgs, latest_remote_pkgs, all_old_pkgs): | ||
| old_pkgs = [] | ||
| pkg_info = [] | ||
| pkg_info_new = [] | ||
| installed = [] | ||
| pkg_info_latest = {} | ||
| ret = 0 | ||
| namelist = [] | ||
| localall = False | ||
| for n in names: | ||
| # Check local | ||
| if not localall: | ||
| if n.startswith('board_') or \ | ||
| n.startswith('mcu_') or \ | ||
| n.startswith('arch_'): | ||
| localall = True | ||
| if not '=' in n: | ||
| namelist.append(n) | ||
| else: | ||
| namelist.append(re.findall(r'(.+)=.+', n)[0]) | ||
| old_pkgs = self.localpkg.getPackagesWithVersion(localall, namelist) | ||
| # If there is any installed version, warn user to take action | ||
| if old_pkgs: | ||
| old_info = lambda d:d[pkg_name_in_dict]+'='+d[pkg_version_in_dict] | ||
| aos_warning("These components have been installed: " + | ||
| ', '.join(map(old_info, old_pkgs))) | ||
| if not force: | ||
| # TODO: handle newer version install in a more elegant way? | ||
| err, choice = aos_input("Do you want to replace? type 'Y[es]' or 'N[o]': ") | ||
| if err != 0: | ||
| aos_error("[Error] Something bad happended, let's exit!") | ||
| choice = choice.strip() | ||
| if not choice == "Yes" and not choice == "Y": | ||
| aos_info("Installation terminated!") | ||
| return 0 | ||
| else: | ||
| aos_warning("You have chosen to replace the installed version. " | ||
| "Please make sure you know what is happening!!!") | ||
| pkg_info = self._get_pkg_info(True, namelist) | ||
| if not pkg_info: | ||
| aos_error("Terminated!") | ||
| pkg_info_namelist = [(lambda p:p[pkg_name_in_dict])(p) for p in pkg_info] | ||
| missing_namelist = set(namelist) - set(pkg_info_namelist) | ||
| if not pkg_info or len(missing_namelist): | ||
| aos_error("Failed to get remote component information for '%s'" % | ||
| ', '.join(missing_namelist)) | ||
| pkg_info_new = self._refine_pkg_info_to_latest(pkg_info) | ||
| for pkg in pkg_info_new: | ||
| pkg_info_latest[pkg[pkg_name_in_dict]] = pkg[pkg_version_in_dict] | ||
| osver = self.localpkg.getOsVersion() | ||
| if not osver: | ||
| aos_warning("Unknown AliOS Things version!") | ||
| return -1 | ||
| while len(names) > 0: | ||
| n = names[0] | ||
| pkgname = None | ||
| pkgver = None | ||
| # Get package name and version information | ||
| if "=" in n: # specified version | ||
| pkgname = re.findall(r'(.+)=.+', n)[0] | ||
| pkgver = re.findall(r'.+=(.+)', n)[0] | ||
| if not pkgname or not pkgver: | ||
| aos_warning("Invalid component name or version specified!") | ||
| ret -= 1 | ||
| continue | ||
| else: # latest version | ||
| pkgname = n | ||
| pkgver = pkg_info_latest[pkgname] | ||
| # Check if the OS and pkg version match | ||
| if not self._is_version_match(osver, pkgname, pkgver): | ||
| aos_warning("Component %s=%s not installed! Not expected OS " | ||
| "version %s." % (pkgname, pkgver, osver)) | ||
| ret -= 1 | ||
| continue | ||
| # Remove old first if any | ||
| if pkgname in map(lambda p:p[pkg_name_in_dict], old_pkgs): | ||
| if self._uninstall(False, False, pkg_info, [pkgname]) != 0: | ||
| aos_warning("Failed to uninstall old %s, new version" | ||
| " is not installed" % pkgname) | ||
| ret -= 1 | ||
| continue | ||
| # Download and install rpm | ||
| rpmname = get_pkg_full_name(pkgname, pkgver) | ||
| rpm = self.cache.repo.getPackage(rpmname) | ||
| r = rpmfile.RPMFile(rpm) | ||
| r.install(self.localpkg.aos_src_root) | ||
| # TODO: Update the related Config.in files | ||
| update_configin() | ||
| installed.append(pkgname) | ||
| aos_info("Component %s=%s installed in %s" % | ||
| (pkgname, pkgver, self.localpkg.aos_src_root)) | ||
| dep = self._get_dependency_from_local(pkgname) | ||
| if dep: | ||
| missing_dep = self._get_missing_pkg(dep) | ||
| missing_dep = list(set(missing_dep) - set(names)) | ||
| aos_info("Missing dependency found when installing " | ||
| "%s: %s" % (pkgname, str(missing_dep))) | ||
| if missing_dep: | ||
| missing_dep_info = self._get_pkg_info(True, missing_dep) | ||
| if not missing_dep_info: | ||
| aos_error("Terminated!") | ||
| missing_dep_info_new = self._refine_pkg_info_to_latest(missing_dep_info) | ||
| for pkg in missing_dep_info_new: | ||
| pkg_info_latest[pkg[pkg_name_in_dict]] = pkg[pkg_version_in_dict] | ||
| names.extend(missing_dep) | ||
| names.remove(n) | ||
| aos_info("Component(s): '%s' installed" % str(installed)) | ||
| not_installed = set(namelist) - set(installed) | ||
| if not_installed: | ||
| aos_warning("Not installed components: '%s'! Please redo " | ||
| "installation for them!" % str(not_installed)) | ||
| return ret | ||
    # Uninstall specified components only, no dependency!
    def uninstall(self, force, all, *arg):
        """Uninstall the named components, or every component with all=True.

        force -- forwarded to _uninstall() (skip confirmation there).
        all   -- when truthy, wipe all component/platform directories after
                 an interactive confirmation.
        *arg  -- component names to uninstall.
        Returns 0 on success, -1 on invalid workspace, or the negative
        failure count from _uninstall().
        """
        ret = 0
        names = []
        uninstall_all = False
        if not self._is_wksp_valid():
            return -1
        for i in arg:
            names.append(i)
        if not names and not all:
            aos_warning("No component specified! Nothing happened!")
            return 0
        if not all:
            # Selective uninstall: delegate to _uninstall (no rpkgs cached).
            ret = self._uninstall(force, all, None, names)
        else:
            aos_warning("You are going to uninstall all components. Is it the expected operation?")
            err, choice = aos_input("Please confirm by typing 'Y[es]' or 'N[o]': ")
            if err != 0:
                # NOTE(review): assumes aos_error() terminates the process;
                # otherwise 'choice' below may be invalid — confirm.
                aos_error("[Error] Something bad happended, let's exit!")
            choice = choice.strip()
            if choice == "Yes" or choice == "Y":
                uninstall_all = True
            else:
                aos_info("Operation terminated.")
                sys.exit(0)
        # Clean top level dirs
        if not self.testing_env and uninstall_all:
            for d in list(OS_COMPONENT_DIRS) + list(OS_PLATFORM_DIRS):
                d = os.path.join(self.localpkg.aos_src_root, d)
                # delete all subfolder and files in top level dirs, but keep top level dirs themselves.
                if not os.path.isdir(d):
                    continue
                for filename in os.listdir(d):
                    filepath = os.path.join(d, filename)
                    try:
                        if os.path.isfile(filepath) or os.path.islink(filepath):
                            os.unlink(filepath)
                        elif os.path.isdir(filepath):
                            shutil.rmtree(filepath)
                    except Exception as e:
                        aos_error("[Error] Failed to delete %s, error: %s" % (filepath, format(e)))
        if uninstall_all:
            aos_info("All components have been uninstalled.")
        return ret
    def _uninstall(self, force, all, rpkgs, names):
        """Remove the named components' files from the local source tree.

        force -- accepted for interface compatibility; not used in this body.
        all   -- accepted for interface compatibility; not used in this body.
        rpkgs -- pre-fetched remote package info, or None to query the server.
        names -- list of component names to remove.
        Returns 0 on full success, -1 on a missing-component error, or a
        negative count of per-package failures.
        """
        old_pkgs = []
        uninstalled = []
        ret = 0
        if not names:
            return 0
        localall = None
        for n in names:
            # Check local: board_/mcu_/arch_ components also live under the
            # platform dirs, so the local search must cover them too.
            if n.startswith('board_') or \
               n.startswith('mcu_') or \
               n.startswith('arch_'):
                localall = True
            if localall:
                break
        # Get local pkg name and list
        old_pkgs = self.localpkg.getPackagesWithVersion(localall, names)
        local_missing = set(names) - set(map(lambda x:x[pkg_name_in_dict], old_pkgs))
        if local_missing:
            aos_warning("The following specified components are not"
                        " installed: " + str(local_missing))
            aos_warning("Uninstall process terminated!")
            return -1
        # Get remote pkg info if not provided
        if not rpkgs:
            name_ver = {}
            for p in old_pkgs:
                name_ver[p[pkg_name_in_dict]] = p[pkg_version_in_dict]
            pkg_info = self._get_versioned_pkg_info_with_pkgkey(True, name_ver)
        else:
            pkg_info = rpkgs
        remote_missing = set(names) - set([(lambda p:p[pkg_name_in_dict])(p) for p in pkg_info])
        if remote_missing:
            aos_warning("The following specified components are "\
                        "not found on server: " + str(remote_missing))
            aos_warning("\nIf this is a component "\
                        "locally created or copied, you need remove it manually.")
            aos_warning("Uninstall process terminated!")
            return -1
        # Iterate on each pkg
        _sep = os.path.sep
        for pkg in pkg_info:
            failure = 0
            # Get remote filelist info according to pkgkey
            if not pkg_pkgkey_in_dict in pkg.keys():
                aos_error("pkgkey is missing in the pkg info!")
            pkgkey = pkg[pkg_pkgkey_in_dict]
            filelist_info = self.cache.filelists_db_file.getFiles(pkgkey)
            if not filelist_info:
                aos_warning("Failed to get information for package %s, "
                            "not uninstalled!" % pkg[pkg_name_in_dict])
                ret -= 1
                continue
            # Delete files, and dirs if become empty.
            # The loop processes leaf directories first; a non-leaf entry is
            # rotated to the back of the list until its children are done.
            dirs = list(map(lambda x:x[pkg_dirname_in_dict], filelist_info))
            while len(filelist_info) > 0:
                i = filelist_info[0]
                terminated = False
                # if not leaf dir, iterate it later
                if is_parent_dir(i[pkg_dirname_in_dict], dirs):
                    filelist_info.remove(i)
                    filelist_info.append(i)
                    continue
                # delete files (dirname is recorded relative to the source
                # root; filenames are '/'-joined in the metadata record)
                dir = self.localpkg.aos_src_root + i[pkg_dirname_in_dict]
                files = i[pkg_filenames_in_dict].split('/')
                for f in files:
                    file = _sep.join([dir, f])
                    if os.path.isfile(file):
                        try:
                            os.remove(file)
                        except Exception as e:
                            aos_warning("Failed to delete file %s (error: %s)" % (file, format(e)))
                            aos_warning("Component %s uninstall process terminated, please "
                                        "remove the directory %s manually!" %
                                        (pkg[pkg_name_in_dict], dir))
                            terminated = True
                            break
                    else:
                        # Missing file: count it; more than one missing file is
                        # treated as a corrupted local install below.
                        failure += 1
                        aos_print("Warning: invalid file %s" % file)
                if terminated:
                    dirs.remove(i[pkg_dirname_in_dict])
                    filelist_info.remove(i)
                    ret -= 1
                    continue
                # delete the dir if become empty or if only ucube.py left in it
                if os.path.isdir(dir):
                    _left = os.listdir(dir)
                    if not _left or (len(_left) == 1 and _left[0] == 'ucube.py'):
                        try:
                            shutil.rmtree(dir)
                        except Exception as e:
                            aos_warning("Failed to delete dirertory %s, error: %s" % (dir, format(e)))
                            aos_warning("Component %s uninstall process terminated, please "
                                        "remove the directory %s manually!" %
                                        (pkg[pkg_name_in_dict], dir))
                            dirs.remove(i[pkg_dirname_in_dict])
                            filelist_info.remove(i)
                            ret -= 1
                            continue
                # delete parent dir if becom empty, this deals with the
                # cases like xxx/yyy/zzz/a.txt, when a.txt is deleted,
                # xxx, yyy, and zzz all become empty, thus xxx, yyy, zzz
                # all should be deleted, not only zzz.
                terminated = False
                parent_dir = os.path.abspath(os.path.join(dir, ".."))
                while is_parent_dir(self.localpkg.aos_src_root, [parent_dir]):
                    if not os.path.isdir(parent_dir) or os.listdir(parent_dir):
                        break
                    # delete this dir since it becomes empty now
                    try:
                        shutil.rmtree(parent_dir)
                    except Exception as e:
                        aos_warning("Failed to delete dirertory %s, error: %s" % (dir, format(e)))
                        aos_warning("Component %s uninstall process terminated, please "
                                    "remove the directory %s manually!" %
                                    (pkg[pkg_name_in_dict], dir))
                        terminated = True
                        break
                    parent_dir = os.path.abspath(os.path.join(parent_dir, ".."))
                if terminated:
                    dirs.remove(i[pkg_dirname_in_dict])
                    filelist_info.remove(i)
                    ret -= 1
                    continue
                dirs.remove(i[pkg_dirname_in_dict])
                filelist_info.remove(i)
            # Update Config.in
            update_configin()
            if failure > 1:
                aos_warning("Failed to uninstall component %s\n"\
                            "Seems like something wrong with the local version of the component.\n"\
                            "Please make sure the version numbers are not modified unexpectly."\
                            % pkg[pkg_name_in_dict])
            else:
                uninstalled.append(pkg[pkg_name_in_dict])
        if uninstalled:
            aos_info("Component(s): '%s' uninstalled" % \
                     ', '.join(str(x) for x in uninstalled))
        not_uninstalled = set(names) - set(uninstalled)
        if not_uninstalled:
            aos_warning("Not uninstalled components: '%s'! Please redo the "
                        "uninstallation for them!" % str(not_uninstalled))
        return ret
    def upgrade(self, onlychk, *arg):
        """Check for, and optionally install, newer component versions.

        onlychk -- when truthy, only print the upgradable-package table;
                   otherwise also ask for confirmation and install.
        *arg    -- optional component names to restrict the check to.
        Returns 0 on success/no-op, -1 on input error.
        """
        names = []
        upgrade_list = []
        # NOTE(review): local_dict/remote_dict are never used below.
        local_dict = {}
        remote_dict = {}
        if not self._is_wksp_valid():
            return -1
        for i in arg:
            names.append(i)
        local_pkgs = self._get_pkg_info(False, names)
        names = list((lambda x:x[pkg_name_in_dict])(p) for p in local_pkgs)
        remote_pkgs = self._get_pkg_info(True, names)
        if not remote_pkgs:
            aos_error("Terminated!")
        remote_pkgs = self._refine_pkg_info_to_latest(remote_pkgs)
        remote_names = list((lambda x:x[pkg_name_in_dict])(p) for p in remote_pkgs)
        if not local_pkgs:
            aos_info("No component is installed.")
            return 0
        missing_names = None
        local_pkgs_new = []
        if len(local_pkgs) != len(remote_pkgs):
            # Some locally installed components are unknown to the server;
            # offer to continue with the intersection only.
            missing_names = list(set(names) - set(remote_names))
            aos_warning("Some component(s) cannot be found on remote server: %s\n"
                        "Only the found components will be upgraded.\n" %
                        ', '.join(str(x) for x in missing_names))
            err, choice = aos_input("Do you want to continue? type 'Y[es]' or 'N[o]': ")
            if err != 0:
                aos_warning("Error happened!")
                return -1
            else:
                choice = choice.strip()
                if choice == "Y" or choice == "Yes":
                    # exclude remote missing pkgs
                    for i in local_pkgs:
                        if i[pkg_name_in_dict] in remote_names:
                            new_local_names = list((lambda x:x[pkg_name_in_dict])(p) for p in local_pkgs_new)
                            if i[pkg_name_in_dict] in new_local_names:
                                # hack for publish components, which may exist in internal test
                                if i[pkg_localdir_in_dict].endswith('publish') or \
                                   i[pkg_localdir_in_dict].endswith('publish' + os.path.sep):
                                    aos_warning("In upgrade(), seems like component %s(in %s) "\
                                                "is not a valid component, will ignore it." %\
                                                (i[pkg_name_in_dict], i[pkg_localdir_in_dict]))
                                    continue
                            local_pkgs_new += [i]
                else:
                    aos_info("Upgrade process terminated!")
                    return 0
        if local_pkgs_new:
            local_pkgs = local_pkgs_new
        if len(local_pkgs) != len(remote_pkgs):
            aos_error("local(len=%d) mismatch remote(len=%d)!" %
                      (len(local_pkgs), len(remote_pkgs)))
        # Sort both sides by name so index i refers to the same component.
        k = lambda s:s[pkg_name_in_dict]
        local_pkgs.sort(key=k)
        remote_pkgs.sort(key=k)
        _upgrade_header_print = False
        for i in range(len(local_pkgs)):
            # NOTE(review): this is a plain string comparison, not numeric —
            # '1.10.0' < '1.9.0' lexicographically; consider using the
            # version-compare helpers instead. TODO confirm intended.
            if local_pkgs[i][pkg_version_in_dict] < remote_pkgs[i][pkg_version_in_dict]:
                name = local_pkgs[i][pkg_name_in_dict]
                old_version = local_pkgs[i][pkg_version_in_dict]
                new_version = remote_pkgs[i][pkg_version_in_dict]
                if onlychk:
                    # header print if not already
                    if not _upgrade_header_print:
                        print_upgrade_list_header()
                        _upgrade_header_print = True
                    # print pkg name and version information
                    sys.stdout.write(name)
                    sys.stdout.write(" "*(pkg_name_max_len-len(str(name))))
                    sys.stdout.write(old_version)
                    sys.stdout.write(" "*(pkg_version_max_len-len(str(old_version))))
                    sys.stdout.write(new_version)
                    sys.stdout.write("\n")
                    sys.stdout.flush()
                upgrade_list.append(name+'='+new_version)
        if not upgrade_list:
            aos_info("Everything is up-to-date.")
        elif not onlychk:
            aos_print("The following packages as well as their dependency will be installed: ")
            for u in upgrade_list:
                aos_print("%s " % u)
            aos_print("\n")
            err, choice = aos_input("Do you want to continue? type 'Y[es]' or 'N[o]': ")
            if err != 0:
                aos_warning("Error happened!")
                return -1
            else:
                choice = choice.strip()
                if choice == "Y" or choice == "Yes":
                    # install(force=True, ...) with the name=version specs
                    self.install(True, None, *tuple(upgrade_list))
        return 0
    def _get_dependency_from_local(self, pkgname):
        """Return pkgname's dependency list parsed from the local aos.mk files."""
        return self.localpkg.getDependency(pkgname)
    def _get_dependency_from_remote(self, pkgname, pkgver):
        """Query the remote server for the dependencies of pkgname=pkgver."""
        return self.querysvr.query_dep(pkgname, pkgver)
    def _get_missing_pkg(self, pkgnames):
        """Return the subset of pkgnames not installed locally (a set)."""
        return self.localpkg.getMissingPackages(pkgnames)
    def _get_missing_versioned_pkg(self, pkgs):
        """Return names from pkgs ({'name','min','max'} dicts) with no
        matching local install inside the version range."""
        return self.localpkg.getMissingVersionedPackages(pkgs)
    # Create a component with files from 'dir', and zip result placed in cur dir
    def create(self, dir):
        """Package the component under *dir* into <name>-<version>.zip.

        The name and version are read from the component's aos.mk
        (NAME / $(NAME)_VERSION). The zip is staged in the cache dir and
        copied into the current working directory.
        Returns 0 on success, -1 on invalid workspace; other failures go
        through aos_error().
        """
        srcdir = dir
        if not self._is_wksp_valid():
            return -1
        if not os.path.isabs(dir):
            srcdir = os.path.abspath(dir)
        rootdir = self.localpkg.aos_src_root
        tmpdir = self.cache.cachedir
        dstdir = os.getcwd()
        # Path of the component relative to the source root; its first path
        # element is the top-level staging folder inside tmpdir.
        srcdir_rel = os.path.relpath(srcdir, start=rootdir)
        firstlevelfolder = re.findall(r'^([0-9a-zA-Z\._\- ]+).*', srcdir_rel)
        if not firstlevelfolder:
            aos_error("Invalid dir provided!")
        else:
            firstlevelfolder = firstlevelfolder[0]
        # whether or not a valid component dir?
        if not (os.path.isfile(os.path.join(srcdir, 'aos.mk'))):
            aos_error("Not a valid component directory!")
        # find the name and version
        name = None
        version = None
        with open(os.path.join(srcdir, 'aos.mk'), 'r') as f:
            data = f.read().splitlines()
            for line in data:
                # match name
                if not name and re.findall(r'^\s*NAME\s*:?=.+', line):
                    pkgname = re.findall(r'.*=\s*(\S+)', line)
                    if pkgname:
                        name = pkgname[0]
                # match $(NAME)_VERSION; stop once found
                if not version and re.findall(r'^.*\$\(NAME\)_VERSION\s*:?=.+', line):
                    pkgver = re.findall(r'.*=\s*(\S+)', line)
                    if pkgver:
                        version = pkgver[0]
                        break
        if not name or not version:
            aos_error("Name or version missing in the component makefile!")
        # make the dir and copy the contents
        newtopdir = os.path.join(tmpdir, firstlevelfolder)
        try:
            if os.path.exists(newtopdir):
                shutil.rmtree(newtopdir)
            shutil.copytree(srcdir, os.path.join(tmpdir, srcdir_rel))
        except Exception as e:
            if os.path.exists(newtopdir):
                shutil.rmtree(newtopdir)
            aos_error("Failed to create (error: %s)!" % format(e))
        # zip the tmpdir
        zipf = os.path.join(tmpdir, '-'.join([name, version]) + '.zip')
        try:
            # zip, include only relative path
            # NOTE(review): chdir into tmpdir is never restored on the
            # success path — callers see a changed cwd. TODO confirm.
            os.chdir(tmpdir)
            with zipfile.ZipFile(zipf, 'w', zipfile.ZIP_DEFLATED) as myzip:
                for path, dirnames, filenames in os.walk(os.path.join(tmpdir, srcdir_rel)):
                    for file in filenames:
                        myzip.write(os.path.join(os.path.relpath(path, start=tmpdir), file))
            # copy to destination dir
            shutil.copy(zipf, dstdir)
        except Exception as e:
            if os.path.exists(newtopdir):
                shutil.rmtree(newtopdir)
            if os.path.exists(zipf):
                os.remove(zipf)
            aos_error('Failed to write to zip file (error: %s)' % format(e))
        aos_info("Component successfully created in %s" %
                 os.path.join(dstdir, '-'.join([name, version]) + '.zip'))
        return 0
| def _install_from_file(self, force, file): | ||
| ''' TODO: | ||
| handle force later, force will check if already installed | ||
| and take action only if user want to replace the installed. | ||
| ''' | ||
| if not dir: | ||
| aos_warning("No component file provided!") | ||
| return -1 | ||
| if not os.path.isfile(file): | ||
| aos_warning("Not a valid file provided!") | ||
| return -1 | ||
| file = os.path.abspath(file) | ||
| if zipfile.is_zipfile(file): | ||
| self._install_from_zip(file) | ||
| elif file.endswith(".rpm"): | ||
| r = rpmfile.RPMFile(file) | ||
| r.install(self.localpkg.aos_src_root) | ||
| else: | ||
| aos_warning("Seems not a valid local component file, only .zip and .rpm file accepted!") | ||
| return -1 | ||
| # TODO: Update the related Config.in files | ||
| update_configin() | ||
| aos_info("Local component %s is installed." % file) | ||
| return 0 | ||
| # Only file itself is installed, not as well as its dependency. | ||
| def _install_from_zip(self, file): | ||
| if not file: | ||
| return | ||
| ''' TODO: | ||
| 1. check there is a valid component in the zip file. | ||
| 2. check if there is already an existing one. | ||
| ''' | ||
| origdir = os.getcwd() | ||
| try: | ||
| os.chdir(self.localpkg.aos_src_root) | ||
| rootdir = os.getcwd() | ||
| with zipfile.ZipFile(file, "r") as myzip: | ||
| badcrc = myzip.testzip() | ||
| if badcrc: | ||
| aos_error("Bad CRC for file %s of the zip archive" % badcrc) | ||
| myzip.extractall(rootdir) | ||
| except Exception as e: | ||
| aos_error("Failed to install from zip file, error: %s!" % format(e)) | ||
| finally: | ||
| os.chdir(origdir) |
| from aos.constant import AOS_SERVER_URL, AOS_HTTP_HEADER, AOS_HTTP_METHOD | ||
| ''' | ||
| aos addon commands is only supported starting from ADDON_OS_VER_BASE. | ||
| old versions are not supported! | ||
| ''' | ||
| ADDON_OS_VER_BASE = '3.1.0' | ||
| ''' | ||
| AliOS Things source directory check points | ||
| ''' | ||
| aos_src_dir_chk1 = 'include/aos' | ||
| aos_src_dir_chk2 = 'core' | ||
| aos_src_dir_chk3 = 'components' | ||
| ''' | ||
| getPackagesxxx() in localpkg.py and sqlite.py will return a list of components. | ||
| Information in the list are as below: | ||
| [ | ||
| { | ||
| "name": "<comp_name1>", # required | ||
| "version": "<comp_version1>", # required | ||
| # optional hereafter | ||
| "pkgKey": <comp_pkgkey1>, # used by remote list, optional for local | ||
| "localdir": "<comp_dir1>", # used by local list, not applicable for remote | ||
| ... | ||
| }, | ||
| ... | ||
| ] | ||
| ''' | ||
| pkg_name_in_dict = "name" | ||
| pkg_version_in_dict = "version" | ||
| pkg_localdir_in_dict = "localdir" | ||
| pkg_pkgkey_in_dict = "pkgKey" | ||
| ''' | ||
| getFiles will return a list of file/dir information in below format: | ||
| [ | ||
| { | ||
| "pkgKey": "pkgkey1" | ||
| "dirname": "dir1", | ||
| "filenames": "filenames1" | ||
| }, | ||
| ... | ||
| ] | ||
| ''' | ||
| pkg_dirname_in_dict = "dirname" | ||
| pkg_filenames_in_dict = "filenames" | ||
| ''' | ||
| This is the postfix of the RPM file in repo. | ||
| RPM file name convention: <comp_name>-<comp_ver>-<rpm postfix> | ||
| e.g.: | ||
| activation-1.0.0-r0.aos.noarch.rpm | ||
| arch_armv7a-1.0.1-r0.aos.noarch.rpm | ||
| board_asr5501-1.0.0.1-r0.aos.noarch.rpm | ||
| mcu_moc108-1.0.1.2-r0.aos.noarch.rpm | ||
| zlib-1.0.0-r0.aos.noarch.rpm | ||
| ''' | ||
| pkg_rpm_postfix = 'r0.aos.noarch.rpm' | ||
| pkg_name_board_prefix = "board_" | ||
| pkg_name_mcu_prefix = "mcu_" | ||
| pkg_name_arch_prefix = "arch_" | ||
| ''' | ||
| Directories used for search components. | ||
| OS_COMPONENT_DIRS: normal AliOS Things provided componets | ||
| OS_PLATFORM_DIRS: 3rd party components | ||
| ''' | ||
| OS_COMPONENT_DIRS = (["components", "application", "core"]) | ||
| OS_PLATFORM_DIRS = (["platform"]) | ||
| # Components version/dependency information http query servers | ||
| ## general | ||
| PKG_QUERY_URL = AOS_SERVER_URL | ||
| PKG_QUERY_HTTP_HEADER = AOS_HTTP_HEADER | ||
| PKG_QUERY_HTTP_METHOD = AOS_HTTP_METHOD | ||
| ## ver query | ||
| PKG_VER_QUERY_API = "/getComponentVersionOfSystemVersion" | ||
| PKG_VER_QUERY_DATA_FORMAT = "{\"compName\": \"%s\", \"systemVersion\":\"%s\"}" | ||
| ## dep qeury | ||
| PKG_DEP_QUERY_API = "/getComponentDependency" | ||
| PKG_DEP_QUERY_DATA_FORMAT = "{\"name\": \"%s\", \"version\":\"%s\"}" | ||
| ## comp list qeury | ||
| PKG_CLIST_QUERY_API = "/getComponentListOfSystemVersion" | ||
| PKG_CLIST_QUERY_DATA_FORMAT = "{\"systemVersion\":\"%s\"}" | ||
| # Wether query dependency from remote server through HTTP api | ||
| QUERY_DEP = True | ||
| # Wether query pkg info list from remote server through HTTP api | ||
| QUERY_PKGLIST = True | ||
| # dependency query results from remote provides in below format: | ||
| # a list of dict {"min":'x.x.x[.x]', "max":'x.x.x[.x]', "name":'xxx'} | ||
| DEP_NAME = "name" | ||
| DEP_VER_MIN = "min" | ||
| DEP_VER_MAX = "max" | ||
| # use urlgrabber or requests as url pakcage, urlgrabber is not likely to be | ||
| # good choice, will switch to requests later. | ||
| PYURLPKG = 'requests' # 'urlgrabber' |
| import os | ||
| import sys | ||
| import re | ||
| from aos.util import get_host_os, get_aos_version, locale_to_unicode | ||
| from aos.managers.constant import * | ||
| from aos.managers.misc import version_greater_than, version_not_less_than, aos_warning | ||
| from aos.constant import NO_AOSSRC_HINT | ||
def check_aos_src_root(dir):
    """Walk upwards from *dir* looking for the AliOS Things source root.

    A directory qualifies when it contains both the aos_src_dir_chk1
    ('include/aos') and aos_src_dir_chk2 ('core') subdirectories.
    Returns ('success', <root_path>) or ('fail', None). The process
    working directory is restored before returning.
    """
    original_dir = os.getcwd()
    host_os = get_host_os()
    if host_os == 'Win32':
        # NOTE(review): assumes an upper-case drive letter like 'C:\';
        # a lower-case drive reported by os.getcwd() would never match
        # and the climb loop could not stop at the root — confirm.
        sys_root = re.compile(r'^[A-Z]{1}:\\$')
    else:
        sys_root = re.compile('^/$')
    os.chdir(dir)
    # Climb one level per iteration until both check dirs exist here,
    # or the filesystem root is reached (failure).
    while os.path.isdir('/'.join(['.', aos_src_dir_chk1])) == False or \
          os.path.isdir('/'.join(['.', aos_src_dir_chk2])) == False:
        os.chdir('../')
        if sys_root.match(os.getcwd()):
            if os.path.isdir(original_dir):
                os.chdir(original_dir)
            return 'fail', None
    aos_root_dir = os.getcwd()
    if os.path.isdir(original_dir):
        os.chdir(original_dir)
    return 'success', aos_root_dir
class LocalPkg():
    """Accessor for components installed in a local AliOS Things source tree.

    Usable as a context manager. Call validate() first to locate the source
    root; the other methods rely on self.aos_src_root being set.
    """
    def __init__(self):
        # Absolute (unicode) path of the AliOS Things source root,
        # filled in by validate(); None until then.
        self.aos_src_root = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # BUG FIX: was misspelled '__exit' (missing trailing underscores),
        # which made 'with LocalPkg() ...' raise AttributeError because the
        # context-manager protocol requires __exit__.
        pass

    def validate(self):
        """Locate and version-check the AliOS Things source root.

        Search order: the current directory upwards, then the AOS_SDK_PATH
        environment variable. Returns True when a root is found and its OS
        version is at least ADDON_OS_VER_BASE, else False (with a warning).
        """
        # Check if curdir is a valid aos root first
        curdir = os.getcwd()
        ret, dir = check_aos_src_root(curdir)
        if ret == 'success':
            self.aos_src_root = os.path.abspath(dir)
        else:
            # If curdir not hit, then check if AOS_SDK_PATH environment
            # variable indicates a valid aos root
            aos_sdk_path = os.environ.get("AOS_SDK_PATH")
            if aos_sdk_path and os.path.isdir(aos_sdk_path):
                ret, dir = check_aos_src_root(aos_sdk_path)
                if ret == 'success':
                    self.aos_src_root = os.path.abspath(dir)
        if not self.aos_src_root:
            aos_warning(NO_AOSSRC_HINT)
            return False
        else:
            # aos_src_root will be widely used, so unicode encoding.
            # otherwise errors in ' aos_src_root + str' and str.join() cases.
            self.aos_src_root = locale_to_unicode(self.aos_src_root)
        osver = self.getOsVersion()
        if not osver:
            aos_warning("Failed to find AliOS Things version inforamtion!")
            return False
        if not version_not_less_than(osver, ADDON_OS_VER_BASE):
            aos_warning("\nThis command is not supported on your verson "
                        "of AliOS Things!\nYour version is '%s' while minimal "
                        "supported version is '%s'." % (osver, ADDON_OS_VER_BASE))
            return False
        return True

    def getPackagesWithVersion(self, searchall, names=[]):
        """Scan the source tree and return installed components' info.

        searchall -- also search OS_PLATFORM_DIRS (3rd-party components).
        names     -- optional list of component names to restrict the scan
                     (the default list is never mutated).
        Returns a list of {name, version, localdir} dicts sorted by name.
        """
        pkgs_info = []
        pkgdirs = list(OS_COMPONENT_DIRS)
        if searchall:
            pkgdirs.extend(list(OS_PLATFORM_DIRS))
        pkgdirs = set(pkgdirs)
        for dir in pkgdirs:
            dir_to_walk = os.path.abspath(self.aos_src_root + os.path.sep + dir)
            for dirpath, dirnames, filenames in os.walk(dir_to_walk, topdown=True):
                if not "aos.mk" in filenames:
                    continue
                else:
                    pkgname = None
                    pkgver = None
                    filename = os.path.abspath(dirpath + os.path.sep + 'aos.mk')
                    with open(filename, "r") as f:
                        data = f.read().splitlines()
                        for line in data:
                            # NAME := xxx
                            if not pkgname and re.findall(r'^.*NAME\s*:?=.+', line):
                                pkgname = re.findall(r'.*=\s*(\S+)', line)
                                # Skip if not in the specified name list
                                if names and pkgname and not pkgname[0] in names:
                                    break
                                else:
                                    continue
                            # $(NAME)_VERSION := x.x.x
                            if not pkgver and re.findall(r'^.*\$\(NAME\)_VERSION\s*:?=.+', line):
                                pkgver = re.findall(r'.*=\s*(\S+)', line)
                                break
                    if names and pkgname and not pkgname[0] in names:
                        continue
                    # append to dict
                    if pkgname and pkgver:
                        keys = [pkg_name_in_dict, pkg_version_in_dict, pkg_localdir_in_dict]
                        values = [pkgname[0], pkgver[0], dirpath]
                        tmp = dict(zip(keys, values))
                        pkgs_info.append(tmp)
        # Sort by name
        k = lambda s:s[pkg_name_in_dict]
        pkgs_info.sort(key=k)
        return pkgs_info

    def getOsVersion(self):
        """Return the local AliOS Things OS version string, or a falsy value.

        TODO: get the os version from
        include/aos/kernel.h:SYSINFO_KERNEL_VERSION?
        """
        return get_aos_version(root=self.aos_src_root)

    def getDependency(self, name):
        """Parse the $(NAME)_COMPONENTS entries from *name*'s aos.mk.

        Returns a (possibly empty) list of dependency names; continuation
        lines ending with '\\' are followed.
        """
        dep = []
        pkgdirs = list(OS_COMPONENT_DIRS)
        if not isinstance(name, str) or not name:
            # BUG FIX: the original called the undefined name 'warning';
            # the helper imported by this module is aos_warning.
            aos_warning("No pkg name provided!")
            return dep
        if name.startswith("mcu_") or \
           name.startswith("board_") or \
           name.startswith("arch_"):
            # Platform components live under OS_PLATFORM_DIRS as well.
            pkgdirs.extend(list(OS_PLATFORM_DIRS))
        pkgdirs = set(pkgdirs)
        pkg_found = False
        # First pass: locate the aos.mk whose NAME matches *name*.
        for dir in pkgdirs:
            dir_to_walk = os.path.abspath(self.aos_src_root + os.path.sep + dir)
            for dirpath, dirnames, filenames in os.walk(dir_to_walk, topdown=True):
                if not "aos.mk" in filenames:
                    continue
                else:
                    mkfile = os.path.abspath(dirpath + os.path.sep + 'aos.mk')
                    with open(mkfile, "r") as f:
                        data = f.read().splitlines()
                        for line in data:
                            if re.findall(r'^.*NAME\s*:?=.+', line):
                                pkgname = re.findall(r'.*=\s*(\S+)', line)
                                if pkgname and name == pkgname[0]:
                                    pkg_found = True
                                    break
                if pkg_found:
                    break
            if pkg_found:
                break
        # Second pass: collect $(NAME)_COMPONENTS values from that makefile.
        if pkg_found:
            with open(mkfile, "r") as f:
                l = f.readline()
                while l:
                    comps = re.findall(r'^\s*\$\(NAME\)_COMPONENTS.*=\s*(\S+)', l)
                    if comps:
                        comps = re.split(r'[\s]', comps[0])
                        if comps:
                            dep.extend(comps)
                        # Follow backslash line continuations.
                        while re.findall(r'.*\\\s*$', l):
                            l = f.readline()
                            comps = re.split(r'[\s]', l)
                            if comps:
                                dep.extend(comps)
                    l = f.readline()
        return dep

    def getMissingPackages(self, names):
        """Return (as a set) the entries of *names* with no local install."""
        local_found = self.getPackagesWithVersion(True, names)
        local_found = map(lambda x:x[pkg_name_in_dict], local_found)
        return set(names) - set(local_found)

    def getMissingVersionedPackages(self, namevers):
        """Return names from *namevers* lacking a version-matching install.

        namevers -- list of {"min":'x.x.x[.x]', "max":'x.x.x[.x]', "name":'xxx'}.
        A package is satisfied when a local install exists with
        min <= version <= max.
        """
        missing = []
        if not namevers:
            return []
        local_found = self.getPackagesWithVersion(True,
                                                  [nv[DEP_NAME] for nv in namevers])
        for p in namevers:
            matched = False
            name = p[DEP_NAME]
            minver = p[DEP_VER_MIN]
            maxver = p[DEP_VER_MAX]
            for lp in local_found:
                if name == lp[pkg_name_in_dict]:
                    ver = lp[pkg_version_in_dict]
                    if not version_greater_than(ver, maxver) and not version_greater_than(minver, ver):
                        matched = True
                        break
            if not matched:
                missing += [name]
        return missing
| from xml.etree import cElementTree | ||
def ns_cleanup(qn):
    """Return the bare tag name from a namespace-qualified XML tag.

    E.g. '{http://linux.duke.edu/metadata/repo}revision' -> 'revision'.
    Tags without a '}' are returned unchanged.
    """
    if '}' not in qn:
        return qn
    return qn.split('}')[1]
class RepoMDError(Exception):
    """Raised when a repomd.xml file is damaged or lacks requested data."""
    pass
class RepoData:
    """Represents anything beneath a <data> tag of a repomd.xml file.

    Captures the data type, location (base, href), checksums, timestamp,
    database version, sizes and any nested <delta> children.
    """
    def __init__(self, elem):
        self.type = None
        # BUG FIX: ElementTree elements with no children evaluate as falsy,
        # so the original 'if elem:' silently skipped childless <data>
        # elements (and boolean evaluation of Element is deprecated).
        # Compare explicitly against None instead.
        if elem is not None:
            self.type = elem.attrib.get('type')
        self.location = (None, None)
        self.checksum = (None,None) # type,value
        self.openchecksum = (None,None) # type,value
        self.timestamp = None
        self.dbversion = None
        self.size = None
        self.opensize = None
        self.deltas = []
        if elem is not None:
            self.parse(elem)

    def parse(self, elem):
        """Populate this object's attributes from the children of *elem*."""
        for child in elem:
            child_name = ns_cleanup(child.tag)
            if child_name == 'location':
                relative = child.attrib.get('href')
                base = child.attrib.get('base')
                self.location = (base, relative)
            elif child_name == 'checksum':
                csum_value = child.text
                csum_type = child.attrib.get('type')
                self.checksum = (csum_type,csum_value)
            elif child_name == 'open-checksum':
                csum_value = child.text
                csum_type = child.attrib.get('type')
                self.openchecksum = (csum_type, csum_value)
            elif child_name == 'timestamp':
                self.timestamp = child.text
            elif child_name == 'database_version':
                self.dbversion = child.text
            elif child_name == 'size':
                self.size = child.text
            elif child_name == 'open-size':
                self.opensize = child.text
            elif child_name == 'delta':
                # Nested deltas inherit the parent's data type.
                delta = RepoData(child)
                delta.type = self.type
                self.deltas.append(delta)
class RepoMD():
    """Parsed representation of a repomd.xml repository metadata file."""
    def __init__(self, repomd_xml):
        # Parsed state; populated by parse() when a path is given.
        self.timestamp = 0
        self.repoData = {}      # data type -> RepoData
        self.checksums = {}
        self.length = 0
        self.revision = None
        self.tags = {'content' : set(), 'distro' : {}, 'repo': set()}
        if repomd_xml:
            self.parse(repomd_xml)
    def parse(self, repomd_xml):
        """Parse the repomd.xml file at path *repomd_xml*.

        Raises RepoMDError when the XML is damaged.
        """
        with open(repomd_xml, "r") as f:
            # NOTE(review): cElementTree is removed in Python 3.9+;
            # consider xml.etree.ElementTree — confirm target versions.
            parser = cElementTree.iterparse(f)
            try:
                for event, elem in parser:
                    elem_name = ns_cleanup(elem.tag)
                    if elem_name == "data":
                        # One <data> entry per metadata type (primary, filelists, ...)
                        thisdata = RepoData(elem=elem)
                        self.repoData[thisdata.type] = thisdata
                    elif elem_name == "revision":
                        self.revision = elem.text
                    elif elem_name == "tags":
                        for child in elem:
                            child_name = ns_cleanup(child.tag)
                            if child_name == 'content':
                                self.tags['content'].add(child.text)
                            if child_name == 'distro':
                                cpeid = child.attrib.get('cpeid', '')
                                distro = self.tags['distro'].setdefault(cpeid,set())
                                distro.add(child.text)
            except SyntaxError as e:
                # ElementTree's ParseError subclasses SyntaxError, so this
                # catches malformed XML raised by iterparse.
                raise RepoMDError("Damaged repomd.xml file")
    def getData(self, type):
        """Return the RepoData entry for *type*; raise RepoMDError if absent."""
        if type in self.repoData:
            return self.repoData[type]
        else:
            raise RepoMDError("Requested datatype %s not available" % type)
| import os | ||
| import re | ||
| import sys | ||
| import bz2 | ||
| import gzip | ||
| import shutil | ||
| from aos.util import error, warning | ||
# Compression formats that decompress()/_decompress_chunked() can handle.
_available_compression = ['gz', 'bz2']
class MiscError(Exception):
    """Raised by helpers in this module (e.g. for unsupported compression)."""
    pass
def aos_print(msg):
    """Write *msg* and a trailing newline to stdout, flushing immediately."""
    stream = sys.stdout
    stream.write("%s\n" % msg)
    stream.flush()
def aos_info(msg):
    """Emit an informational line on stdout (newline appended, flushed)."""
    out = sys.stdout
    out.write("%s\n" % msg)
    out.flush()
def aos_warning(msg):
    """Forward *msg* to the aos.util warning() helper."""
    warning(msg)
def aos_error(msg):
    """Forward *msg* to the aos.util error() helper."""
    error(msg)
def version_greater_than(v1, v2):
    """Return True if dotted version string v1 > v2, else False.

    Components are compared numerically. Missing trailing components are
    treated as zero, so "1.0.0" is NOT greater than "1.0" (the original
    length-based tie-break wrongly reported it as greater). Returns False
    when either argument is empty/None.
    """
    if not v1 or not v2:
        return False
    a = [int(x) for x in v1.split(".")]
    b = [int(x) for x in v2.split(".")]
    # Pad the shorter version with zeros so lengths do not skew the compare.
    n = max(len(a), len(b))
    a += [0] * (n - len(a))
    b += [0] * (n - len(b))
    return a > b
def version_not_less_than(v1, v2):
    """Return True if dotted version string v1 >= v2, else False.

    (The original docstring wrongly described this as a '>' test.)
    Components are compared numerically; missing trailing components are
    treated as zero, so "1.0" >= "1.0.0" holds (the original length-based
    tie-break wrongly reported it as less). Returns False when either
    argument is empty/None.
    """
    if not v1 or not v2:
        return False
    a = [int(x) for x in v1.split(".")]
    b = [int(x) for x in v2.split(".")]
    # Pad the shorter version with zeros so lengths do not skew the compare.
    n = max(len(a), len(b))
    a += [0] * (n - len(a))
    b += [0] * (n - len(b))
    return a >= b
def _decompress_chunked(source, dest, ztype):
    """Stream-decompress *source* into *dest* using compression *ztype*.

    Raises MiscError when *ztype* is not an available compression type.
    """
    if ztype not in _available_compression:
        raise MiscError("%s compression not available" % ztype)
    openers = {'bz2': bz2.BZ2File, 'gz': lambda p: gzip.open(p, "rb")}
    opener = openers.get(ztype)
    if opener is not None:
        with opener(source) as read, open(dest, "wb") as write:
            shutil.copyfileobj(read, write)
def decompress(filename, dest=None):
    """Decompress *filename* into *dest* (or alongside it) and return the path.

    If *filename* ends in '.gz' or '.bz2' it is decompressed; otherwise
    *filename* is returned unchanged. When *dest* is not given, the
    output path is *filename* with the compression suffix removed.

    BUG fix: the original used str.replace('.gz', ''), which strips
    EVERY occurrence of the suffix (e.g. 'a.gz.gz' -> 'a'); only the
    trailing suffix is removed now. The error path also no longer tries
    to unlink an output file that was never created.
    """
    if filename.endswith('.gz'):
        ztype = 'gz'
        out = dest if dest else filename[:-len('.gz')]
    elif filename.endswith('.bz2'):
        ztype = 'bz2'
        out = dest if dest else filename[:-len('.bz2')]
    else:
        # Not compressed: hand back the original path untouched.
        return filename
    try:
        _decompress_chunked(filename, out, ztype)
    except BaseException:
        # Remove a partial output file, then re-raise the original error.
        if os.path.exists(out):
            os.unlink(out)
        raise
    return out
# Lazily-compiled matcher shared by all re_glob() calls.
_re_compiled_glob_match = None
def re_glob(s):
    """Return a match object if *s* contains shell wildcards (*, ?, [..]),
    else None.

    BUG fix: the pattern is now a raw string — '\\[' in a plain literal
    is an invalid escape sequence (DeprecationWarning, W605).
    """
    global _re_compiled_glob_match
    if _re_compiled_glob_match is None:
        _re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search
    return _re_compiled_glob_match(s)
class Package():
    """Placeholder for a generic installable package.

    NOTE(review): both methods are stubs — presumably intended to be
    implemented by format-specific package types (e.g. RPMFile);
    confirm the intended design before extending.
    """
    def __init__(self):
        pass
    def install(self):
        pass
| import os | ||
| import re | ||
| from aos.managers.constant import * | ||
| from aos.managers.misc import aos_warning, aos_error | ||
| from aos.util import debug | ||
| if PYURLPKG == 'urlgrabber': | ||
| from urlgrabber.grabber import URLGrabber | ||
| from urlgrabber.grabber import URLGrabError | ||
| elif PYURLPKG == 'requests': | ||
| import requests | ||
| ''' related definitions | ||
| ## general | ||
| PKG_QUERY_URL = "http://xxx/cube/getComponentVersionOfSystemVersion" | ||
| PKG_QUERY_HTTP_HEADER = "Content-Type:application/json" | ||
| PKG_QUERY_HTTP_METHOD = "POST" | ||
| ## ver query | ||
| PKG_VER_QUERY_API = "/getComponentVersionOfSystemVersion" | ||
| PKG_VER_QUERY_DATA_FORMAT = "{\"compName\": \"%s\", \"systemVersion\":\"%s\"}" | ||
| ''' | ||
class QueryServer():
    """HTTP client for the component query service.

    Wraps three POST APIs — version query, dependency query and
    component-list query — and parses their JSON-ish responses with
    regular expressions (the server wraps some payloads in strings,
    so a real JSON parser is not used here).
    """

    def __init__(self, url=None):
        # Base URL falls back to the project-wide default.
        if url:
            self.url = url
        else:
            self.url = PKG_QUERY_URL
        self.hdr = PKG_QUERY_HTTP_HEADER
        self.method = PKG_QUERY_HTTP_METHOD
        self.ver_api = PKG_VER_QUERY_API
        self.dep_api = PKG_DEP_QUERY_API
        self.clist_api = PKG_CLIST_QUERY_API
        self.ver_data_fmt = PKG_VER_QUERY_DATA_FORMAT
        self.dep_data_fmt = PKG_DEP_QUERY_DATA_FORMAT
        self.clist_data_fmt = PKG_CLIST_QUERY_DATA_FORMAT

    def _query_helper(self, api, d):
        """POST payload *d* to self.url + *api*.

        Returns the raw response body (bytes) on HTTP 200, or None on
        any failure. The single "Name:Value" header in self.hdr is
        split with regexes.
        """
        result = None
        hdrname = re.findall(r'(.+):.+', self.hdr)
        hdrvalue = re.findall(r'.+:(.+)', self.hdr)
        if not hdrname or not hdrvalue:
            # BUG fix: this referenced an undefined name 'hdr'.
            aos_warning("invalid http header provided: %s" % self.hdr)
            return None
        if PYURLPKG == 'urlgrabber':
            ug = URLGrabber(reget='simple', http_headers=((hdrname[0], hdrvalue[0]),))
            try:
                debug("url: %s, data: %s" % (self.url + api, d))
                f = ug.urlopen(self.url + api, data=d)
                result = f.read()
            except URLGrabError as e:
                aos_warning("Failed to connect the query server (error: %s)!" % format(e))
        elif PYURLPKG == 'requests':
            try:
                http_header = {hdrname[0]: hdrvalue[0]}
                r = requests.post(self.url + api, headers=http_header, data=d)
                if r.status_code == 200:
                    result = r.content
            except Exception as e:
                aos_warning("Failed to do http POST request for %s: %s" % (self.url + api, format(e)))
        else:
            aos_error("The url package %s is not supported!" % PYURLPKG)
        return result

    def query_ver(self, pkgname, osver):
        """Query available versions of *pkgname* for OS release *osver*.

        Server API /getComponentVersionOfSystemVersion responds e.g.
        {"success":true,"message":"...","result":["1.0.1.2","2.0.0"]}.
        Returns the list of version strings, or [] on failure.
        """
        data = self.ver_data_fmt % (pkgname, osver)
        v = self._query_helper(self.ver_api, data)
        if v:
            vstr = bytes.decode(v)
        if not v or not vstr.startswith("{\"success\":true"):
            aos_warning("Failed to query version information")
            return []
        vstr = re.findall(r'.+,\"result\":\[(.+)\]}', vstr)
        if vstr and vstr[0] != 'null':
            return [x.strip("\"") for x in vstr[0].split(',')]
        return []

    def query_dep(self, pkgname, pkgver):
        """Query dependencies of *pkgname* at version *pkgver*.

        Server API /getComponentDependency responds with a stringified
        list of {'max_version','min_version','name'} dicts. Returns a
        list of {DEP_NAME, DEP_VER_MIN, DEP_VER_MAX} dicts; missing
        bounds default to '0.0.0'. Returns [] on failure.
        """
        data = self.dep_data_fmt % (pkgname, pkgver)
        d = self._query_helper(self.dep_api, data)
        if d:
            dstr = bytes.decode(d)
        if not d or not dstr.startswith('{"success":true'):
            aos_warning("Failed to query version information")
            return []
        dstr = re.findall(r'.+,\"result\":\"\[(.+)\]\"}', dstr)
        if not dstr:
            return []
        ret_list = []
        for entry in dstr[0].split('},{'):
            # Locals renamed from max/min to avoid shadowing builtins.
            maxm = re.findall(r'.*max_version[\'\"]:[\'\"]([0-9|\.]*)[\'|\"].*', entry)
            minm = re.findall(r'.*min_version[\'\"]:[\'\"]([0-9|\.]*)[\'|\"].*', entry)
            name = re.findall(r'.*name[\'\"]:[\'\"]([a-zA-Z0-9]+)[\'|\"].*', entry)
            # Robustness fix: the original indexed [0] unconditionally
            # and crashed with IndexError on malformed entries.
            maxval = maxm[0] if maxm and maxm[0] else '0.0.0'
            minval = minm[0] if minm and minm[0] else '0.0.0'
            if name:
                ret_list.append(dict(zip([DEP_NAME, DEP_VER_MIN, DEP_VER_MAX],
                                         [name[0], minval, maxval])))
        return ret_list

    def query_pkginfo(self, osver, names):
        """Query the component list for OS release *osver*.

        Server API /getComponentListOfSystemVersion responds e.g.
        {"success":true,...,"componentList":[{"name":"rhino",
        "versionList":["1.0.1"]}, ...]}. Returns a name-sorted list of
        {pkg_name_in_dict, pkg_version_in_dict} dicts, filtered to
        *names* when provided. Returns [] on failure.
        """
        data = self.clist_data_fmt % (osver)
        p = self._query_helper(self.clist_api, data)
        if p:
            pstr = bytes.decode(p)
        if not p or not pstr.startswith('{"success":true'):
            aos_warning("Failed to query pkg list information for OS %s" % osver)
            return []
        pstr = re.findall(r'.+,\"componentList\":\[(.+)\]}', pstr)
        if not pstr:
            return []
        ret_list = []
        for entry in pstr[0].split('},{'):
            name = re.findall(r'.*name[\'\"]:\s*[\'"]([^\'"]+)[\'"].*', entry)
            # Extract "1.1.1", "2.2.2", ... from the versionList array.
            vers = re.findall(r'.*versionList[\'\"]:\s*\[([0-9\.\'",\s]+)\].*', entry)
            if not name:
                continue
            name = name[0]
            # When a filter list is provided, keep only matching names.
            if names and name not in names:
                continue
            if vers:
                for v in re.split(r', ', vers[0]):
                    ver = v.strip("\"")
                    if ver:
                        ret_list.append(dict(zip([pkg_name_in_dict, pkg_version_in_dict], [name, ver])))
        # Sort by component name for stable output.
        ret_list.sort(key=lambda s: s[pkg_name_in_dict])
        return ret_list
import os
import tempfile
import click
from aos.managers.misc import aos_error
from aos.managers import metadata
from aos.managers.constant import PYURLPKG
# BUG fix: the original compared against the misspelling 'urlgrabbder',
# so the urlgrabber backend was never imported even when configured.
if PYURLPKG == 'urlgrabber':
    from urlgrabber.grabber import URLGrabber
    from urlgrabber.grabber import URLGrabError
elif PYURLPKG == 'requests':
    import requests
class RepoError(Exception):
    """Raised when a repository download or rename operation fails."""
    pass
class Repo():
    """Accessor for a remote yum-style repository with a local cache dir.

    The best-effort helpers (getCompInfoFile, getMDFile, getPackage)
    deliberately swallow RepoError and return the local path, which may
    not exist if the download failed — callers are expected to check.
    """

    def __init__(self, repourl, cachedir):
        # Repository-relative location of the repomd index file.
        self.repoMDFile = "repodata/repomd.xml"
        self.url = repourl
        self.cachedir = cachedir

    def getFile(self, url, relative, localfile):
        """Download *url*/*relative* to *localfile*; return the local path.

        Raises RepoError on download failure (the partial file is
        removed first). Returns '' when the configured URL backend is
        not supported, or when the requests backend got a non-200
        status (best-effort behavior preserved from the original).
        """
        result = ''
        remote = url + '/' + relative
        dest_dir = os.path.dirname(localfile)
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        if PYURLPKG == 'urlgrabber':
            ug = URLGrabber(reget='simple')
            try:
                result = ug.urlgrab(remote, localfile, reget=None)
            except URLGrabError as e:
                if os.path.exists(localfile):
                    os.unlink(localfile)
                raise RepoError("Error downloading file %s: %s" % (localfile, e))
        elif PYURLPKG == 'requests':
            try:
                r = requests.get(remote)
                if r.status_code == 200:
                    with open(localfile, 'wb') as f:
                        f.write(r.content)
                    result = localfile
            except Exception as e:
                if os.path.exists(localfile):
                    os.unlink(localfile)
                raise RepoError("Error downloading file %s: %s" % (localfile, e))
        else:
            aos_error("The url package %s is not supported!" % PYURLPKG)
        return result

    def getCompInfoFile(self):
        """Download component_info_publish.db into the cache (best effort)."""
        comp_info_f = "component_info_publish.db"
        localfile = os.path.join(self.cachedir, comp_info_f)
        # Remove any stale copy so a failed download is detectable.
        if os.path.exists(localfile):
            os.unlink(localfile)
        try:
            self.getFile(self.url, comp_info_f, localfile)
        except RepoError:
            # Best effort: caller checks for the file's existence.
            pass
        return localfile

    def getRepoFile(self):
        """Download repodata/repomd.xml into the cache via a temp file.

        BUG fix: the original downloaded straight to the final path and,
        on failure, tried to unlink a tempfile.mktemp() name that was
        never created (raising FileNotFoundError inside the error
        handler). The download now goes to the temp name first so a
        failed transfer never clobbers an existing repomd.xml.
        """
        repomd_xml = self.repoMDFile
        localfile = os.path.join(self.cachedir, repomd_xml)
        tfname = tempfile.mktemp(prefix='repomd', suffix="tmp.xml",
                                 dir=os.path.dirname(localfile))
        try:
            result = self.getFile(self.url, repomd_xml, tfname)
        except Exception as e:
            if os.path.exists(tfname):
                os.unlink(tfname)
            raise RepoError("Error downloading file %s: %s" % (localfile, e))
        try:
            # os.rename fails on Windows if the target exists: clear it.
            if os.path.exists(localfile):
                os.unlink(localfile)
            os.rename(result, localfile)
        except OSError:
            if os.path.exists(tfname):
                os.unlink(tfname)
            raise RepoError("Error renaming file %s to %s" % (result, localfile))
        return localfile

    def getMDFile(self, mdtype):
        """Download one metadata file (primary/filelists/other) by type.

        Requires a previously-downloaded repomd.xml in the cache;
        raises RepoError when it is missing. Download is best effort.
        """
        repomd_xml = os.path.join(self.cachedir, self.repoMDFile)
        if not os.path.exists(repomd_xml):
            raise RepoError("No such file: %s" % repomd_xml)
        repomd = metadata.RepoMD(repomd_xml)
        data = repomd.getData(mdtype)
        (r_base, remote) = data.location
        localfile = os.path.join(self.cachedir, remote)
        try:
            self.getFile(self.url, remote, localfile)
        except RepoError:
            # Best effort: caller checks for the file's existence.
            pass
        return localfile

    def getPackage(self, package):
        """Download one rpm *package* into the cache (best effort)."""
        remote = package
        localfile = os.path.join(self.cachedir, package)
        try:
            self.getFile(self.url, remote, localfile)
        except RepoError:
            # Best effort: caller checks for the file's existence.
            pass
        return localfile
| import os | ||
| import sys | ||
| import gzip | ||
| from io import BytesIO | ||
| import struct | ||
| import shutil | ||
# Magic numbers: the 4-byte RPM lead signature and the 2-byte gzip header.
RPM_MAGIC = b'\xed\xab\xee\xdb'
GZIP_MAGIC = b'\x1f\x8b'
def gzip_decompress(data):
    """Return the gzip-decompressed bytes of *data*."""
    with gzip.GzipFile(fileobj=BytesIO(data)) as gzipper:
        return gzipper.read()
class RPMFile():
    """Minimal .rpm reader: locates the gzip payload and extracts it."""

    def __init__(self, rpmfile):
        # Path of the rpm package on disk.
        self.rpmfile = rpmfile

    def is_rpm(self, fileobj):
        """Consume the 96-byte lead and check it begins with the RPM magic."""
        lead = fileobj.read(96)
        return lead[0:4] == RPM_MAGIC

    def rpm2cpio(self):
        """Return the decompressed cpio archive embedded in this rpm.

        Raises IOError when the file is not an rpm or no gzip payload
        can be located. Only gzip-compressed payloads are supported.
        BUG fix: removed an unreachable 'return data' that followed an
        if/else in which both branches already exited.
        """
        with open(self.rpmfile, "rb") as f:
            if not self.is_rpm(f):
                raise IOError("The input file is not a RPM package")
            data = f.read()
        # NOTE(review): scans for the first gzip magic after the lead; a
        # header byte sequence could in principle match first — confirm.
        idx = data.find(GZIP_MAGIC)
        if idx == -1:
            raise IOError("Unknown compress data format")
        return gzip_decompress(data[idx:])

    def install(self, destdir):
        """Extract every payload member of the rpm beneath *destdir*."""
        if not os.path.isdir(destdir):
            os.makedirs(destdir)
        data = self.rpm2cpio()
        cf = CpioFile()
        cf.unpack_from(data)
        for member in cf.members:
            filename = member.name.decode()
            destfile = os.path.join(destdir, filename)
            basedir = os.path.dirname(destfile)
            if not os.path.exists(basedir):
                os.makedirs(basedir)
            with open(destfile, "wb") as f:
                f.write(member.content)
class CpioFile():
    """Class representing an entire cpio ('newc') archive.

    Members are populated by unpack_from(); the trailing 'TRAILER!!!'
    sentinel entry is removed from the parsed result.
    """

    def __init__(self):
        # BUG fix: dropped the class-level mutable `_members = []`
        # attribute. It was always shadowed by this instance attribute,
        # but a shared mutable class default is a classic pitfall.
        self._members = []

    @property
    def members(self):
        """List of CpioMember entries parsed so far."""
        return self._members

    @property
    def names(self):
        """Decoded file names of all parsed members."""
        return [member.name.decode() for member in self.members]

    def unpack_from(self, block, offset=0):
        """Parse members from *block* starting at *offset* until the trailer."""
        pointer = offset
        while 'TRAILER!!!' not in self.names:
            cm = CpioMember()
            self.members.append(cm.unpack_from(block, pointer))
            pointer += cm.size
        # Discard the trailer sentinel itself.
        del self.members[-1]
class CpioMember():
    """One entry of a cpio 'newc' archive (110-byte ASCII-hex header)."""

    # Header layout: 6-byte magic followed by thirteen 8-char hex fields.
    coder = struct.Struct(b'6s8s8s8s8s8s8s8s8s8s8s8s8s8s')
    name = None
    magic = None
    devmajor = None
    devminor = None
    ino = None
    mode = None
    uid = None
    gid = None
    nlink = None
    rdevmajor = None
    rdevminor = None
    mtime = None
    filesize = None

    def unpack_from(self, block, offset=0):
        """Decode one member starting at *offset* in *block*; return self.

        Raises Exception when the header's check field is non-zero.
        """
        fields = self.coder.unpack_from(block, offset)
        self.magic = fields[0]
        (self.ino, self.mode, self.uid, self.gid, self.nlink, self.mtime,
         self.filesize, self.devmajor, self.devminor, self.rdevmajor,
         self.rdevminor, namesize, check) = [int(v, 16) for v in fields[1:]]
        name_begin = offset + self.coder.size
        name_end = name_begin + namesize
        # File data starts at the next 4-byte boundary after the name.
        data_begin = name_end + ((4 - (name_end % 4)) % 4)
        data_end = data_begin + self.filesize
        self.name = block[name_begin:name_end - 1]  # drop the trailing NUL
        self.content = block[data_begin:data_end]
        if check != 0:
            raise Exception("Checksum Error!")
        return self

    @property
    def size(self):
        """Total on-archive size of this member, padding included."""
        total = self.coder.size
        total += len(self.name) + 1
        total += ((4 - (total % 4)) % 4)
        total += self.filesize
        total += ((4 - (total % 4)) % 4)
        return total
| import os | ||
| from aos.managers import misc | ||
| try: | ||
| import sqlite3 as sqlite | ||
| except ImportError: | ||
| import sqlite | ||
def executeSQL(cursor, query, params=None):
    """Run *query* on *cursor*, binding *params* when provided."""
    args = (query,) if params is None else (query, params)
    return cursor.execute(*args)
class SqliteDB():
    """Thin wrapper owning one sqlite connection and one cursor."""

    def __init__(self, sqlitefile):
        connection = sqlite.connect(sqlitefile)
        self._conn = connection
        self.cursor = connection.cursor()

    def close(self):
        """Close the cursor, then the underlying connection."""
        self.cursor.close()
        self._conn.close()
class PrimaryDB(SqliteDB):
    """Query helper for the yum 'primary' sqlite database.

    The packages table columns include: pkgKey, pkgId, name, arch,
    version, epoch, release, summary, description, location_href.

    SECURITY fix: name filters were interpolated directly into SQL
    ("name = '%s'" % name); they are now bound with '?' placeholders.
    The *columns* lists come from code, not user input, so joining them
    into the SELECT clause is acceptable.
    """

    def __init__(self, sqlitefile):
        SqliteDB.__init__(self, sqlitefile)
        self.pkglist = {}

    def getPackages(self, names=[]):
        """Return full package dicts, optionally filtered by *names*."""
        columns = ["pkgId", "pkgKey", "name", "epoch", "version", "release",
                   "arch", "location_href"]
        # Consolidated onto the shared query builder (the original
        # duplicated the same query-building logic inline).
        return self.getPackagesWithSpecificNameAndColumn(names, columns)

    def getPackagesWithSpecificNameAndColumn(self, names=[], columns=[]):
        """Return rows as dicts of *columns*, filtered to *names* when given."""
        query = "SELECT %s from packages" % ",".join(columns)
        params = None
        if names:
            # One bound placeholder per requested name.
            query += " WHERE %s" % " OR ".join(["name = ?"] * len(names))
            params = tuple(names)
        rows = executeSQL(self.cursor, query, params)
        return [dict(zip(columns, row)) for row in rows]

    def getPackages2(self, names=[]):
        """Backward-compatible alias of getPackages()."""
        return self.getPackages(names)

    def getPackagesWithVersion(self, names=[]):
        """Return (name, version, pkgKey) dicts, filtered by *names*."""
        return self.getPackagesWithSpecificNameAndColumn(
            names, ["name", "version", "pkgKey"])
class FilelistsDB(SqliteDB):
    """Query helper for the yum 'filelists' sqlite database.

    SECURITY fix: pkgKey was interpolated directly into the SQL string;
    it is now bound with a '?' placeholder.
    """

    def __init__(self, sqlitefile):
        SqliteDB.__init__(self, sqlitefile)

    def getFiles(self, pkgKey):
        """Return filelist rows (pkgKey, dirname, filenames) for *pkgKey*.

        Returns [] for a falsy *pkgKey*.
        """
        filelist = []
        properties = ["pkgKey", "dirname", "filenames"]
        if not pkgKey:
            return filelist
        query = "select %s from filelist where pkgKey = ?" % ",".join(properties)
        rows = executeSQL(self.cursor, query, (pkgKey,))
        for row in rows:
            filelist.append(dict(zip(properties, row)))
        return filelist
class OtherDB(SqliteDB):
    """Wrapper for the yum 'other' sqlite database (changelog data).

    Currently a placeholder — no real queries are implemented.
    """
    def __init__(self, sqlitefile):
        SqliteDB.__init__(self, sqlitefile)
    def test(self):
        # Placeholder; intentionally does nothing.
        pass
# Ad-hoc manual test driver for the managers package: downloads a sample
# rpm from the configured repo and installs it under /tmp/alios.
# NOTE(review): performs real network and filesystem side effects when
# imported/run — not part of the automated test suite.
import os
import sys
import json
# Make the package importable when this file is run directly from its dir.
sys.path.insert(0, os.path.split(os.path.realpath(__file__))[0] + "/../../")
from aos.constant import OS_REPO, OS_CACHE
from aos.managers import addon
from aos.managers import repo
from aos.managers import sqlitedb
from aos.managers import rpmfile
# Populate the local metadata cache, then dump the full package list.
c = addon.Cache()
c.create()
db = sqlitedb.PrimaryDB(c.primary_db)
print(json.dumps(db.getPackages([]), indent=4))
#db = sqlitedb.FilelistsDB(c.filelists_db)
#print(db.getFiles('603'))
# Download one sample rpm and extract it to /tmp/alios.
r = repo.Repo(OS_REPO, OS_CACHE)
localfile = r.getPackage("activation-1.0.0-r0.aos.noarch.rpm")
#r = rpm.RPMFile("test/activation-1.0.0-r0.aos.noarch.rpm")
#r = rpmfile.RPMFile("test/buildsystem-1.0.1.3-r0.aos.noarch.rpm")
r = rpmfile.RPMFile(localfile)
r.install("/tmp/alios")
import os, sys, re, subprocess
try:
    from aos.usertrace.report import Report
    from aos.constant import AOS_INVESTIGATION_FILE, DEBUG_PRINT
    from aos.util import debug, get_locale
except Exception as e:
    # BUG fix: the message previously read 'do_repot'.
    print("Failure when import modules in do_report: %s" % format(e))
    sys.exit(1)
def is_report_enabled():
    """Return True when the investigation file contains an affirmative
    'participate:' entry; any read/parse error yields False."""
    if not os.path.isfile(AOS_INVESTIGATION_FILE):
        return False
    try:
        with open(AOS_INVESTIGATION_FILE, 'r') as f:
            for raw in f.read().splitlines():
                stripped = raw.strip()
                if not stripped.startswith('participate:'):
                    continue
                match = re.findall(r'^participate:\s*(\S+)', stripped)
                if not match:
                    # Malformed entry: stop scanning, treat as opt-out.
                    break
                answer = match[0].strip()
                debug("participate? %s" % answer)
                if answer in ('Yes', 'Y', 'yes', 'y'):
                    return True
    except Exception:
        pass
    return False
def do_report(args=None):
    """Spawn this module as a detached child to send a usage report.

    No *args* reports the initial install; a 3-element *args* list
    reports an operation (type, content, result). Any other shape is
    ignored. Failures are logged via debug() and swallowed.
    """
    if not args:
        # sys.executable instead of bare 'python': robust when 'python'
        # is absent from PATH (e.g. Windows py launcher setups).
        cmd = [sys.executable, __file__]
    elif len(args) == 3:
        cmd = [sys.executable, __file__] + args
    else:
        return
    try:
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        if DEBUG_PRINT:
            out, err = proc.communicate()
            if out:
                debug(out.decode(get_locale()))
            if err:
                debug(err.decode(get_locale()))
    except Exception as e:
        # BUG fix: the original bare 'except:' referenced 'e' which was
        # never bound, raising NameError inside the handler itself.
        debug("Failure when reporting: %s" % format(e))
| op_report = {'op': '', 'content': '', 'result': ''} | ||
def format_escaped(str):
    """Normalize *str* for embedding in a JSON string payload.

    Backslashes become forward slashes and newlines become literal
    two-character '\\n' sequences; falsy input yields ''.
    """
    if not str:
        return ''
    flattened = str.replace('\\', '/')
    # Join the lines with an escaped newline so the JSON stays one line.
    return '\\n'.join(flattened.splitlines())
def init_op():
    """Reset the pending operation record.

    Currently a no-op: report_op() performs the reset itself after
    flushing, so the commented code below was retired.
    """
    #global op_report
    #op_report = {'op': '', 'content': '', 'result': ''}
    pass
def set_op(op=None, content=None, result=None):
    """Record the non-empty fields of the pending operation report,
    escaping each value for JSON embedding."""
    global op_report
    for key, value in (('op', op), ('content', content), ('result', result)):
        if value:
            op_report[key] = format_escaped(value)
def report_op():
    """Flush the pending operation report (when complete) and reset it."""
    global op_report
    if op_report['op'] and op_report['result']:
        payload = [op_report['op'], op_report['content'], op_report['result']]
        do_report(payload)
    # Always start fresh for the next operation.
    op_report = {'op': '', 'content': '', 'result': ''}
if __name__ == '__main__':
    # Child-process entry point (spawned by do_report): no argv values
    # means "report the initial install"; three values mean "report an
    # operation" (type, content, result). Anything else exits cleanly.
    if is_report_enabled():
        reporter = Report()
        if len(sys.argv) == 1:
            if not reporter.report_install():
                sys.exit(1)
        elif len(sys.argv) == 4:
            if not reporter.report_operate(sys.argv[1], sys.argv[2], sys.argv[3]):
                sys.exit(1)
    sys.exit(0)
import os, sys, time, re
try:
    # CLEANUP: 'import requests' appeared twice in the original; the
    # duplicate and a dead commented 'import urllib' were removed.
    import requests
    import platform
    import json
    import subprocess
    from uuid import getnode
    from aos.constant import AOS_SERVER_URL, AOS_HTTP_HEADER, AOS_HTTP_METHOD
    from aos.util import debug
except Exception as e:
    print("Failure when importing module in report: %s" % format(e))
    sys.exit(1)
# AOS query/report server
SERVER_URL = AOS_SERVER_URL
HTTP_HEADER = AOS_HTTP_HEADER #"Content-Type:application/json"
REPORT_API = "/reportInfo"
# uCube install report
# JSON template filled by Report.report_install(); built with line
# continuations so the payload stays a single-line string.
UCUBE_INSTALL_REPORT_DATA_FMT = "\
{\
\"macaddr\": \"%s\",\
\"hostos\": {\
\"name\": \"%s\",\
\"version\": \"%s\"\
},\
\"ip\": \"%s\",\
\"location\": \"%s\",\
\"ucubever\": \"%s\",\
\"terminal\": \"%s\",\
\"operateType\":\"cube install\"\
}"
# uCube operation report
# JSON template filled by Report.report_operate().
UCUBE_OPERATE_REPORT_DATA_FMT = "\
{\
\"macaddr\": \"%s\",\
\"hostos\": {\
\"name\": \"%s\",\
\"version\": \"%s\"\
},\
\"ucubever\": \"%s\",\
\"terminal\": \"%s\",\
\"operateType\":\"%s\",\
\"operateContent\":\"%s\",\
\"operateResult\":\"%s\"\
}"
def get_mac():
    """Return this host's MAC address as colon-separated lowercase hex."""
    raw = "%012x" % getnode()
    pairs = (raw[i:i + 2] for i in range(0, 12, 2))
    return ':'.join(pairs)
# return timestamp in format: 'UTC+8 2020-02-11 00:00:00'
def get_timestamp():
    """Return local time prefixed with its UTC offset, e.g.
    'UTC+8 2020-02-11 00:00:00'."""
    # timestamp, convert to '1970-01-01 00:00:00' format
    now_local = time.localtime(int(time.time()))
    nowstr = time.strftime("%Y-%m-%d %H:%M:%S", now_local)
    # timezone, convert to 'UTC+/-xx' format
    # BUG fix: Python 3 '/' is true division, which produced floats like
    # 'UTC+8.0'; floor division keeps the original whole-hour intent.
    offset = (0 - time.timezone) // 3600
    tzstr = 'UTC'
    if offset >= 0:
        tzstr += '+'
    tzstr += str(offset)
    return ' '.join([tzstr, nowstr])
def get_hostos():
    """Return (system name, full platform description) for the host OS."""
    return platform.system(), platform.platform()
# Might be very time consuming, ensure limit of using it
def get_location():
    """Best-effort IP geolocation via geolocation-db.com.

    Returns (ip, country_code, city); any field that cannot be
    determined is the string 'unknown'. Network errors are swallowed.
    """
    ip = country = city = 'unknown'
    url = "https://geolocation-db.com/json"
    try:
        response = requests.get(url, timeout=10)
        if response.status_code != 200:
            return ip, country, city
        data = json.loads(response.text)
        # Example payload keys: 'IPv4', 'city', 'state', 'country_code'.
        if data:
            ip = data.get(u'IPv4', ip)
            city = data.get(u'city', city)
            if not city:
                # Fall back to the state/province when city is empty.
                city = data.get(u'state', city)
            country = data.get(u'country_code', country)
    except Exception:
        pass
    return ip, country, city
def get_ucube_ver():
    """Return the installed aos-cube package version string."""
    # Imported lazily to avoid a circular import at module load time.
    from aos.__init__ import __version__
    return __version__
def get_terminal():
    """Best-effort detection of the terminal/shell the user runs in.

    Returns one of 'git bash', 'CMD', 'PowerShell', 'sh', 'bash',
    'csh', 'ksh', 'tcsh', 'zsh', or 'unknown' when detection fails.
    All probing errors are swallowed.
    """
    term = 'unknown'
    try:
        hostos = platform.system()
        if hostos == 'Windows':
            # BUG fix: 'aos' was never imported in this module, so every
            # aos.util.exec_cmd call raised NameError (masked by the
            # broad except and always falling through to 'unknown').
            import aos.util
            # Git bash ships a Unix 'which'; CMD/PowerShell do not.
            try:
                out, err = aos.util.exec_cmd(['which ls'])
                if not err and out and 'ls' in out:
                    return 'git bash'
            except Exception:
                pass
            # This line echoes 'CMD' under cmd.exe and 'PowerShell'
            # under powershell, thanks to their differing parsers.
            cmd = '(dir 2>&1 *`|echo CMD);&<# rem #>echo PowerShell'
            try:
                out, err = aos.util.exec_cmd(cmd)
                if not err and out:
                    if 'CMD' in out:
                        return 'CMD'
                    elif 'PowerShell' in out:
                        return 'PowerShell'
            except Exception:
                pass
        elif hostos == 'Linux' or hostos == 'Darwin':
            shell_info = {'/bin/sh': 'sh',
                          '/bin/bash': 'bash',
                          '/bin/csh': 'csh',
                          '/bin/ksh': 'ksh',
                          '/bin/tcsh': 'tcsh',
                          '/bin/zsh': 'zsh'}
            try:
                out = subprocess.check_output('echo $0', shell=True)
                # BUG fix: check_output returns bytes on Python 3; the
                # original compared bytes against the str keys above,
                # so the lookup could never match.
                out = out.strip().decode()
                if out in shell_info:
                    return shell_info[out]
            except Exception:
                pass
    except Exception:
        pass
    return term
# Host facts gathered once at import time and reused by every report
# (get_terminal may spawn subprocesses, so avoid re-running per report).
_mac = get_mac()
_hostname, _hostver = get_hostos()
_ucubever = get_ucube_ver()
_terminal = get_terminal()
class Report():
    """Sends install and operation usage reports to the AOS server."""

    def __init__(self, url=None):
        # Default to the project-wide server URL when none is given.
        self.url = url if url else SERVER_URL
        self.hdr = HTTP_HEADER

    def _report_helper(self, api, d):
        """POST payload *d* to self.url + *api*.

        Returns the response body on HTTP 200, or None on any failure.
        """
        result = None
        names = re.findall(r'(.+):.+', self.hdr)
        values = re.findall(r'.+:(.+)', self.hdr)
        if not names or not values:
            return None
        try:
            headers = {names[0]: values[0]}
            debug("reporting:\nurl - %s, headers - %s, data - %s" %
                  (self.url + api, format(headers), d))
            body = d.encode(encoding='utf-8')
            resp = requests.post(self.url + api, headers=headers, data=body)
            debug("reported, return code: %d, return msg: %s" % (resp.status_code, resp.content))
            if resp.status_code == 200:
                result = resp.content
        except Exception as e:
            debug("exception in _report_helper: %s" % format(e))
        return result

    def report_install(self):
        """Report a fresh aos-cube installation (includes geolocation)."""
        _ip, _country, _city = get_location()
        payload = UCUBE_INSTALL_REPORT_DATA_FMT % (_mac, _hostname,
                                                   _hostver, _ip,
                                                   '-'.join([_country, _city]),
                                                   _ucubever, _terminal)
        return self._report_helper(REPORT_API, payload)

    def report_operate(self, optype, op, opresult):
        """Report one user operation (type, content, result)."""
        payload = UCUBE_OPERATE_REPORT_DATA_FMT % (_mac, _hostname,
                                                   _hostver, _ucubever, _terminal,
                                                   optype, op, opresult)
        return self._report_helper(REPORT_API, payload)
| Metadata-Version: 1.2 | ||
| Name: aos-cube | ||
| Version: 0.3.11 | ||
| Version: 0.5.8 | ||
| Summary: aos command line tool for AliOS-Things development. | ||
| Home-page: UNKNOWN | ||
| Home-page: https://pypi.org/project/aos-cube/ | ||
| Author: Alibaba | ||
@@ -174,3 +174,3 @@ Author-email: aliosthings@service.aliyun.com | ||
| Description: aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking MXCHIP aos's own build system and export functions, among other operations. | ||
| Description: aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking AliOS Things's own build system and export functions, among other operations. | ||
@@ -177,0 +177,0 @@ ## Installation |
@@ -9,1 +9,2 @@ pyserial | ||
| configparser | ||
| requests |
@@ -18,8 +18,27 @@ LICENSE | ||
| aos/commands/devices.py | ||
| aos/commands/install.py | ||
| aos/commands/list.py | ||
| aos/commands/make.py | ||
| aos/commands/monitor.py | ||
| aos/commands/open.py | ||
| aos/commands/ota.py | ||
| aos/commands/pack.py | ||
| aos/commands/remove.py | ||
| aos/commands/upgrade.py | ||
| aos/commands/upload.py | ||
| aos/managers/__init__.py | ||
| aos/managers/addon.py | ||
| aos/managers/constant.py | ||
| aos/managers/localpkg.py | ||
| aos/managers/metadata.py | ||
| aos/managers/misc.py | ||
| aos/managers/package.py | ||
| aos/managers/queryserver.py | ||
| aos/managers/repo.py | ||
| aos/managers/rpmfile.py | ||
| aos/managers/sqlitedb.py | ||
| aos/managers/test.py | ||
| aos/usertrace/__init__.py | ||
| aos/usertrace/do_report.py | ||
| aos/usertrace/report.py | ||
| aos_cube.egg-info/PKG-INFO | ||
@@ -26,0 +45,0 @@ aos_cube.egg-info/SOURCES.txt |
+3
-3
@@ -1,8 +0,8 @@ | ||
| import sys | ||
| import sys, os | ||
| __version__ = "0.3.11" | ||
| __version__ = "0.5.8" | ||
| __title__ = "aos-cube" | ||
| __description__ = ( | ||
| "aos command line tool for AliOS-Things development.") | ||
| __url__ = "" | ||
| __url__ = "https://pypi.org/project/aos-cube/" | ||
@@ -9,0 +9,0 @@ __author__ = "Alibaba" |
+8
-6
@@ -6,3 +6,4 @@ import os, sys | ||
| from imp import reload | ||
| if sys.version_info[0] == 2: | ||
| from imp import reload | ||
| from aos import __version__, __email__ | ||
@@ -73,7 +74,8 @@ from aos.util import error, ProcessException | ||
| def main(): | ||
| try: | ||
| reload(sys) | ||
| sys.setdefaultencoding('UTF8') | ||
| except: | ||
| pass | ||
| if sys.version_info[0] == 2: | ||
| try: | ||
| reload(sys) | ||
| sys.setdefaultencoding('UTF8') | ||
| except: | ||
| pass | ||
@@ -80,0 +82,0 @@ # Convert the "\\" to "/" on Windows for AOS_SDK_PATH |
| import os, sys | ||
| import click | ||
| from aos.util import cd_aos_root, error, popen | ||
| from aos.constant import CHECK_WRAPPER | ||
| from aos.util import cd_aos_root, error, simple_error, pqueryerr, get_locale | ||
| from aos.constant import CHECK_WRAPPER, NOT_INSIDE_SDK_HINT | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| # Make command | ||
| @click.command("check", short_help="Call various check scripts, depends on what check implemented by current release ...") | ||
| @click.command("check", short_help="Do code check", | ||
| help="Call various check scripts, depends on what check " | ||
| "implemented by current release.\n\nPlease try " | ||
| "'aos check help' to explore more...") | ||
| @click.argument("args", required=False, nargs=-1, metavar="[ARGS...]") | ||
@@ -19,4 +23,10 @@ def cli(args): | ||
| if ret != 'success': | ||
| error("not in AliOS-Things source code directory") | ||
| error(NOT_INSIDE_SDK_HINT) | ||
| cmd_content = '' | ||
| if args: | ||
| cmd_content += ' '.join(list(args)) | ||
| set_op(op='check', content=cmd_content) | ||
| source_root = os.getcwd() | ||
@@ -27,7 +37,13 @@ if os.path.isdir(original_dir): | ||
| # Run check scripts | ||
| check_wrapper = "%s/%s" % (source_root, CHECK_WRAPPER) | ||
| check_wrapper = os.path.sep.join([source_root, CHECK_WRAPPER]) | ||
| print("check_wrapper is %s" % check_wrapper) | ||
| if os.path.isfile(check_wrapper): | ||
| cmd = ["python", check_wrapper] + list(args) | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failure when executing %s, error: %s" % (cmd, err.decode(get_locale()))) | ||
| else: | ||
| error("No check scripts found for current release!") | ||
| set_op(result='success') | ||
| report_op() |
+136
-16
| import os | ||
| import click | ||
| from aos.util import cd_aos_root, error, popen | ||
| from aos.constant import GEN_SAL_STAGING, GEN_NEWPROJECT, GEN_APPSOURCE | ||
| from aos.util import cd_aos_root, error, simple_error, get_locale, \ | ||
| pqueryerr, popen, error, locale_to_unicode | ||
| from aos.constant import GEN_SAL_STAGING, GEN_MAL_STAGING, \ | ||
| GEN_NEWPROJECT, GEN_APPSOURCE, \ | ||
| GEN_NEW_COMPONENT, NO_SDK_HINT, \ | ||
| NOT_INSIDE_SDK_HINT | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| @click.group(short_help="Create project or component") | ||
@@ -31,8 +35,11 @@ @click.pass_context | ||
| cmd_content = 'saldriver ' + ' '.join(args) | ||
| set_op(op='create', content=cmd_content) | ||
| # Get aos source root directory | ||
| ret, original_dir = cd_aos_root() | ||
| if ret != 'success': | ||
| error("not in AliOS-Things source code directory") | ||
| error(NOT_INSIDE_SDK_HINT) | ||
| source_root = os.getcwd() | ||
| source_root = os.path.abspath(os.getcwd()) | ||
| if os.path.isdir(original_dir): | ||
@@ -42,9 +49,57 @@ os.chdir(original_dir) | ||
| # Run script GEN_SAL_STAGING | ||
| gen_sal_staging = "%s/%s" % (source_root, GEN_SAL_STAGING) | ||
| gen_sal_staging = os.path.join(source_root, GEN_SAL_STAGING) | ||
| if os.path.isfile(gen_sal_staging): | ||
| cmd = ["python", gen_sal_staging] + list(args) | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to generate SAL driver, error: %s" % err.decode(get_locale())) | ||
| else: | ||
| error("No %s found for current release!" % gen_sal_staging) | ||
| set_op(result='success') | ||
| report_op() | ||
| # Create mal driver from template | ||
| @cli.command("maldriver", short_help="Create MAL driver from template") | ||
| @click.argument("drivername", metavar="[DRIVERNAME]") | ||
| @click.option("-m", "--mfname", help="The manufacturer of device") | ||
| @click.option("-t", "--devicetype", required=True, | ||
| type=click.Choice(["gprs", "wifi", "lte", "nbiot", "eth", "other"]), help="The type of device") | ||
| @click.option("-a", "--author", help="The author of driver") | ||
| def create_mal_driver(drivername, mfname, devicetype, author): | ||
| """ Create MAL driver staging code from template """ | ||
| args = [drivername] | ||
| if mfname: | ||
| args += ["-m%s" % mfname] | ||
| if devicetype: | ||
| args += ["-t%s" % devicetype] | ||
| if author: | ||
| args += ["-a%s" % author] | ||
| cmd_content = 'maldriver ' + ' '.join(args) | ||
| set_op(op='create', content=cmd_content) | ||
| # Get aos source root directory | ||
| ret, original_dir = cd_aos_root() | ||
| if ret != 'success': | ||
| error(NOT_INSIDE_SDK_HINT) | ||
| source_root = os.path.abspath(os.getcwd()) | ||
| if os.path.isdir(original_dir): | ||
| os.chdir(original_dir) | ||
| # Run script GEN_MAL_STAGING | ||
| gen_mal_staging = os.path.join(source_root, GEN_MAL_STAGING) | ||
| if os.path.isfile(gen_mal_staging): | ||
| cmd = ["python", gen_mal_staging] + list(args) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to generate MAL driver, error: %s" % err.decode(get_locale())) | ||
| else: | ||
| error("No %s found for current release!" % gen_mal_staging) | ||
| set_op(result='success') | ||
| report_op() | ||
| # Create project | ||
@@ -54,3 +109,3 @@ @cli.command("project", short_help="Create user project") | ||
| @click.option("-b", "--board", required=True, help="Board for creating project") | ||
| @click.option("-d", "--projectdir", required=True, help="The project directory") | ||
| @click.option("-d", "--projectdir", help="The project directory") | ||
| @click.option("-t", "--templateapp", help="Template application for creating project") | ||
@@ -67,34 +122,99 @@ def create_project(projectname, board, projectdir, templateapp): | ||
| cmd_content = 'project ' + ' '.join(args) | ||
| set_op(op='create', content=cmd_content) | ||
| if "AOS_SDK_PATH" not in os.environ: | ||
| error("No AliOS SDK installed") | ||
| error(NO_SDK_HINT) | ||
| else: | ||
| aos_sdk_path = os.environ["AOS_SDK_PATH"] | ||
| # AOS_SDK_PATH from environ may not unicode, so convert | ||
| aos_sdk_path = locale_to_unicode(aos_sdk_path) | ||
| gen_newproject = "%s/%s" % (aos_sdk_path, GEN_NEWPROJECT) | ||
| aos_sdk_path = os.path.abspath(aos_sdk_path) | ||
| gen_newproject = os.path.join(aos_sdk_path, GEN_NEWPROJECT) | ||
| if os.path.isfile(gen_newproject): | ||
| cmd = ["python", gen_newproject] + list(args) | ||
| popen(cmd) | ||
| ret = popen(cmd) | ||
| if ret != 0: | ||
| error("Failed to generate project, errorcode: %d" % ret) | ||
| else: | ||
| error("No %s found for current release!" % gen_newproject) | ||
| set_op(result='success') | ||
| report_op() | ||
| # Create sources | ||
| @cli.command("source", short_help="Add component sources to build") | ||
| @click.argument("sourcelist", metavar="[\"SOURCELIST\"]") | ||
| @click.argument("sourcelist", nargs=-1, metavar="<SOURCELIST>") | ||
| @click.option("-m", "--makefile", help="Target makefile to update") | ||
| def add_appsource(sourcelist, makefile): | ||
| """ Add component sources to aos.mk """ | ||
| args = [sourcelist] | ||
| args = [] | ||
| if not sourcelist: | ||
| return | ||
| else: | ||
| args += sourcelist | ||
| if makefile: | ||
| args += ["-m %s" % makefile] | ||
| cmd_content = 'source ' + ' '.join(args) | ||
| set_op(op='create', content=cmd_content) | ||
| if "AOS_SDK_PATH" not in os.environ: | ||
| error("No AliOS SDK installed") | ||
| error(NO_SDK_HINT) | ||
| else: | ||
| aos_sdk_path = os.environ["AOS_SDK_PATH"] | ||
| script = "%s/%s" % (aos_sdk_path, GEN_APPSOURCE) | ||
| aos_sdk_path = os.path.abspath(aos_sdk_path) | ||
| script = os.path.join(aos_sdk_path, GEN_APPSOURCE) | ||
| if os.path.isfile(script): | ||
| cmd = ["python", script] + list(args) | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to add source, error: %s" % err.decode(get_locale())) | ||
| else: | ||
| error("No %s found for current release!" % script) | ||
| set_op(result='success') | ||
| report_op() | ||
| # create new component | ||
| @cli.command("component", short_help="Create a new component by using template.") | ||
| @click.argument("name", nargs=1, metavar="<name>") | ||
| @click.option("-t", "--comptype", required=True, type=click.Choice(["bus", "dm", "fs", "gui", "language", "linkkit", "network", "peripherals", "security", "service", "utility", "wireless", "generals"]), help="The type of the component") | ||
| @click.option("-m", "--mfname", help="The manufacturer of the component") | ||
| @click.option("-a", "--author", help="The author of the component") | ||
| def create_component(name, comptype, mfname, author): | ||
| """ Create a new component by using template """ | ||
| args = [name, "-t%s" % comptype] | ||
| if mfname: | ||
| args += ["-m%s" % mfname] | ||
| if author: | ||
| args += ["-a%s" % author] | ||
| cmd_content = 'component ' + ' '.join(args) | ||
| set_op(op='create', content=cmd_content) | ||
| # Get aos source root directory | ||
| ret, original_dir = cd_aos_root() | ||
| if ret != 'success': | ||
| error(NOT_INSIDE_SDK_HINT) | ||
| source_root = os.path.abspath(os.getcwd()) | ||
| if os.path.isdir(original_dir): | ||
| os.chdir(original_dir) | ||
| # Run script GEN_NEW_COMPONENT | ||
| gen_new_component = os.path.join(source_root, GEN_NEW_COMPONENT) | ||
| if os.path.isfile(gen_new_component): | ||
| cmd = ["python", gen_new_component] + list(args) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to create component, error: %s" % err.decode(get_locale())) | ||
| else: | ||
| error("No %s found for current release!" % gen_new_component) | ||
| set_op(result='success') | ||
| report_op() |
+44
-12
| import os, sys | ||
| import click | ||
| from aos.util import log, info, error, popen, Config, cd_aos_root, get_config_value | ||
| from aos.util import log, info, error, popen, Config, cd_aos_root, \ | ||
| get_config_value, cd_app_root, locale_to_unicode | ||
| from aos.config import Global | ||
| from aos.constant import APP_CONFIG, AOS_SDK_PATH | ||
| from aos.constant import APP_CONFIG, AOS_SDK_PATH, NO_AOSSRC_HINT | ||
| from aos.usertrace.do_report import set_op, report_op | ||
@@ -21,3 +23,19 @@ # Make command | ||
| cmd_content = '' | ||
| if target: | ||
| cmd_content += debug_target | ||
| if work_path: | ||
| work_path = locale_to_unicode(work_path) | ||
| cmd_content += " -w %s" % work_path | ||
| if binaries_dir: | ||
| binaries_dir = locale_to_unicode(binaries_dir) | ||
| cmd_content += " -b %s" % binaries_dir | ||
| if start_client: | ||
| cmd_content += " -c" | ||
| if gdb_args: | ||
| cmd_content += " -g %s" % gdb_args | ||
| set_op(op='debug', content=cmd_content) | ||
| if work_path: | ||
| if os.path.isdir(work_path): | ||
@@ -29,10 +47,17 @@ aos_path = work_path | ||
| # debug from app project | ||
| curr_dir = os.getcwd() | ||
| config_file = os.path.join(curr_dir, APP_CONFIG) | ||
| if os.path.isfile(config_file): | ||
| ac = Config(config_file) | ||
| aos_path = ac.get("AOS_SDK_PATH") | ||
| work_path = curr_dir | ||
| ret, orig_dir = cd_app_root() | ||
| app_root_dir = os.getcwd() | ||
| if ret == 'success': | ||
| aos_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_path or not os.path.isdir(aos_path): | ||
| log("Looks like AOS_SDK_PATH is not correctly set." ) | ||
| error(NO_AOSSRC_HINT) | ||
| work_path = app_root_dir | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| else: | ||
| #cd to aos root_dir | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| ret, original_dir = cd_aos_root() | ||
@@ -44,5 +69,6 @@ if ret != 'success': | ||
| os.chdir(original_dir) | ||
| aos_path = Global().get_cfg(AOS_SDK_PATH) | ||
| if aos_path == None: | ||
| error("Not in aos_sdk_path, aos_sdk unavailable as well!") | ||
| aos_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_path or not os.path.isdir(aos_path): | ||
| log("Looks like AOS_SDK_PATH is not correctly set." ) | ||
| error(NO_AOSSRC_HINT) | ||
| else: | ||
@@ -54,6 +80,9 @@ log("[INFO]: Config Loading OK, use '%s' as sdk path\n" % aos_path) | ||
| if work_path: | ||
| work_path = locale_to_unicode(work_path) | ||
| # read app & board from .config | ||
| if debug_target == '': | ||
| # check AliOS Things version | ||
| if os.path.exists(os.path.join(aos_path, 'build', 'Config.in')) == False: | ||
| if not aos_path or not os.path.exists(os.path.join(aos_path, 'build', 'Config.in')): | ||
| error('Target invalid') | ||
@@ -105,1 +134,4 @@ | ||
| aos_debug(debug_target, work_path, binaries_dir) | ||
| set_op(result='success') | ||
| report_op() |
@@ -6,7 +6,14 @@ import sys | ||
| from serial.tools.list_ports import comports | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| # Make command | ||
| @click.command("devices", short_help="List devices on serial ports") | ||
| @click.command("devices", short_help="List devices on serial ports",\ | ||
| help="List devices on serial ports.\n"\ | ||
| "\nAttention: this command is deprecated! "\ | ||
| "Please use 'aos list devices' instead.") | ||
| def cli(): | ||
| """ List devices on serial ports """ | ||
| set_op(op='devices') | ||
| arr = [] | ||
@@ -17,2 +24,6 @@ for p in comports(): | ||
| print(json.dumps(arr, indent = 4)) | ||
| set_op(result='success') | ||
| report_op() | ||
| sys.exit(0) |
+69
-22
@@ -10,7 +10,12 @@ import os | ||
| from aos.util import * | ||
| from aos.constant import APP_CONFIG, APP_INCLUDES, APP_UPDATE_MKFILE, APP_GEN_INCLUDES | ||
| from aos.constant import APP_INCLUDES, APP_UPDATE_MKFILE, \ | ||
| APP_GEN_INCLUDES, NO_SDK_HINT, \ | ||
| NO_AOSSRC_HINT | ||
| from aos.download import install_externals | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| # Make command | ||
| @click.command("make", short_help="Make aos program/component") | ||
| @click.command("make", short_help="Make aos program/component", | ||
| help="Make aos program/component.\n"\ | ||
| "\nPlease run 'aos make help' to explore more ...") | ||
| @click.argument("targets", required=False, nargs=-1, metavar="[TARGETS...]") | ||
@@ -23,2 +28,13 @@ @click.option("-c", "--cmd", metavar="[CMD]", help="Sub build stage for target") | ||
| ret = 0 | ||
| res = 'success' | ||
| cmd_content = '' | ||
| if cmd: | ||
| cmd_content += ' -c %s' % cmd | ||
| if targets: | ||
| cmd_content += ' ' + ' '.join(targets) | ||
| set_op(op='make', content=cmd_content) | ||
| make_args = ' '.join(targets) | ||
@@ -38,8 +54,19 @@ for arg in targets: | ||
| else: | ||
| make_build(make_args) | ||
| ret = make_build(make_args) | ||
| if ret != 0: | ||
| res = 'fail: return code %d' % ret | ||
| set_op(result=res) | ||
| report_op() | ||
| return | ||
| #aos make clean go here | ||
| make_build(make_args) | ||
| ret = make_build(make_args) | ||
| if ret != 0: | ||
| res = 'fail: return code %d' % ret | ||
| set_op(result=res) | ||
| report_op() | ||
| # | ||
@@ -254,9 +281,9 @@ # Common functions | ||
| # build from app project | ||
| curr_dir = os.getcwd() | ||
| config_file = os.path.join(curr_dir, APP_CONFIG) | ||
| ret, orig_dir = cd_app_root() | ||
| if os.path.isfile(config_file): | ||
| if ret == 'success': | ||
| app_root_dir = os.getcwd() | ||
| aos_sdk_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_sdk_path: | ||
| error("No AliOS SDK installed") | ||
| error(NO_SDK_HINT) | ||
@@ -270,13 +297,20 @@ if not os.path.isdir(aos_sdk_path): | ||
| if "BUILD_DIR" not in make_args and "BUILD_DIR" not in os.environ: | ||
| out = os.path.join(curr_dir, "out").replace(os.path.sep, "/") | ||
| out = os.path.join(app_root_dir, "out").replace(os.path.sep, "/") | ||
| build_dir = "BUILD_DIR=%s" % out | ||
| app_makefile = "APP_MAKEFILE=%s" % os.path.join(curr_dir, "aos.mk").replace(os.path.sep, "/") | ||
| app_dir = "APPDIR=%s" % curr_dir | ||
| app_makefile = "APP_MAKEFILE=%s" % os.path.join(app_root_dir, "aos.mk").replace(os.path.sep, "/") | ||
| app_dir = "APPDIR=%s" % app_root_dir | ||
| with cd(aos_sdk_path): | ||
| _run_make(['-e', '-f %s/build/Makefile' % aos_sdk_path, source_root, make_args, app_dir, build_dir]) | ||
| ret = _run_make(['-e', ' '.join(['-f', os.path.sep.join([aos_sdk_path, 'build', 'Makefile'])]), source_root, make_args, app_dir, build_dir]) | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| return ret | ||
| else: | ||
| # build from source code | ||
| _run_make(['-e', '-f build/Makefile', make_args]) | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| return _run_make(['-e', ' '.join(['-f', os.path.sep.join(['build', 'Makefile'])]), make_args]) | ||
@@ -303,5 +337,8 @@ def _run_make(arg_list): | ||
| source_root = os.getcwd() | ||
| source_root = locale_to_unicode(source_root) | ||
| if not source_root: | ||
| error("Can't find AliOS-Things source code directory") | ||
| error("*** Fatal error! Failed to find source!\n" | ||
| "*** Please ensure you are running 'make' command inside " | ||
| "your project directory or AliOS Things SDK directory.") | ||
@@ -341,15 +378,23 @@ app_build = False | ||
| cmd = ["python", APP_GEN_INCLUDES, source_root, APP_INCLUDES] | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| if original_dir and os.path.isdir(original_dir): | ||
| os.chdir(original_dir) | ||
| simple_error("Failed to generate includes, error: %s" % err.decode(get_locale())) | ||
| if os.path.isfile(APP_UPDATE_MKFILE): | ||
| cmd = ["python", APP_UPDATE_MKFILE, app_root] | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| if original_dir and os.path.isdir(original_dir): | ||
| os.chdir(original_dir) | ||
| simple_error("Failed to update makefile, error: %s" % err.decode(get_locale())) | ||
| make_cmds = { | ||
| 'Win32': 'cmd/win32/make.exe', | ||
| 'Linux64': 'cmd/linux64/make', | ||
| 'Linux32': 'cmd/linux32/make', | ||
| 'OSX': 'cmd/osx/make' | ||
| 'Win32': os.path.sep.join(['cmd', 'win32', 'make.exe']), | ||
| 'Linux64': os.path.sep.join(['cmd', 'linux64', 'make']), | ||
| 'Linux32': os.path.sep.join(['cmd', 'linux32', 'make']), | ||
| 'OSX': os.path.sep.join(['cmd', 'osx', 'make']) | ||
| } | ||
| tools_dir = os.path.join(source_root, 'build').replace('\\', '/') | ||
| tools_dir = os.path.join(source_root, 'build') | ||
| make_cmd = os.path.join(tools_dir, make_cmds[host_os]) | ||
@@ -359,5 +404,7 @@ | ||
| make_cmd_str = ' '.join([make_cmd, 'HOST_OS=' + host_os, 'TOOLS_ROOT=' + tools_dir] + list(arg_list)) | ||
| popen(make_cmd_str, shell=True, cwd=os.getcwd()) | ||
| ret = popen(make_cmd_str, shell=True, cwd=os.getcwd(), suppress_error=True) | ||
| if original_dir and os.path.isdir(original_dir): | ||
| os.chdir(original_dir) | ||
| return ret |
@@ -5,2 +5,3 @@ import sys | ||
| from serial.tools import miniterm | ||
| from aos.usertrace.do_report import set_op, report_op | ||
@@ -22,2 +23,5 @@ # Make command | ||
| cmd_content = ' '.join([port, baud]) | ||
| set_op(op='monitor', content=cmd_content) | ||
| sys.argv = args | ||
@@ -27,2 +31,7 @@ try: | ||
| except ProcessException as e: | ||
| set_op(result='fail: ' + format(e)) | ||
| report_op() | ||
| raise e | ||
| set_op(result='success') | ||
| report_op() |
+45
-14
@@ -5,2 +5,4 @@ import os, sys | ||
| from aos.util import * | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| from aos.constant import NO_AOSSRC_HINT | ||
@@ -20,4 +22,10 @@ @click.group(short_help="OTA firmware management tool") | ||
| source_root = os.getcwd() | ||
| extra_path = os.path.join(source_root, "build/cmd/%s" % os_type) | ||
| extra_path = os.path.join(source_root, "build", "cmd", "%s" % os_type) | ||
| else: | ||
| aos_sdk_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_sdk_path or not os.path.isdir(aos_sdk_path): | ||
| error(NO_AOSSRC_HINT) | ||
| extra_path = os.path.join(aos_sdk_path, "build", "cmd", "%s" % os_type) | ||
| if os.path.isdir(original_dir): | ||
@@ -30,7 +38,7 @@ os.chdir(original_dir) | ||
| @cli.command("diff", short_help="Create diff bin base on old & new binaries") | ||
| @click.argument("old_bin", required=True, metavar="[OLD_BIN]") | ||
| @click.argument("new_bin", required=True, metavar="[NEW_BIN]") | ||
| @click.option("-o", "--output-file", required=False, metavar="DIFF_BIN", help="The name of diff binary, default as diff.bin" ) | ||
| @click.option("-s", "--split-size", required=False, metavar="BYTE", help="Split size, default as 65536") | ||
| @click.option("-l", "--logfile", required=False, metavar="LOG_FILE", help="The name of log file") | ||
| @click.argument("old_bin", required=True, metavar="<OLD_BIN>") | ||
| @click.argument("new_bin", required=True, metavar="<NEW_BIN>") | ||
| @click.option("-o", "--output-file", required=False, metavar="[DIFF_BIN]", help="The name of diff binary, default as diff.bin" ) | ||
| @click.option("-s", "--split-size", required=False, metavar="[BYTE]", help="Split size, default as 65536") | ||
| @click.option("-l", "--logfile", required=False, metavar="[LOG_FILE]", help="The name of log file") | ||
| def ota_diff(old_bin, new_bin, output_file, split_size, logfile): | ||
@@ -43,2 +51,12 @@ if not split_size: | ||
| cmd_content = 'diff' | ||
| if output_file: | ||
| cmd_content += ' -o %s' % output_file | ||
| if split_size: | ||
| cmd_content += ' -s %s' % str(split_size) | ||
| if logfile: | ||
| cmd_content += ' -l %s' % logfile | ||
| cmd_content += ' ' + ' '.join([old_bin, new_bin]) | ||
| set_op(op='ota', content=cmd_content) | ||
| if not os.path.isfile(old_bin): | ||
@@ -58,13 +76,21 @@ error("No such file %s" % old_bin) | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to run %s, error: %s" % (cmd, err.decode(get_locale()))) | ||
| else: | ||
| error("Can't find command ota_nbdiff") | ||
| warning("Sorry this tool is not supported on %s" % get_host_os()) | ||
| set_op(result='success') | ||
| report_op() | ||
| # Patch diff to old firmware | ||
| @cli.command("patch", short_help="Create new bin base on diff & old binaries") | ||
| @click.argument("old_bin", required=True, metavar="[OLD_BIN]") | ||
| @click.argument("new_bin", required=True, metavar="[NEW_BIN]") | ||
| @click.argument("diff_bin", required=True, metavar="[DIFF_BIN]") | ||
| @click.argument("old_bin", required=True, metavar="<OLD_BIN>") | ||
| @click.argument("new_bin", required=True, metavar="<NEW_BIN>") | ||
| @click.argument("diff_bin", required=True, metavar="<DIFF_BIN>") | ||
| def ota_patch(old_bin, new_bin, diff_bin): | ||
| for binfile in [old_bin, new_bin, diff_bin]: | ||
| cmd_content = ' '.join(['patch', old_bin, new_bin]) | ||
| set_op(op='ota', content=cmd_content) | ||
| for binfile in [old_bin, diff_bin]: | ||
| if not os.path.isfile(binfile): | ||
@@ -78,6 +104,11 @@ error("No such file %s" % binfile) | ||
| cmd = [ota_nbpatch, old_bin, new_bin, diff_bin] | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to run %s, error: %s" % (cmd, err.decode(get_locale()))) | ||
| else: | ||
| error("Can't find command ota_nbpatch") | ||
| warning("Sorry this tool is not supported on %s" % get_host_os()) | ||
| set_op(result='success') | ||
| report_op() | ||
| #@cli.command("upload", short_help="Upload firmware to board over OTA, e.g. aos ota helloworld@starterkit") | ||
@@ -84,0 +115,0 @@ #@click.argument("targets", required=False, nargs=-1, metavar="[TARGETS...]") |
| import click | ||
| from aos.util import popen | ||
| from aos.util import popen, pqueryerr, simple_error, get_locale | ||
| from aos.managers.addon import AddonManager | ||
| from aos.usertrace.do_report import set_op, report_op | ||
| # Upgrade command | ||
| @click.command( | ||
| "upgrade", short_help="Upgrade aos-cube to latest") | ||
| def cli(): | ||
| @click.group(short_help="Upgrade tools and components") | ||
| @click.pass_context | ||
| def cli(ctx): | ||
| pass | ||
| @cli.command("aos-cube", short_help="Upgrade aos-cube to latest") | ||
| def upgrade_cube(): | ||
| """ Run pip upgrade process to keep aos-cube up-to-date. """ | ||
| set_op(op='upgrade', content="aos-cube") | ||
| cmd = ["pip", "install", "--upgrade", "aos-cube"] | ||
@@ -14,4 +21,39 @@ try: | ||
| cmd.insert(3, "--no-cache-dir") | ||
| popen(cmd) | ||
| ret, err = pqueryerr(cmd) | ||
| if ret != 0: | ||
| simple_error("Failed to run %s, error: %s" % (cmd, err.decode(get_locale()))) | ||
| except Exception as e: | ||
| set_op(result='fail: ' + format(e)) | ||
| report_op() | ||
| raise e | ||
| set_op(result='success') | ||
| report_op() | ||
| @cli.command("comp", short_help="Upgrade installed components to latest",\ | ||
| help="Upgrade the installed components (specified by 'COMPONENTS' "\ | ||
| "argument) to latest. If 'COMPONENTS' argument not provided, "\ | ||
| "all installed components will be checked or upgraded.") | ||
| @click.argument("components", required=False, nargs=-1, metavar="[COMPONENTS...]") | ||
| @click.option("-c", "--only-check", is_flag=True, \ | ||
| help="Do not update, only check for new version") | ||
| def update_components(components, only_check): | ||
| am = AddonManager() | ||
| args = [] | ||
| if components: | ||
| args += components | ||
| cmd_content = 'comp' | ||
| if only_check: | ||
| cmd_content += ' -c' | ||
| if args: | ||
| cmd_content += ' ' + ' '.join(args) | ||
| set_op(op='upgrade', content=cmd_content) | ||
| am.upgrade(only_check, *args) | ||
| set_op(result='success') | ||
| report_op() |
+41
-12
| import os, sys | ||
| import click | ||
| from aos.util import log, info, error, popen, Config, cd_aos_root, get_config_value | ||
| from aos.util import log, info, error, popen, Config, cd_aos_root, \ | ||
| get_config_value, cd_app_root, locale_to_unicode | ||
| from aos.config import Global | ||
| from aos.constant import APP_CONFIG, AOS_SDK_PATH | ||
| from aos.constant import APP_CONFIG, AOS_SDK_PATH, NO_AOSSRC_HINT | ||
| from aos.usertrace.do_report import set_op, report_op | ||
@@ -20,3 +22,17 @@ # Make command | ||
| cmd_content = '' | ||
| if target: | ||
| cmd_content += upload_target | ||
| if work_path: | ||
| work_path = locale_to_unicode(work_path) | ||
| cmd_content += " -w %s" % work_path | ||
| if binaries_dir: | ||
| binaries_dir = locale_to_unicode(binaries_dir) | ||
| cmd_content += " -b %s" % binaries_dir | ||
| if target: | ||
| cmd_content += ' ' + ' '.join(target) | ||
| set_op(op='upload', content=cmd_content) | ||
| if work_path: | ||
| if os.path.isdir(work_path): | ||
@@ -28,10 +44,16 @@ aos_path = work_path | ||
| # upload from app project | ||
| curr_dir = os.getcwd() | ||
| config_file = os.path.join(curr_dir, APP_CONFIG) | ||
| if os.path.isfile(config_file): | ||
| ac = Config(config_file) | ||
| aos_path = ac.get("AOS_SDK_PATH") | ||
| work_path = curr_dir | ||
| ret, orig_dir = cd_app_root() | ||
| app_root_dir = os.getcwd() | ||
| if ret == 'success': | ||
| aos_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_path or not os.path.isdir(aos_path): | ||
| log("Looks like AOS_SDK_PATH is not correctly set." ) | ||
| error(NO_AOSSRC_HINT) | ||
| work_path = locale_to_unicode(app_root_dir) | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| else: | ||
| #cd to aos root_dir | ||
| if os.path.isdir(orig_dir): | ||
| os.chdir(orig_dir) | ||
| ret, original_dir = cd_aos_root() | ||
@@ -43,5 +65,6 @@ if ret != 'success': | ||
| os.chdir(original_dir) | ||
| aos_path = Global().get_cfg(AOS_SDK_PATH) | ||
| if aos_path == None: | ||
| error("Not in aos_sdk_path, aos_sdk unavailable as well!") | ||
| aos_path = os.environ.get("AOS_SDK_PATH") | ||
| if not aos_path or not os.path.isdir(aos_path): | ||
| log("Looks like AOS_SDK_PATH is not correctly set." ) | ||
| error(NO_AOSSRC_HINT) | ||
| else: | ||
@@ -53,6 +76,9 @@ log("[INFO]: Config Loading OK, use '%s' as sdk path\n" % aos_path) | ||
| if aos_path: | ||
| aos_path = locale_to_unicode(aos_path) | ||
| # read app & board from .config | ||
| if upload_target == '': | ||
| # check AliOS Things version | ||
| if os.path.exists(os.path.join(aos_path, 'build', 'Config.in')) == False: | ||
| if not aos_path or not os.path.exists(os.path.join(aos_path, 'build', 'Config.in')): | ||
| error('Target invalid') | ||
@@ -102,1 +128,4 @@ | ||
| aos_upload(upload_target, work_path, binaries_dir) | ||
| set_op(result='success') | ||
| report_op() |
+36
-6
@@ -163,3 +163,6 @@ import os | ||
| OS_CONFIG = "project.ini" | ||
| COMP_INFO_DB_FILE = "component_info_publish.db" | ||
| OS_REPO = "http://116.62.245.240/AliOSThings-2-packages/" | ||
| #OS_REPO = "http://11.238.148.13:81/2_test/" | ||
| OS_CACHE = os.path.join(os.path.expanduser("~"), ".aoscache") | ||
| OS_DEF_COMPS = [ "buildsystem", "system_include"] | ||
@@ -175,11 +178,38 @@ | ||
| # Path to scripts in OS | ||
| CHECK_WRAPPER = "build/check/check_wrapper.py" | ||
| GEN_SAL_STAGING = "build/scripts/gen_sal_staging.py" | ||
| GEN_NEWPROJECT = "build/scripts/gen_newproject.py" | ||
| GEN_APPSOURCE = "build/scripts/gen_appsource.py" | ||
| CHECK_WRAPPER = os.path.sep.join(["build", "check", "check_wrapper.py"]) | ||
| GEN_SAL_STAGING = os.path.sep.join(["build", "scripts", "gen_sal_staging.py"]) | ||
| GEN_MAL_STAGING = os.path.sep.join(["build", "scripts", "gen_mal_staging.py"]) | ||
| GEN_NEWPROJECT = os.path.sep.join(["build", "scripts", "gen_newproject.py"]) | ||
| GEN_APPSOURCE = os.path.sep.join(["build", "scripts", "gen_appsource.py"]) | ||
| GEN_NEW_COMPONENT = os.path.sep.join(["build", "scripts", "gen_new_component.py"]) | ||
| # App config | ||
| APP_CONFIG = ".aos" | ||
| APP_UPDATE_MKFILE = "build/scripts/app_update_aosmk.py" | ||
| APP_GEN_INCLUDES = "build/scripts/app_gen_comp_index.py" | ||
| APP_UPDATE_MKFILE = os.path.sep.join(["build", "scripts", "app_update_aosmk.py"]) | ||
| APP_GEN_INCLUDES = os.path.sep.join(["build", "scripts", "app_gen_comp_index.py"]) | ||
| APP_INCLUDES = "aos_comp_index.json" | ||
| # File to store user's choice of whether or not to participate in the tool improvement plan. | ||
| AOS_INVESTIGATION_FILE = os.path.join(os.path.expanduser("~"), ".aos", ".ucubeplan") | ||
| # AOS query/report server | ||
| AOS_SERVER_URL = "https://os-activation.iot.aliyun.com/cube" | ||
| AOS_HTTP_HEADER = "Content-Type:application/json" | ||
| AOS_HTTP_METHOD = "POST" | ||
| # print debug message or not, bool value | ||
| DEBUG_PRINT = False | ||
| # No SDK/SRC messages, widely used | ||
| _HINT_COMMON = "No AliOS Things source directory found. To make things work, please:\n\n" | ||
| _SET_SDK_HINT = "-> Set AOS_SDK_PATH environment variable to a valid\n"\ | ||
| " AliOS-Things source directory as below:\n\n"\ | ||
| " * Linux/MacOS/Git-Bash:\n"\ | ||
| " $ export AOS_SDK_PATH=<path_to_AliOS_Things_src>\n"\ | ||
| " * Windows CMD:\n"\ | ||
| " > set AOS_SDK_PATH=<path_to_AliOS_Things_src>\n\n"\ | ||
| " Please set it on system level if you want so.\n" | ||
| _RUN_INSIDE_SDK_HINT = "-> Run this command in AliOS Things source directory.\n" | ||
| NO_SDK_HINT = _HINT_COMMON + _SET_SDK_HINT | ||
| NOT_INSIDE_SDK_HINT = _HINT_COMMON + _RUN_INSIDE_SDK_HINT | ||
| NO_AOSSRC_HINT = _HINT_COMMON + _RUN_INSIDE_SDK_HINT + "\nOr,\n\n" + _SET_SDK_HINT |
@@ -13,3 +13,4 @@ import os, sys | ||
| from aos.constant import OTA_SERVER, OTA_EMQ_PORT, OTA_WEBSERVER_PORT, OTA_UDEBUG_LIB | ||
| from aos.constant import OTA_SERVER, OTA_EMQ_PORT, OTA_WEBSERVER_PORT, \ | ||
| OTA_UDEBUG_LIB, NO_AOSSRC_HINT | ||
| from aos.util import log, error, cd_aos_root, is_domestic, popen | ||
@@ -50,3 +51,3 @@ | ||
| if aos_path == None: | ||
| error("[ERRO]: Not in aos_sdk_path, aos_sdk unavailable as well!") | ||
| error(NO_AOSSRC_HINT) | ||
| else: | ||
@@ -53,0 +54,0 @@ log("[INFO]: Config Loading OK, use '%s' as sdk path\n" % aos_path) |
+205
-23
@@ -9,8 +9,8 @@ import contextlib | ||
| import re | ||
| import sys | ||
| import locale | ||
| from aos.constant import APP_CONFIG | ||
| import errno | ||
| from aos.constant import * | ||
| # Directory navigation | ||
@@ -40,2 +40,7 @@ @contextlib.contextmanager | ||
| # Logging and output | ||
def debug(msg):
    """Write *msg* (plus a newline) to stdout, but only when the
    DEBUG_PRINT module flag is enabled; flushed immediately."""
    if not DEBUG_PRINT:
        return
    sys.stdout.write("%s\n" % msg)
    sys.stdout.flush()
| def log(msg): | ||
@@ -65,11 +70,54 @@ sys.stdout.write(msg) | ||
| sys.stderr.write("---\n") | ||
| sys.stderr.flush() | ||
def error(msg, code=-1):
    """Print *msg* to stderr with an "[AliOS-Things] ERROR:" prefix on each
    line, report the failed operation to the usage-tracking backend, and
    terminate the process with exit status *code* (default -1)."""
    # Imported lazily — presumably to avoid a circular import at module
    # load time (aos.usertrace also uses aos.util); confirm before moving.
    from aos.usertrace.do_report import report_op, set_op

    for line in msg.splitlines():
        sys.stderr.write("[AliOS-Things] ERROR: %s\n" % line)
    sys.stderr.write("---\n")
    sys.stderr.flush()
    set_op(result='fail: ' + msg)
    report_op()
    sys.exit(code)
def simple_error(msg, code=-1):
    """Like error(), but print *msg* to stderr without the
    "[AliOS-Things] ERROR:" prefix, report the failure to the
    usage-tracking backend, then exit with status *code* (default -1).

    Fixes: removed the unused local ``report_str``; added a stderr flush
    before exiting, for consistency with error().
    """
    # Lazy import, matching error() — avoids import cycles at load time.
    from aos.usertrace.do_report import report_op, set_op

    for line in msg.splitlines():
        sys.stderr.write("%s\n" % line)
    sys.stderr.write("---\n")
    sys.stderr.flush()
    set_op(result='fail: ' + msg)
    report_op()
    sys.exit(code)
def aos_input(prompt):
    """Show *prompt* on stdout (if non-empty) and read one line from stdin.

    Works on both Python 2 (raw_input) and Python 3 (input).

    :param prompt: text written (unmodified) to stdout before reading;
                   may be empty or None to skip the prompt.
    :return: tuple (ret, answer) — (0, line) on success, or (-1, '') on
             any read failure (EOF, interrupt, closed stdin, ...).

    Fixes: the original's trailing ``else: warning(...)`` branch was
    unreachable (the integer major version is exhaustively covered by
    ``< 3`` / ``>= 3``) and the two try/except bodies were duplicates;
    both are collapsed here without changing behavior.
    """
    # Pick the reader once. The Python-2 branch is never evaluated on
    # Python 3, so the raw_input reference is safe there.
    if sys.version_info[0] < 3:
        read_line = raw_input  # noqa: F821 — Python 2 builtin
    else:
        read_line = input
    try:
        if prompt:
            sys.stdout.write(prompt)
            sys.stdout.flush()
        return 0, read_line()
    except Exception:
        return -1, ''
| def progress_cursor(): | ||
@@ -104,13 +152,21 @@ while True: | ||
| def popen(command, suppress_error=None, stdin=None, **kwargs): | ||
| # print for debugging | ||
| info('Exec "' + ' '.join(command) + '" in ' + os.getcwd()) | ||
| # fix error strings | ||
| try: | ||
| # command may be a single string command or list | ||
| if isinstance(command, list): | ||
| command_line = command | ||
| newcmd = [] | ||
| for c in command: | ||
| if not isinstance(c, str): | ||
| newcmd += [c.encode(get_locale())] | ||
| else: | ||
| newcmd += [c] | ||
| else: | ||
| command_line = command.split() | ||
| except: | ||
| command_line = command | ||
| if not isinstance(command, str): | ||
| command = command.encode(get_locale()) | ||
| newcmd = command | ||
| info('Exec "' + ' '.join(command_line) + '" in ' + locale_to_unicode(os.getcwd())) | ||
| try: | ||
| proc = subprocess.Popen(command, **kwargs) | ||
| proc = subprocess.Popen(newcmd, **kwargs) | ||
| except OSError as e: | ||
@@ -133,6 +189,21 @@ if e[0] == errno.ENOENT: | ||
| def pquery(command, stdin=None, **kwargs): | ||
| # command may be a single string command or list | ||
| if isinstance(command, list): | ||
| command_line = command | ||
| newcommand = [] | ||
| for c in command: | ||
| if not isinstance(c, str): | ||
| newcommand += [c.encode(get_locale())] | ||
| else: | ||
| newcommand += [c] | ||
| else: | ||
| if not isinstance(command, str): | ||
| command = command.encode(get_locale()) | ||
| newcommand = command | ||
| command_line = command.split() | ||
| if very_verbose: | ||
| info('Query "' + ' '.join(command) + '" in ' + os.getcwd()) | ||
| info('Query "' + ' '.join(newcommand) + '" in ' + locale_to_unicode(os.getcwd())) | ||
| try: | ||
| proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) | ||
| proc = subprocess.Popen(newcommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) | ||
| except OSError as e: | ||
@@ -143,3 +214,3 @@ if e[0] == errno.ENOENT: | ||
| "Please verify that it's installed and accessible from your current path by executing \"%s\".\n" % ( | ||
| command[0], command[0]), e[0]) | ||
| command_line[0], command_line[0]), e[0]) | ||
| else: | ||
@@ -154,18 +225,64 @@ raise e | ||
| if proc.returncode != 0: | ||
| raise ProcessException(proc.returncode, command[0], ' '.join(command), os.getcwd()) | ||
| raise ProcessException(proc.returncode, command_line[0], ' '.join(command_line), os.getcwd()) | ||
| return stdout | ||
def pqueryerr(command, stdin=None, **kwargs):
    """Run *command* and return (returncode, stderr_output).

    :param command: either a list of arguments or a single command string
                    (split on whitespace before execution).
    :param stdin: optional data passed to the process via communicate().
    :param kwargs: forwarded to subprocess.Popen.
    :return: tuple (proc.returncode, err) where err is the captured stderr.

    Fixes: ``e[0]`` on an OSError raises TypeError on Python 3 — use
    ``e.errno`` (valid on both 2 and 3); the verbose log now joins
    ``command_line`` (always a list) instead of ``newcommand``, which is a
    plain string when *command* is a string and would be joined
    character-by-character.
    """
    # Normalize: keep a display-friendly arg list (command_line) and the
    # value actually executed (newcommand, locale-encoded where needed).
    if isinstance(command, list):
        command_line = command
        newcommand = []
        for c in command:
            if not isinstance(c, str):
                # Python 2 unicode items: encode to the locale's charset.
                newcommand.append(c.encode(get_locale()))
            else:
                newcommand.append(c)
    else:
        command_line = command.split()
        if not isinstance(command, str):
            command = command.encode(get_locale())
        newcommand = command
    if very_verbose:
        info('Exec "' + ' '.join(command_line) + '" in ' + locale_to_unicode(os.getcwd()))
    try:
        proc = subprocess.Popen(newcommand, stderr=subprocess.PIPE, **kwargs)
    except OSError as e:
        if e.errno == errno.ENOENT:
            error(
                "Could not execute \"%s\".\n"
                "Please verify that it's installed and accessible from your current path by executing \"%s\".\n" % (
                    command_line[0], command_line[0]), e.errno)
        else:
            raise e
    stdout, err = proc.communicate(stdin)
    if very_verbose:
        log(str(err).strip() + "\n")
    return proc.returncode, err
| def exec_cmd(command, suppress_error=None, stdin=None, **kwargs): | ||
| """ Run command and return output, errcode """ | ||
| info('Exec "' + ' '.join(command) + '" in ' + os.getcwd()) | ||
| # fix error strings | ||
| if isinstance(command, str): | ||
| # command may be a single string command or list | ||
| if isinstance(command, list): | ||
| command_line = command | ||
| newcommand = [] | ||
| for c in command: | ||
| if not isinstance(c, str): | ||
| newcommand += [c.encode(get_locale())] | ||
| else: | ||
| newcommand += [c] | ||
| else: | ||
| command_line = command.split() | ||
| else: | ||
| command_line = command | ||
| if not isinstance(command, str): | ||
| command = command.encode(get_locale()) | ||
| newcommand = command | ||
| info('Exec "' + ' '.join(newcommand) + '" in ' + locale_to_unicode(os.getcwd())) | ||
| try: | ||
| proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) | ||
| proc = subprocess.Popen(newcommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) | ||
| except OSError as e: | ||
@@ -237,7 +354,50 @@ if e[0] == errno.ENOENT: | ||
| while os.path.isdir('./include/aos') == False and os.path.isdir('./kernel/rhino') == False and os.path.isdir('./include/core') == False and os.path.isdir('./core/rhino') == False: | ||
| os.chdir('../') | ||
| if sys_root.match(os.getcwd()): | ||
| return 'fail', original_dir | ||
| os.chdir('../') | ||
| return 'success', original_dir | ||
def cd_app_root():
    """Walk upward from the current directory (chdir as we go) until a
    directory containing the APP_CONFIG file (.aos) is found.

    :return: ('success', original_dir) once found — the process cwd is
             then the app root; ('fail', original_dir) if the filesystem
             root is reached first. original_dir is the cwd on entry.
    """
    original_dir = os.getcwd()
    # Root detection differs per platform: drive root on Windows, '/' elsewhere.
    if get_host_os() == 'Win32':
        root_pattern = re.compile(r'^[A-Z]{1}:\\$')
    else:
        root_pattern = re.compile('^/$')
    while True:
        here = os.getcwd()
        if os.path.isfile(os.path.join(here, APP_CONFIG)):
            return 'success', original_dir
        if root_pattern.match(here):
            return 'fail', original_dir
        os.chdir('../')
def get_aos_version(root=None):
    """Figure out the version of AliOS-Things.

    Reads SYSINFO_KERNEL_VERSION ("AOS-R-x.y.z") from include/aos/kernel.h.

    :param root: AliOS Things source root; when omitted, cd_aos_root() is
                 used to locate it (the original cwd is restored afterward).
    :return: version string like '3.1.0'; '' if the macro or file is not
             found; None when no source root could be located.

    Fixes: the regex char class ``[0-9|\\.]`` wrongly accepted a literal
    '|' inside versions — tightened to ``[0-9.]``; the manual readline
    loop is replaced by idiomatic file iteration.
    """
    ver = ''
    orig_dir = None
    if not root:
        ret, orig_dir = cd_aos_root()
        if ret == "fail":
            return None
        root = os.getcwd()
    ver_file = os.path.join(root, "include", "aos", "kernel.h")
    if os.path.isfile(ver_file):
        pattern = re.compile(r'#define SYSINFO_KERNEL_VERSION\s+"AOS-R-([0-9.]+)"')
        with open(ver_file, 'r') as f:
            for line in f:
                m = pattern.search(line)
                if m:
                    ver = m.group(1)
                    break
    # Restore the caller's cwd if cd_aos_root() moved us.
    if orig_dir and os.path.isdir(orig_dir):
        os.chdir(orig_dir)
    return ver
| def get_aos_project(): | ||
@@ -363,2 +523,24 @@ """ Figure out the aos project dir """ | ||
def get_locale():
    """Return the name of the preferred character encoding.

    Falls back to 'utf-8' when the system locale reports no encoding.
    When the locale lookup itself fails and LC_ALL is unset, force
    en_US.UTF-8 and retry; otherwise abort via error().
    """
    try:
        loc = locale.getdefaultlocale()
    except Exception as exc:
        if not os.environ.get("LC_ALL"):
            # Unset locale: force a sane default and try once more.
            os.environ['LC_ALL'] = "en_US.UTF-8"
            os.environ['LANG'] = "en_US.UTF-8"
            loc = locale.getdefaultlocale()
        else:
            # error() never returns (it exits the process).
            error("Failed to get locale, error: %s" % format(exc))
    return loc[1] if loc and loc[1] else 'utf-8'
def locale_to_unicode(str):
    """On Python 2, decode a locale-encoded byte string to unicode;
    on Python 3 (or for falsy / already-unicode input) return it as-is.

    Note: the parameter is named ``str`` (shadowing the builtin) — kept
    for call compatibility.
    """
    # De Morgan'd form of the original condition; the `unicode` reference
    # is only evaluated on Python 2 thanks to short-circuiting.
    if not str or sys.version_info[0] != 2 or isinstance(str, unicode):  # noqa: F821
        return str
    return unicode(str, get_locale())  # noqa: F821 — Python 2 builtin
| class Config(): | ||
@@ -365,0 +547,0 @@ def __init__(self, conf_file): |
+3
-3
| Metadata-Version: 1.2 | ||
| Name: aos-cube | ||
| Version: 0.3.11 | ||
| Version: 0.5.8 | ||
| Summary: aos command line tool for AliOS-Things development. | ||
| Home-page: UNKNOWN | ||
| Home-page: https://pypi.org/project/aos-cube/ | ||
| Author: Alibaba | ||
@@ -174,3 +174,3 @@ Author-email: aliosthings@service.aliyun.com | ||
| Description: aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking MXCHIP aos's own build system and export functions, among other operations. | ||
| Description: aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking AliOS Things's own build system and export functions, among other operations. | ||
@@ -177,0 +177,0 @@ ## Installation |
+1
-1
@@ -1,2 +0,2 @@ | ||
| aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking MXCHIP aos's own build system and export functions, among other operations. | ||
| aos CUBE is the name of the `aos <http://aos.io>`_ command line tool, packaged as aos-cube, which enables the full aos workflow: repositories version control, maintaining dependencies, publishing code, updating from remotely hosted repositories, and invoking AliOS Things's own build system and export functions, among other operations. | ||
@@ -3,0 +3,0 @@ ## Installation |
+10
-0
| import os, sys | ||
| from setuptools import find_packages, setup | ||
| from aos.managers.constant import PYURLPKG | ||
| sys.path.append('./aos') | ||
| from aos import __title__, __version__, __description__, __url__, __author__, __email__ | ||
| from aos.constant import AOS_INVESTIGATION_FILE | ||
@@ -9,2 +11,9 @@ LONG_DESC = open('pypi_readme.rst').read() | ||
# Remove the old (pre-0.4) investigation config file so the user is asked
# again after upgrading. Best-effort by design: installation must not abort
# if the file cannot be removed (e.g. permissions), so failures are ignored.
# Fix: dropped the unused ``as e`` binding on the swallowed exception.
if os.path.isfile(AOS_INVESTIGATION_FILE):
    try:
        os.unlink(AOS_INVESTIGATION_FILE)
    except Exception:
        pass
| install_requires = [ | ||
@@ -19,2 +28,3 @@ 'pyserial', | ||
| 'configparser', | ||
| PYURLPKG, | ||
| ] | ||
@@ -21,0 +31,0 @@ |
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
224015
130.11%50
61.29%4785
178.04%