Verified commit 179c6a92, authored by Oskar Roesler

Format Python code with autopep8

parent fb61a3b0
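
For reference, a formatting pass like the one recorded in this commit can be reproduced with autopep8; the snippet below is a minimal sketch using its Python API. The aurci/ target directory, the recursive walk, and the 79-column limit are illustrative assumptions, not details taken from this commit.

    # Minimal sketch (assumed workflow): run autopep8 over every Python file in a tree.
    from pathlib import Path

    import autopep8  # pip install autopep8

    for pyfile in Path("aurci").rglob("*.py"):  # "aurci" is an assumed target directory
        original = pyfile.read_text()
        # fix_code() applies the default pycodestyle fixes: spacing around operators
        # and after '#', missing blank lines, and wrapping of over-long lines.
        formatted = autopep8.fix_code(original, options={"max_line_length": 79})
        if formatted != original:
            pyfile.write_text(formatted)
            print(f"reformatted {pyfile}")

Roughly the same effect should be achievable from the command line with autopep8 --in-place --recursive <directory>.
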
@@ -7,15 +7,17 @@ from concurrent.futures import ThreadPoolExecutor
 class Clone(Routines):
     def __init__(self, package, verbosity, output):
         Routines.__init__(self, package, verbosity, output)
-        self.url = "git@github.com:{0}/{1}.git".format(self.gh_organization_name, self.package)
+        self.url = "git@github.com:{0}/{1}.git".format(
+            self.gh_organization_name, self.package)

     def cloning(self):
         Repo.clone_from(self.url, self.repos_path)

     def clone(self):
-        if self.package=="all":
+        if self.package == "all":
             t = ThreadPoolExecutor(max_workers=(os.cpu_count()))
-            repos = self.gh_organization.get_repos(type="all", sort="full_name", direction="desc")
+            repos = self.gh_organization.get_repos(
+                type="all", sort="full_name", direction="desc")
             for repo in repos:
                 t.submit(Clone(repo.name, self.verbosity, self.output).cloning)
         else:
@@ -25,7 +27,7 @@ class Clone(Routines):
 class Pull(Routines):
     def pull(self):
-        if self.package=="all":
+        if self.package == "all":
             t = ThreadPoolExecutor(max_workers=(os.cpu_count()))
             for folder in os.listdir("./packages"):
                 t.submit(Pull(folder, self.verbosity, self.output).pull)
@@ -9,6 +9,7 @@ from aurci.general import Routines
 REPO_ADD_BIN = '/usr/bin/repo-add'
 MAKECHROOTPKG_BIN = '/usr/bin/makechrootpkg'

+
 class Packages(Routines):

     FAILED_FILE = "failed.txt"
@@ -16,18 +17,18 @@ class Packages(Routines):
     def __init__(self, package, verbosity, output):
         Routines.__init__(self, package, verbosity, output)
-        self.chroot = os.environ.get('CHROOT', os.path.join(self.cache_path, "chroot"))
+        self.chroot = os.environ.get(
+            'CHROOT', os.path.join(self.cache_path, "chroot"))
         self.check_and_create_path(self.chroot)
         self.localrepo_path = os.path.join(self.cache_path, 'repo')
         self.check_and_create_path(self.localrepo_path)

     def makepkg(self):
         if os.path.isfile(os.path.join(self.repos_path, "PKGBUILD")):
             try:
                 subprocess.run([MAKECHROOTPKG_BIN, '-c', '-d', self.localrepo_path, '-r',
-                               self.chroot], stdout=( None if self.verbosity else subprocess.DEVNULL), \
-                               stderr=subprocess.STDOUT, cwd=self.repos_path, check=True)
+                                self.chroot], stdout=(None if self.verbosity else subprocess.DEVNULL),
+                                stderr=subprocess.STDOUT, cwd=self.repos_path, check=True)
                 with open("success.txt", "a") as fobj:
                     fobj.write(self.package + "\n")
                 if self.output:
@@ -36,17 +37,18 @@ class Packages(Routines):
             except subprocess.CalledProcessError:
                 with open("failed.txt", "a") as fobj:
                     fobj.write(self.package + "\n")
-                raise RuntimeWarning("Building of {0} failed".format(self.package))
+                raise RuntimeWarning(
+                    "Building of {0} failed".format(self.package))
         else:
             raise FileNotFoundError("No PKBUILD existing: ", self.repos_path)
         try:
             subprocess.run([REPO_ADD_BIN, 'localhost.db.tar.zst'] + glob.glob(os.path.join(self.localrepo_path, '*.pkg.tar.*')),
-                            check=True, cwd=self.localrepo_path)
+                           check=True, cwd=self.localrepo_path)
         except subprocess.CalledProcessError as e:
             print(e.stdout, file=sys.stderr)

     def build(self):
-        if self.package=="all":
+        if self.package == "all":
             for folder in os.listdir("./packages"):
                 Packages(folder, self.verbosity, self.output).makepkg()
         else:
@@ -64,7 +66,8 @@ class Packages(Routines):
         try:
             pkg_repo = Repo(path=self.repos_path).remote(name='aur')
         except ValueError:
-            pkg_repo = Repo(path=self.repos_path).create_remote('aur', "aur@aur.archlinux.org:/{0}.git".format(self.package))
+            pkg_repo = Repo(path=self.repos_path).create_remote(
+                'aur', "aur@aur.archlinux.org:/{0}.git".format(self.package))
         pkg_repo.fetch()
         try:
             pkg_repo.push()
@@ -75,8 +78,9 @@ class Packages(Routines):
             print(e)

     def deploy(self):
-        DeprecationWarning("deploy command replaced by https://github.com/bionade24/abs-cd")
-        if self.package=="all":
+        DeprecationWarning(
+            "deploy command replaced by https://github.com/bionade24/abs-cd")
+        if self.package == "all":
             os.remove(self.FAILED_FILE)
             os.mknod(self.FAILED_FILE)
             for folder in os.listdir("./packages"):
@@ -85,9 +89,11 @@ class Packages(Routines):
                     pass
                 else:
                     try:
-                        Packages(folder, self.verbosity, self.output).deploy()
+                        Packages(folder, self.verbosity,
+                                 self.output).deploy()
                     except RuntimeWarning:
-                        print("Building of {0} failed".format(self.package))
+                        print("Building of {0} failed".format(
+                            self.package))
         else:
             try:
                 self.makepkg()
@@ -8,9 +8,12 @@ import yaml
 import re
 import configparser

+
 class Routines:
-    CONFIG_ROOT = os.environ.get("XDG_CONFIG_HOME", os.path.join(Path.home(), ".config/"))
-    CACHE_ROOT = os.environ.get("XDG_CACHE_HOME", os.path.join(Path.home(), ".cache/"))
+    CONFIG_ROOT = os.environ.get(
+        "XDG_CONFIG_HOME", os.path.join(Path.home(), ".config/"))
+    CACHE_ROOT = os.environ.get(
+        "XDG_CACHE_HOME", os.path.join(Path.home(), ".cache/"))

     def __init__(self, package=None, verbosity=False, output=True):
         self.verbosity = verbosity
@@ -21,12 +24,13 @@ class Routines:
         self.check_and_create_path(self.cache_path)
         if package:
             self.package = package
-            self.repos_path = os.path.join(self.cache_path, "packages", self.package)
+            self.repos_path = os.path.join(
+                self.cache_path, "packages", self.package)
             config = self.get_config()
             self.gh = Github(config['CI']['GH_OAUTH_TOKEN'])
             self.gh_organization_name = config['CI']['GH_ORGANIZATION']
-            self.gh_organization = self.gh.get_organization(self.gh_organization_name)
+            self.gh_organization = self.gh.get_organization(
+                self.gh_organization_name)

     @staticmethod
     def check_and_create_path(path):
@@ -47,28 +51,31 @@ class Routines:
     def build_metainfo_dict(self):
         rosdistro_url = \
-        f'https://raw.githubusercontent.com/ros/rosdistro/master/{self.get_ros_distro()}/distribution.yaml'
+            f'https://raw.githubusercontent.com/ros/rosdistro/master/{self.get_ros_distro()}/distribution.yaml'
         rosdistro = yaml.load(requests.get(rosdistro_url, allow_redirects=True).content,
                               Loader=yaml.BaseLoader)['repositories']
         ros_dict = {}
         for repo in rosdistro:
-            #Go through distro, and make entry for each package in a repository
+            # Go through distro, and make entry for each package in a repository
             d = rosdistro[repo]
             if 'source' in d:
                 src = d['source']['url']
             elif 'release' in d:
                 src = d['release']['url']
-            target = re.sub(r'\.git', '', src.split('/')[3] + '/' + src.split('/')[4])
+            target = re.sub(r'\.git', '', src.split(
+                '/')[3] + '/' + src.split('/')[4])
             pkgver = d.get('release', {'version': None}).get('version', None)
             if pkgver:
                 pkgver = pkgver.split('-')[0]
             if 'github' in src:
-                dl = 'https://github.com/' + target + '/archive/' + pkgver +'.tar.gz' \
+                dl = 'https://github.com/' + target + '/archive/' + pkgver + '.tar.gz' \
                     if pkgver else None
-                url = 'https://github.com/' + target + '/archive/${pkgver}.tar.gz'
+                url = 'https://github.com/' + \
+                    target + '/archive/${pkgver}.tar.gz'
             else:
                 dl = None
-            pkg_list = d.get('release', {'packages': [repo]}).get('packages', [repo])
+            pkg_list = d.get('release', {'packages': [repo]}).get(
+                'packages', [repo])
             for pkg in pkg_list:
                 siblings = len(pkg_list)-1
                 pkgname = 'ros-melodic-{}'.format(re.sub('_', '-', pkg))
@@ -9,24 +9,27 @@ from aurci.general import Routines
 def commands(option, package, verbosity, output):
     args = {
-        "clone" : (Clone, "clone"),
-        "pull" : (Pull, "pull"),
-        "build" : (Packages, "build"),
-        "deploy" : (Packages, "deploy"),
-        "update" : (Update, "update_pkgbuild")
+        "clone": (Clone, "clone"),
+        "pull": (Pull, "pull"),
+        "build": (Packages, "build"),
+        "deploy": (Packages, "deploy"),
+        "update": (Update, "update_pkgbuild")
     }
     command_class = args[option]
     getattr(command_class[0](package, verbosity, output), command_class[1])()


 def main(argv):
-    parser=argparse.ArgumentParser(prog='rosaur', add_help=True)
+    parser = argparse.ArgumentParser(prog='rosaur', add_help=True)
     exclu_group = parser.add_mutually_exclusive_group()
-    parser.add_argument('command', choices=['clone', 'pull', 'build', 'deploy', 'update'])
+    parser.add_argument('command', choices=[
+                        'clone', 'pull', 'build', 'deploy', 'update'])
     parser.add_argument('package', type=str)
-    exclu_group.add_argument('-v', '--verbose', help='Increase verbosity', action="store_true")
-    exclu_group.add_argument('-q', '--quiet', help='Suppress output', action="store_false")
+    exclu_group.add_argument(
+        '-v', '--verbose', help='Increase verbosity', action="store_true")
+    exclu_group.add_argument(
+        '-q', '--quiet', help='Suppress output', action="store_false")
     args = parser.parse_args(argv)
@@ -38,11 +41,13 @@ def main(argv):
                 commands(args.command, name, args.verbose, args.quiet)
             except KeyError:
                 print(f"Error: {args.package} could not be found in ROS Metainfo dict while running {args.command}",
-                    file=sys.stderr)
+                      file=sys.stderr)
             except FileNotFoundError:
-                print(f"Error: {args.package} folder could not be found while running {args.command}", file=sys.stderr)
+                print(
+                    f"Error: {args.package} folder could not be found while running {args.command}", file=sys.stderr)
         else:
-            print(f"Error: {args.package} is not on disk. Try to clone it.", file=sys.stderr)
+            print(
+                f"Error: {args.package} is not on disk. Try to clone it.", file=sys.stderr)
     try:
         commands(args.command, args.package, args.verbose, args.quiet)
@@ -51,5 +56,6 @@ def main(argv):
     except FileNotFoundError:
         retry_with_rosdistro_name(check_path=True)

-if __name__=='__main__':
+
+if __name__ == '__main__':
     main(sys.argv)
@@ -5,6 +5,7 @@ __license__ = "The MIT License (MIT)"
 import re

+
 def replace(oldstr, newstr, infile, dryrun=False):
     '''
     Sed-like Replace function..
@@ -20,12 +21,15 @@ def replace(oldstr, newstr, infile, dryrun=False):
     if dryrun == False:
         with open(infile, "w") as f:
             f.truncate()
-            for line in linelist: f.writelines(line)
+            for line in linelist:
+                f.writelines(line)
     elif dryrun == True:
-        for line in linelist: print(line, end='')
+        for line in linelist:
+            print(line, end='')
     else:
         exit("Unknown option specified to 'dryrun' argument, Usage: dryrun=<True|False>.")

+
 def rmlinematch(oldstr, infile, dryrun=False):
     '''
     Sed-like line deletion function based on given string..
@@ -37,16 +41,20 @@ def rmlinematch(oldstr, infile, dryrun=False):
     with open(infile) as f:
         for item in f:
             rmitem = re.match(r'.*{}'.format(oldstr), item)
-            if type(rmitem) == type(None): linelist.append(item)
+            if type(rmitem) == type(None):
+                linelist.append(item)
     if dryrun == False:
         with open(infile, "w") as f:
             f.truncate()
-            for line in linelist: f.writelines(line)
+            for line in linelist:
+                f.writelines(line)
     elif dryrun == True:
-        for line in linelist: print(line, end='')
+        for line in linelist:
+            print(line, end='')
     else:
         exit("Unknown option specified to 'dryrun' argument, Usage: dryrun=<True|False>.")

+
 def rmlinenumber(linenumber, infile, dryrun=False):
     '''
     Sed-like line deletion function based on given line number..
@@ -56,16 +64,20 @@ def rmlinenumber(linenumber, infile, dryrun=False):
     '''
     linelist = []
     linecounter = 0
-    if type(linenumber) != type(linecounter): exit("'linenumber' argument must be an integer.")
+    if type(linenumber) != type(linecounter):
+        exit("'linenumber' argument must be an integer.")
     with open(infile) as f:
         for item in f:
             linecounter = linecounter + 1
-            if linecounter != linenumber: linelist.append(item)
+            if linecounter != linenumber:
+                linelist.append(item)
     if dryrun == False:
         with open(infile, "w") as f:
             f.truncate()
-            for line in linelist: f.writelines(line)
+            for line in linelist:
+                f.writelines(line)
     elif dryrun == True:
-        for line in linelist: print(line, end='')
+        for line in linelist:
+            print(line, end='')
     else:
-        exit("Unknown option specified to 'dryrun' argument, Usage: dryrun=<True|False>.")
\ No newline at end of file
+        exit("Unknown option specified to 'dryrun' argument, Usage: dryrun=<True|False>.")
@@ -6,16 +6,17 @@ import subprocess
 import urllib
 import xml.etree.ElementTree as ET


 class Update(Routines):
-    #Singleton to not reload for every package
+    # Singleton to not reload for every package
     #metainfo_dict = None
     # packages that are missing information or are special cases
     skip = ['fcl', 'libviso2', 'viso2_ros', 'opencv3', 'roscpp_git', 'message_filters_git',
-        'ivcon', 'stage', 'nodelet_tutorial_math', 'common_tutorials',
-        'turtle_actionlib', 'pluginlib_tutorials', 'rosbag_migration_rule',
-        'actionlib_tutorials', 'ompl', 'bfl', 'convex_decomposition', 'mavlink']
+            'ivcon', 'stage', 'nodelet_tutorial_math', 'common_tutorials',
+            'turtle_actionlib', 'pluginlib_tutorials', 'rosbag_migration_rule',
+            'actionlib_tutorials', 'ompl', 'bfl', 'convex_decomposition', 'mavlink']
+
     def __init__(self, package, verbosity, ouput):
         Routines.__init__(self, package, verbosity, ouput)
@@ -29,11 +30,16 @@ class Update(Routines):
             print('pkgver not in dict: {}'.format(self.package))
             return (self.package, 'no_tag')
-        old_pkgver = re.findall(r"^pkgver=.*", open('PKGBUILD').read(), re.MULTILINE)
-        old_pkgrel = re.findall(r"^pkgrel=\d", open('PKGBUILD').read(), re.MULTILINE)
-        old_dir = re.findall(r"^_dir=.*", open('PKGBUILD').read(), re.MULTILINE)
-        old_src = re.findall(r"^source=\(.*\"", open('PKGBUILD').read(), re.MULTILINE)
-        old_sha = re.findall(r"^sha256sums=\(.*\'", open('PKGBUILD').read(), re.MULTILINE)
+        old_pkgver = re.findall(
+            r"^pkgver=.*", open('PKGBUILD').read(), re.MULTILINE)
+        old_pkgrel = re.findall(r"^pkgrel=\d", open(
+            'PKGBUILD').read(), re.MULTILINE)
+        old_dir = re.findall(
+            r"^_dir=.*", open('PKGBUILD').read(), re.MULTILINE)
+        old_src = re.findall(
+            r"^source=\(.*\"", open('PKGBUILD').read(), re.MULTILINE)
+        old_sha = re.findall(
+            r"^sha256sums=\(.*\'", open('PKGBUILD').read(), re.MULTILINE)

         if all((old_dir, old_src, old_sha, old_pkgver, old_pkgrel)):
             old_pkgver = old_pkgver[0]
@@ -47,23 +53,28 @@ class Update(Routines):
         new_pkgver = "pkgver='{}'".format(self.package_info['pkgver'])
         new_dir = '_dir="{}-${{pkgver}}/{}"'.format(self.package_info['repo'],
-            '{}'.format(self.get_nested_package_path()) if self.package_info['siblings'] else '')
-        new_src = 'source=("${{pkgname}}-${{pkgver}}.tar.gz"::"{}"'.format(self.package_info['url'])
+                                                    '{}'.format(self.get_nested_package_path()) if self.package_info['siblings'] else '')
+        new_src = 'source=("${{pkgname}}-${{pkgver}}.tar.gz"::"{}"'.format(
+            self.package_info['url'])

         if old_pkgver == new_pkgver and old_dir == new_dir and old_src == new_src:
             print('already matches: {}'.format(self.package))
             return
         print('starting: {}'.format(self.package))

-        fname = '{}-{}.tar.gz'.format(self.package, self.package_info['pkgver'])
-        #Trying to download tar archive to generate checksum
+        fname = '{}-{}.tar.gz'.format(self.package,
+                                      self.package_info['pkgver'])
+        # Trying to download tar archive to generate checksum
         try:
             urllib.request.urlretrieve(self.package_info['dl'], fname)
         except urllib.error.HTTPError:
-            raise RuntimeError('download failed: {}, URL: {}'.format(self.package, self.package_info['dl']))
+            raise RuntimeError('download failed: {}, URL: {}'.format(
+                self.package, self.package_info['dl']))

-        sha256 = subprocess.run(['sha256sum', fname], check=True, capture_output=True)
-        new_sha = "sha256sums=('{}'".format(sha256.stdout.decode('utf-8').split(' ')[0])
+        sha256 = subprocess.run(['sha256sum', fname],
+                                check=True, capture_output=True)
+        new_sha = "sha256sums=('{}'".format(
+            sha256.stdout.decode('utf-8').split(' ')[0])
         os.remove(fname)

         with open('PKGBUILD', 'r') as f:
@@ -87,11 +98,10 @@ class Update(Routines):
         subprocess.call(['git', 'commit', '-m', 'Update package'])
         subprocess.call(['git', 'push', 'origin', 'master'])

     def print_metainfo_dict(self):
-        #rosdistro = yaml.load(requests.get(self.rosdistro_url, allow_redirects=True).content,
+        # rosdistro = yaml.load(requests.get(self.rosdistro_url, allow_redirects=True).content,
        #                      Loader=yaml.BaseLoader)['repositories']
-        #for repo in rosdistro:
+        # for repo in rosdistro:
        #    print (rosdistro[repo])
         for pkg in self.metainfo_dict:
             print("\n" + pkg + ":\n")
@@ -3,5 +3,5 @@
 from aurci import parser
 from sys import argv

-if __name__=='__main__':
+if __name__ == '__main__':
     parser.main(argv[1:])