mirror of
git://git.yoctoproject.org/layerindex-web.git
synced 2025-07-19 20:59:01 +02:00
scripts/update.py: Add support for rrs
conf/local.conf: Add meta-yocto includes for getting distro information (distro aliases, maintainers, upstream {regex, tracking}) needed by rrs. scripts/update.py: Add support for rrs, with the ability to download the poky layer and set BBPATH to support meta-yocto includes in bitbake. scripts/rrs_update/*: Modules for updating the data needed by rrs: distros, maintainers, upgrades and upstream. Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
This commit is contained in:
parent
36d5746acd
commit
1152195b9e
|
@ -10,3 +10,8 @@ MACHINE = "qemux86"
|
|||
# OE-Core basic distro configuration
|
||||
DISTRO = ""
|
||||
|
||||
# Recipe reporting system configuration
|
||||
#require conf/distro/include/maintainers.inc
|
||||
#require conf/distro/include/distro_alias.inc
|
||||
#require conf/distro/include/upstream_tracking.inc
|
||||
#require conf/distro/include/package_regex.inc
|
||||
|
|
162
scripts/rrs_update/__init__.py
Normal file
162
scripts/rrs_update/__init__.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# Will create the layer and branch required by layerindex
|
||||
#
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
# Author: Anibal Limon <anibal.limon@linux.intel.com>
|
||||
# Contributor: Marius Avram <marius.avram@intel.com>
|
||||
#
|
||||
# Licensed under the MIT license, see COPYING.MIT for details
|
||||
|
||||
import sys
|
||||
import os.path
|
||||
|
||||
import utils
|
||||
import recipeparse
|
||||
import settings
|
||||
|
||||
from layerindex.models import LayerItem, Branch, LayerBranch, Recipe
|
||||
|
||||
from recipe_maintainer import update_recipe_maintainers
|
||||
from recipe_distro import update_recipe_distros
|
||||
from recipe_upgrade import update_recipe_upgrades
|
||||
from recipe_upstream import update_recipe_upstream
|
||||
|
||||
class RrsUpdater:
    """Updater for the Recipe Reporting System (RRS) tables.

    Drives the per-layer update of recipe maintainers, distros, upgrades
    and upstream information, selecting which steps to run from the
    command-line options.
    """

    def __init__(self, fetchdir, options, layerquery, fetchedrepos,
                    failedrepos, logger):
        self._fetchdir = fetchdir
        self._options = options
        self._logger = logger
        self._layerquery = layerquery

        # If none of the --recipe-* options was given, run every step.
        self._run_all = (not options.recipe_maintainers and
                not options.recipe_distros and
                not options.recipe_upgrades and
                not options.recipe_upstream)

        self._filter_recipes()

    def run(self, tinfoil):
        """Update the data needed by the Recipe reporting system."""
        if self._run_all or self._options.recipe_distros:
            # The distro package lists are only needed by the distro step.
            from oe import distro_check
            self._logger.info("Downloading distro's package information")
            distro_check.create_distro_packages_list(self._fetchdir)
            pkglst_dir = os.path.join(self._fetchdir, "package_lists")

        for layer in self._layerquery:
            (layerbranch, repodir, layerdir, config_data) = \
                    self._get_config_data(layer, self._fetchdir, tinfoil)

            # Parse every recipe once; all update steps share the result.
            envdata = self._get_recipes_envdata(layerbranch, layerdir,
                    config_data, self._options)

            if self._run_all:
                self._logger.info("Updating recipe maintainers")
                update_recipe_maintainers(envdata, self._logger)
                self._logger.info("Updating recipe distros")
                update_recipe_distros(envdata, layerbranch, pkglst_dir,
                        self._logger)
                self._logger.info("Updating recipe upgrades")
                update_recipe_upgrades(layerbranch, repodir, layerdir,
                        config_data, self._logger)
                self._logger.info("Updating recipe upstream")
                update_recipe_upstream(envdata, self._logger)
            else:
                run_maintainer = False

                if self._options.recipe_maintainers:
                    self._logger.info("Updating recipe maintainers")
                    update_recipe_maintainers(envdata, self._logger)
                    run_maintainer = True

                if self._options.recipe_distros:
                    self._logger.info("Updating recipe distros")
                    update_recipe_distros(envdata, layerbranch, pkglst_dir,
                            self._logger)

                if self._options.recipe_upgrades:
                    self._logger.info("Updating recipe upgrades")
                    update_recipe_upgrades(layerbranch, repodir, layerdir,
                            config_data, self._logger)

                if self._options.recipe_upstream:
                    # recipe upstream depends on recipe maintainers
                    if not run_maintainer:
                        self._logger.info("Updating recipe maintainers")
                        update_recipe_maintainers(envdata, self._logger)

                    self._logger.info("Updating recipe upstream")
                    update_recipe_upstream(envdata, self._logger)

    def _filter_recipes(self):
        """Remove native and old recipes.

        Native recipes are not useful because a target recipe exists for
        them; when more than one version of a recipe exists, only the
        latest one is kept.
        """
        self._remove_native_cross_initial_recipes()
        for recipe in Recipe.objects.all():
            self._remove_older_recipes(recipe)

    def _remove_native_cross_initial_recipes(self):
        # Drop recipes whose name marks them as native/nativesdk/cross/initial.
        for recipe in Recipe.objects.all():
            if (recipe.pn.find('-native') != -1 or
                    recipe.pn.find('nativesdk-') != -1 or
                    recipe.pn.find('-cross') != -1 or
                    recipe.pn.find('-initial') != -1):
                recipe.delete()
                self._logger.debug('_remove_native_recipes: %s delete' % (recipe.pn))

    def _remove_older_recipes(self, cmp_recipe):
        # Delete database rows for versions older than cmp_recipe's.
        pname = cmp_recipe.pn
        pversion = cmp_recipe.pv
        recipes = Recipe.objects.filter(pn__iexact = pname).filter(pv__lt = pversion)
        if recipes.count():
            # Remove git recipes with no versioning if tarballs exist
            if pversion == 'git':
                Recipe.objects.filter(pn__exact = pname).filter(pv__exact =
                        pversion).delete()
            else:
                recipes.delete()

    def _get_config_data(self, layer, fetchdir, tinfoil):
        """Get configuration data required by tinfoil for the poky layer."""
        urldir = layer.get_fetch_dir()
        layerbranch = layer.get_layerbranch(self._options.branch)
        repodir = os.path.join(fetchdir, urldir)
        layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
        config_data = recipeparse.setup_layer(tinfoil.config_data, fetchdir,
                layerdir, layer, layerbranch)
        return (layerbranch, repodir, layerdir, config_data)

    def _get_recipes_envdata(self, layerbranch, layerdir, config_data, options):
        """Parse all recipes. Called only once per update.

        Returns a dict mapping each Recipe object to its parsed bitbake
        environment data; recipes that fail to parse are logged and
        skipped.
        """
        envdata = {}

        if options.recipe:
            # --recipe limits the run to a single recipe by name.
            recipes = Recipe.objects.filter(layerbranch = layerbranch,
                    pn__exact = options.recipe)
        else:
            recipes = Recipe.objects.filter(layerbranch = layerbranch)

        for recipe in recipes:
            recipe_path = str(os.path.join(layerdir, recipe.full_path()))

            try:
                envdata[recipe] = bb.cache.Cache.loadDataFull(recipe_path,
                        [], config_data)
            except Exception as e:
                self._logger.warn("%s, %s couldn't be parsed, %s"
                        % (layerbranch, recipe, str(e)))
                continue

        return envdata
|
65
scripts/rrs_update/recipe_distro.py
Normal file
65
scripts/rrs_update/recipe_distro.py
Normal file
|
@ -0,0 +1,65 @@
|
|||
from rrs.models import RecipeDistro
|
||||
from django.db import transaction
|
||||
|
||||
"""
|
||||
Update recipe distros entire table.
|
||||
"""
|
||||
def update_recipe_distros(envdata, layerbranch, pkglst_dir, logger):
    """Rebuild the RecipeDistro table for *layerbranch*.

    Deletes every existing RecipeDistro row for the layer branch and
    re-creates them from the distro package lists in *pkglst_dir*,
    inside a single database transaction.
    """
    transaction.enter_transaction_management()
    transaction.managed(True)

    # Full rebuild: wipe the old rows for this layer branch first.
    RecipeDistro.objects.filter(recipe__layerbranch = layerbranch).delete()

    for recipe, data in envdata.iteritems():
        distro_info = search_package_in_distros(pkglst_dir, recipe, data)
        for distro, alias in distro_info.iteritems():
            recipedistro = RecipeDistro()
            recipedistro.recipe = recipe
            recipedistro.distro = distro
            recipedistro.alias = alias
            recipedistro.save()

    transaction.commit()
    transaction.leave_transaction_management()
|
||||
|
||||
"""
|
||||
Searches the recipe's package in major distributions.
|
||||
Returns a dictionary containing pairs of (distro name, package aliases).
|
||||
"""
|
||||
def search_package_in_distros(pkglst_dir, recipe, data):
    """Search the recipe's package in the major distributions.

    Scans the package list files in *pkglst_dir* (one file per
    ``<distro>-<release>``, lines of ``<package>:<section>``), matching
    either the recipe's bare name or the per-distro alias declared in
    the recipe's DISTRO_PN_ALIAS variable.

    Returns a dictionary mapping ``<distro>-<section>`` to the package
    name that matched.
    """
    # 'os' is not imported at this module's top level, so import locally.
    import os

    distros = {}
    distro_aliases = {}

    # Strip the native/cross/initial markers to get the bare name.
    # str.replace returns a new string, so the result must be reassigned
    # (the original code discarded it, making these calls no-ops).
    recipe_name = recipe.pn
    recipe_name = recipe_name.replace("-native", "").replace("nativesdk-", "")
    recipe_name = recipe_name.replace("-cross", "").replace("-initial", "")

    distro_alias = data.getVar('DISTRO_PN_ALIAS', True)
    if distro_alias:
        # Gets info from DISTRO_PN_ALIAS into a dictionary containing
        # the distribution as a key and the package name as value.
        for alias in distro_alias.split():
            if alias.find("=") != -1:
                (dist, pn_alias) = alias.split('=')
                distro_aliases[dist.strip().lower()] = pn_alias.strip()

    for distro_file in os.listdir(pkglst_dir):
        (distro, distro_release) = distro_file.split("-")

        # Prefer the per-distro alias when one was declared.
        if distro.lower() in distro_aliases:
            pn = distro_aliases[distro.lower()]
        else:
            pn = recipe_name

        # 'with' guarantees the handle is closed exactly once (the
        # original closed twice on the match path and leaked otherwise).
        with open(os.path.join(pkglst_dir, distro_file), "r") as f:
            for line in f:
                (pkg, section) = line.split(":")
                if pn == pkg:
                    # section still carries the trailing newline; drop it.
                    distro_complete = distro + "-" + section[:-1]
                    distros[distro_complete] = pn
                    break

    return distros
|
41
scripts/rrs_update/recipe_maintainer.py
Normal file
41
scripts/rrs_update/recipe_maintainer.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
from django.db import transaction
|
||||
from rrs.models import Maintainer, RecipeMaintainer
|
||||
|
||||
"""
|
||||
Update the recipe maintainer; if one doesn't exist, create a new one.
|
||||
"""
|
||||
def update_recipe_maintainers(envdata, logger):
    """Refresh the RecipeMaintainer table from RECIPE_MAINTAINER.

    For every parsed recipe, look up (or create) the Maintainer named in
    the recipe's RECIPE_MAINTAINER variable and link the recipe to it.
    Recipes with no maintainer set are linked to the placeholder row
    with id 0. Everything runs inside one transaction.
    """
    transaction.enter_transaction_management()
    transaction.managed(True)

    for recipe, data in envdata.iteritems():
        raw = data.getVar('RECIPE_MAINTAINER', True) or ""

        if raw == "":
            # Placeholder "No Maintainer" row.
            person = Maintainer.objects.get(id = 0)
        else:
            # RECIPE_MAINTAINER has the form "First Last <addr@host>".
            words = raw.split(' ')
            person_name = " ".join(words[0:-1])
            person_email = words[-1].replace('<', '').replace('>', '')

            try:
                person = Maintainer.objects.get(name = person_name)
                person.email = person_email
            except Maintainer.DoesNotExist:
                person = Maintainer()
                person.name = person_name
                person.email = person_email

            person.save()

        try:
            link = RecipeMaintainer.objects.get(recipe = recipe)
        except RecipeMaintainer.DoesNotExist:
            link = RecipeMaintainer()
            link.recipe = recipe
        link.maintainer = person
        link.save()

    transaction.commit()
    transaction.leave_transaction_management()
|
154
scripts/rrs_update/recipe_upgrade.py
Normal file
154
scripts/rrs_update/recipe_upgrade.py
Normal file
|
@ -0,0 +1,154 @@
|
|||
from datetime import datetime
|
||||
from datetime import timedelta
|
||||
|
||||
import utils
|
||||
import recipeparse
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from layerindex.models import Recipe
|
||||
from rrs.models import Maintainer, RecipeUpgrade
|
||||
|
||||
"""
|
||||
Discovers the upgraded packages in the last day.
|
||||
"""
|
||||
def update_recipe_upgrades(layerbranch, repodir, layerdir, config_data, logger):
    """Discover recipe upgrades committed during the last 7 days.

    Walks the git history of *repodir* (oldest first), re-parses the
    recipes touched by each commit on a temporary branch, and records a
    RecipeUpgrade entry whenever a recipe's PV differs from the latest
    version stored in the database.
    """
    today = datetime.today()
    # NOTE(review): despite the name, the window is 7 days, not 1 day.
    yesterday = today - timedelta(days = 7)
    todaystr = today.strftime("%Y-%m-%d")
    yesterdaystr = yesterday.strftime("%Y-%m-%d")

    # Temporary work branch used to check out each commit in turn.
    temp_branch = "recipe_upgrades"

    logger.debug("Check recent upgrades")

    utils.runcmd("git checkout origin/master ", repodir)

    # try to delete temp_branch if exists
    try:
        utils.runcmd("git branch -D " + temp_branch, repodir)
    except:
        pass

    transaction.enter_transaction_management()
    transaction.managed(True)
    commits = utils.runcmd("git log --since='" + yesterdaystr + "' --until='" +
            todaystr + "' --format='%H' --reverse", repodir)
    for commit in commits.split("\n"):
        if commit != "":
            logger.debug("Analysing commit %s" % commit)
            commit_files = get_commit_files(commit, repodir, layerdir, logger)

            # Point the work branch at this commit so the recipe files
            # can be parsed as they were at that point in history.
            utils.runcmd("git branch " + temp_branch, repodir)
            utils.runcmd("git checkout " + temp_branch, repodir)
            utils.runcmd("git reset --hard " + commit, repodir)

            for path in commit_files:
                try:
                    envdata = bb.cache.Cache.loadDataFull(str(path), [],
                            config_data)
                    pn = envdata.getVar("PN", True)
                    pv = envdata.getVar("PV", True)
                except Exception as e:
                    logger.warn("Recipe %s couldn't be parsed, %s" %
                            (path, str(e)))
                    continue

                try:
                    recipe = Recipe.objects.get(layerbranch = layerbranch,
                            pn__exact = pn)
                except Exception as e:
                    # Most probably a native found
                    logger.warn("Recipe %s not found in database, %s" %
                            (pn, str(e)))
                    continue

                try:
                    latest_upgrade = RecipeUpgrade.objects.filter(
                            recipe = recipe).order_by('-commit_date')[0]
                    prev_pv = latest_upgrade.version
                except Exception as e:
                    prev_pv = None

                # if no previous version in database consider it an upgrade
                if not prev_pv or prev_pv != pv:
                    logger.debug("Detected upgrade for %s in commit %s." % (pn, commit))
                    create_upgrade(commit, repodir, recipe, pv, logger)

            # Restore master and drop the work branch so it can be
            # recreated for the next commit.
            utils.runcmd("git checkout origin/master ", repodir)
            utils.runcmd("git branch -D " + temp_branch, repodir)

    transaction.commit()
    transaction.leave_transaction_management()
|
||||
|
||||
"""
|
||||
Returns a list containing the fullpaths to the recipes from a commit.
|
||||
"""
|
||||
def get_commit_files(commit, repodir, layerdir, logger):
    """Return the full paths of the recipe files touched by *commit*.

    Files deleted by the commit and non-recipe files are skipped; file
    type detection is delegated to recipeparse.detect_file_type().
    """
    # 'os' is not imported at this module's top level, so import locally.
    import os

    commit_files = []
    layerdir_start = os.path.normpath(layerdir) + os.sep

    files = utils.runcmd("git log --name-only --format='%n' -n 1 " + commit,
            repodir, logger=logger)

    for f in files.split("\n"):
        if f != "":
            fullpath = os.path.join(repodir, f)
            # Skip deleted files in commit
            if not os.path.exists(fullpath):
                continue
            (typename, _, filename) = recipeparse.detect_file_type(fullpath,
                    layerdir_start)
            if typename == 'recipe':
                commit_files.append(fullpath)

    return commit_files
|
||||
|
||||
"""
|
||||
Insert new entry in the RecipeUpgrade table.
|
||||
"""
|
||||
def create_upgrade(commit, repodir, recipe, pv, logger):
    """Insert a new entry in the RecipeUpgrade table for *commit*.

    Reads author/committer details and the subject line from git,
    resolves (or creates) the corresponding Maintainer, and stores the
    upgrade with naive-UTC dates.
    """
    from email.utils import parsedate_tz, mktime_tz
    # %an;%ae;%ad;%cd -> author name, author email, author date, commit date
    info = utils.runcmd("git log --format='%an;%ae;%ad;%cd' --date=rfc -n 1 " + commit,
            destdir=repodir, logger=logger)

    maintainer_name = info.split(';')[0]
    maintainer_email = info.split(';')[1]
    author_date = info.split(';')[2]
    commit_date = info.split(';')[3]

    maintainer = get_maintainer(maintainer_name, maintainer_email, logger)

    title = utils.runcmd("git log --format='%s' -n 1 " + commit,
            repodir, logger=logger)

    upgrade = RecipeUpgrade()
    upgrade.recipe = recipe
    upgrade.maintainer = maintainer
    # RFC 2822 dates from git are converted to naive UTC datetimes.
    upgrade.author_date = datetime.utcfromtimestamp(mktime_tz(
            parsedate_tz(author_date)))
    upgrade.commit_date = datetime.utcfromtimestamp(mktime_tz(
            parsedate_tz(commit_date)))
    upgrade.version = pv
    upgrade.sha1 = commit
    upgrade.title = title.strip()
    upgrade.save()
|
||||
|
||||
"""
|
||||
Gets maintainer with the given details from the database.
|
||||
If the maintainer doesn't exist it will be created.
|
||||
"""
|
||||
def get_maintainer(name, email, logger):
    """Return the Maintainer row matching *name*, creating it if absent.

    A newly created maintainer is saved immediately and logged; an
    existing row is returned as-is (its stored email is not updated).
    """
    try:
        return Maintainer.objects.get(name = name)
    except Maintainer.DoesNotExist:
        person = Maintainer()
        person.name = name
        person.email = email
        person.save()

        logger.debug("Create new maintainer %s: %s" %
                (person.name, person.email))

        return person
|
216
scripts/rrs_update/recipe_upstream.py
Normal file
216
scripts/rrs_update/recipe_upstream.py
Normal file
|
@ -0,0 +1,216 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import threading
|
||||
from multiprocessing import cpu_count
|
||||
|
||||
from django.db import transaction
|
||||
from rrs.models import RecipeUpstream
|
||||
|
||||
git_regex = re.compile("(?P<gprefix>(v|))(?P<gver>((\d+[\.\-_]*)+))(?P<gmiddle>(\+|)(git|)(r|)(AUTOINC|)(\+|))(?P<ghash>.*)")
|
||||
|
||||
"""
|
||||
Update Recipe upstream information searching in upstream sites.
|
||||
Adds information only when the version changes.
|
||||
"""
|
||||
def update_recipe_upstream(envdata, logger):
    """Refresh upstream version information for the parsed recipes.

    Queries upstream sites via get_upstream_info() and, inside a single
    transaction, records a new RecipeUpstream row for each recipe whose
    version or status changed.
    """
    upstream_info = get_upstream_info(envdata, logger)

    transaction.enter_transaction_management()
    transaction.managed(True)

    for recipe, info in upstream_info.iteritems():
        create_recipe_upstream(recipe, info, logger)

    transaction.commit()
    transaction.leave_transaction_management()
|
||||
|
||||
def create_recipe_upstream(recipe, recipe_result, logger):
    """Store *recipe_result* as a new RecipeUpstream row when it changed.

    A row is only added when the upstream version or status differs from
    the most recent entry for this recipe, or when no entry exists yet.
    """
    try:
        previous = RecipeUpstream.objects.filter(recipe = recipe).order_by("-date")[0]
        changed = (recipe_result['version'] != previous.version or
                   recipe_result['status'] != previous.status)
    except Exception:
        # No previous entry (or the lookup failed): record this result.
        changed = True

    if not changed:
        return

    entry = RecipeUpstream()
    entry.recipe = recipe
    entry.version = recipe_result['version']
    entry.type = recipe_result['type']
    entry.status = recipe_result['status']
    entry.no_update_reason = recipe_result['no_update_reason']
    entry.date = recipe_result['date']
    entry.save()
    logger.debug("Add report for recipe %s" % recipe.pn)
|
||||
|
||||
"""
|
||||
Get upstream info for all Recipes.
|
||||
"""
|
||||
def get_upstream_info(envdata, logger):
    """Collect upstream version info for all recipes in *envdata*.

    Spawns one worker thread per CPU (the lookups are network bound, not
    CPU bound); workers pop recipes from a shared copy of *envdata* and
    write their findings into a shared result dict under a lock.

    Returns a dict mapping Recipe -> result dict (see
    get_upstream_info_thread for the result layout).
    """
    class GenericThread(threading.Thread):
        # Minimal wrapper so a plain callable can run as a thread.
        def __init__(self, function):
            threading.Thread.__init__(self)
            self.function = function

        def run(self):
            self.function()

    # Work on a copy: worker threads consume (delete) entries from it.
    envdata_tmp = envdata.copy()
    result = {}

    recipe_mutex = threading.Lock()
    result_mutex = threading.Lock()

    # Find upstream versions in parallel use threads = cpu_count
    # since tasks are not CPU intensive
    threads = []
    thread_count = cpu_count()

    for t in range(0, thread_count):
        threads.append(GenericThread(lambda: get_upstream_info_thread(envdata_tmp, result, recipe_mutex, result_mutex, logger)))

    for t in threads:
        t.start()

    for t in threads:
        t.join()

    return result
|
||||
|
||||
def get_upstream_info_thread(envdata, result, recipe_mutex, result_mutex, logger):
    """Worker loop: pop recipes from *envdata* and compute upstream info.

    Runs until the shared *envdata* dict is drained. For each recipe it
    determines the fetch type from SRC_URI, queries upstream for the
    latest version and writes a result dict into *result* under
    *result_mutex*. Result keys: version, type, status,
    no_update_reason, date -- for the type/status codes see
    RecipeUpstream.RECIPE_UPSTREAM_TYPE_CHOICES and
    RECIPE_UPSTREAM_STATUS_CHOICES in rrs.models.
    """
    from datetime import datetime

    def vercmp_string(a, b, recipe_type):
        # Compare two versions; for git recipes compare only the numeric
        # part extracted by git_regex, falling back to a plain compare.
        cmp_result = None

        if recipe_type == 'git':
            match_a = git_regex.match(a)
            match_b = git_regex.match(b)

            if match_a and match_b:
                cmp_result = bb.utils.vercmp_string(match_a.group('gver'),
                        match_b.group('gver'))

        if cmp_result is None:
            cmp_result = bb.utils.vercmp_string(a, b)

        return cmp_result

    while True:
        recipe = None
        data = None
        recipe_type = None
        recipe_uri = None

        # Pop one recipe under the lock; exit when the queue is drained.
        recipe_mutex.acquire()
        if len(envdata) == 0:
            recipe_mutex.release()
            break

        recipe = envdata.items()[0][0]
        data = envdata[recipe]

        del envdata[recipe]
        recipe_mutex.release()

        # Get recipe SRC_URI and type
        found = 0
        for uri in data.getVar("SRC_URI", True).split():
            m = re.compile('(?P<type>[^:]*)').match(uri)
            if not m:
                # Fix: the original raised a bare 'MalformedUrl', which is
                # not defined in this module (NameError); use the bitbake
                # exception class, which is already in scope via bb.fetch2.
                raise bb.fetch2.MalformedUrl(uri)
            elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
                found = 1
                recipe_uri = uri
                recipe_type = m.group('type')
                break
        if not found:
            recipe_type = "file"

        recipe_pv = data.getVar('PV', True)

        # Build result dictionary (version, type, status, no_update_reason, date, save),
        # for types see RecipeUpstream.RECIPE_UPSTREAM_TYPE_CHOICES,
        # for status see RecipeUpstream.RECIPE_UPSTREAM_STATUS_CHOICES.
        recipe_result = {}
        recipe_result['version'] = ''
        recipe_result['type'] = ''
        recipe_result['status'] = ''
        recipe_result['no_update_reason'] = ''
        recipe_result['date'] = ''

        manual_upstream_version = data.getVar("RECIPE_UPSTREAM_VERSION", True)
        if manual_upstream_version:
            # Manually-tracked version ('M') declared in the recipe.
            recipe_result['version'] = manual_upstream_version
            recipe_result['type'] = 'M'

            manual_upstream_date = data.getVar("CHECK_DATE", True)
            if manual_upstream_date:
                date = datetime.strptime(manual_upstream_date, "%b %d, %Y")
            else:
                date = datetime.utcnow()
            recipe_result['date'] = date
        elif recipe_type == "file":
            # files are always uptodate
            recipe_result['version'] = recipe_pv
            recipe_result['type'] = 'A'
            recipe_result['date'] = datetime.utcnow()
        elif recipe_type in ['http', 'https', 'ftp', 'git']:
            try:
                ud = bb.fetch2.FetchData(recipe_uri, data)

                pupver = ud.method.latest_versionstring(ud, data)
                if (pupver == ''): # try to find again due to timeout errors
                    pupver = ud.method.latest_versionstring(ud, data)

                if recipe_type == 'git':
                    # Rebuild a PV-style string around the upstream git
                    # version/revision so it compares against recipe PV.
                    git_regex_match = git_regex.match(recipe_pv)

                    if git_regex_match:
                        pupver = git_regex_match.group('gprefix') + pupver

                        if not pupver:
                            pupver = git_regex_match.group('gver')

                        pupver += git_regex_match.group('gmiddle')

                        latest_revision = ud.method.latest_revision(ud, data, ud.names[0])
                        if git_regex_match.group('ghash') == 'X':
                            pupver += 'AUTOINC+' + latest_revision[:10]
                        else:
                            pupver += latest_revision[:len(git_regex_match.group('ghash'))]

                recipe_result['version'] = pupver
                recipe_result['type'] = 'A'
                recipe_result['date'] = datetime.utcnow()
            except Exception as inst:
                logger.warn("get_upstream_info, recipe %s, pv %s, unexpected error: %s"
                        % (recipe.pn, recipe_pv, repr(inst)))

                recipe_result['date'] = datetime.utcnow()
        else:
            logger.warn("get_upstream_info, recipe %s protocol %s isn't implemented"
                    % (str(recipe.pn), recipe_type))

            recipe_result['date'] = datetime.utcnow()

        no_update_reason = data.getVar("RECIPE_NO_UPDATE_REASON", True) or ''
        recipe_result['no_update_reason'] = no_update_reason

        if not recipe_result['version']:
            recipe_result['status'] = 'U' # Unknown, need to review why
        elif vercmp_string(recipe_pv, recipe_result['version'], recipe_type) == -1:
            recipe_result['status'] = 'N' # Not update
        elif vercmp_string(recipe_pv, recipe_result['version'], recipe_type) == 0:
            recipe_result['status'] = 'Y' # Up-to-date
        elif vercmp_string(recipe_pv, recipe_result['version'], recipe_type) == 1:
            recipe_result['status'] = 'D' # Downgrade, need to review why

        result_mutex.acquire()
        result[recipe] = recipe_result
        result_mutex.release()
|
|
@ -61,6 +61,25 @@ def main():
|
|||
parser.add_option("-q", "--quiet",
|
||||
help = "Hide all output except error messages",
|
||||
action="store_const", const=logging.ERROR, dest="loglevel")
|
||||
if settings.APPLICATION == 'rrs':
|
||||
parser.add_option("", "--only-layerindex",
|
||||
help = "Only run layerindex update",
|
||||
action="store_true", dest="only_layerindex")
|
||||
parser.add_option("", "--recipe",
|
||||
help = "Specify recipe to update",
|
||||
action="store", dest="recipe")
|
||||
parser.add_option("", "--recipe-maintainers",
|
||||
help = "Only update recipe maintainers",
|
||||
action="store_true", dest="recipe_maintainers")
|
||||
parser.add_option("", "--recipe-distros",
|
||||
help = "Only update recipe distros",
|
||||
action="store_true", dest="recipe_distros")
|
||||
parser.add_option("", "--recipe-upgrades",
|
||||
help = "Only update recipe upgrades",
|
||||
action="store_true", dest="recipe_upgrades")
|
||||
parser.add_option("", "--recipe-upstream",
|
||||
help = "Only update recipe upstream",
|
||||
action="store_true", dest="recipe_upstream")
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
if len(args) > 1:
|
||||
|
@ -92,6 +111,13 @@ def main():
|
|||
sys.exit(1)
|
||||
|
||||
bitbakepath = update_repo(fetchdir, 'bitbake', settings.BITBAKE_REPO_URL, logger)
|
||||
if settings.APPLICATION == 'rrs':
|
||||
pokypath = update_repo(fetchdir, 'poky', settings.POKY_REPO_URL, logger)
|
||||
# add path for use oe-core libraries
|
||||
sys.path.insert(0, os.path.realpath(os.path.join(pokypath, 'meta', 'lib')))
|
||||
# add support for load distro include files
|
||||
os.environ['BBPATH'] = os.path.join(pokypath, 'meta-yocto')
|
||||
|
||||
(layerquery, fetchedrepos, failedrepos) = update_layers(options, fetchdir, logger)
|
||||
(tinfoil, tempdir) = get_tinfoil(branch, bitbakepath, options, logger)
|
||||
|
||||
|
@ -99,6 +125,13 @@ def main():
|
|||
failedrepos, logger)
|
||||
layerindex_updater.run(tinfoil)
|
||||
|
||||
if settings.APPLICATION == 'rrs':
|
||||
from rrs_update import RrsUpdater
|
||||
rrs_updater = RrsUpdater(fetchdir, options, layerquery,
|
||||
fetchedrepos, failedrepos, logger)
|
||||
if not options.only_layerindex:
|
||||
rrs_updater.run(tinfoil)
|
||||
|
||||
shutil.rmtree(tempdir)
|
||||
utils.unlock_file(lockfile)
|
||||
|
||||
|
|
Loading…
Reference in New Issue
Block a user