bitbake: fetch2: Stop passing around the pointless url parameter

There is no good reason to keep passing around the url parameter when
it's contained within urldata (ud). This is left around due to
legacy reasons; some functions take it, some don't, and it's time
to clean up.

This is fetcher internal API, there are a tiny number of external users
of the internal API (buildhistory and distrodata) which can be fixed up
after this change.

(Bitbake rev: 6a48474de9505a3700863f31839a7c53c5e18a8d)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2013-11-19 14:32:08 +00:00
parent 4acc7322a2
commit 9d7f8e2a20
15 changed files with 122 additions and 122 deletions

View File

@ -619,7 +619,7 @@ def get_srcrev(d):
raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
if len(scms) == 1 and len(urldata[scms[0]].names) == 1: if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
autoinc, rev = urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0]) autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
if len(rev) > 10: if len(rev) > 10:
rev = rev[:10] rev = rev[:10]
if autoinc: if autoinc:
@ -637,7 +637,7 @@ def get_srcrev(d):
for scm in scms: for scm in scms:
ud = urldata[scm] ud = urldata[scm]
for name in ud.names: for name in ud.names:
autoinc, rev = ud.method.sortable_revision(scm, ud, d, name) autoinc, rev = ud.method.sortable_revision(ud, d, name)
seenautoinc = seenautoinc or autoinc seenautoinc = seenautoinc or autoinc
if len(rev) > 10: if len(rev) > 10:
rev = rev[:10] rev = rev[:10]
@ -777,17 +777,17 @@ def try_mirror_url(origud, ud, ld, check = False):
# False means try another url # False means try another url
try: try:
if check: if check:
found = ud.method.checkstatus(ud.url, ud, ld) found = ud.method.checkstatus(ud, ld)
if found: if found:
return found return found
return False return False
os.chdir(ld.getVar("DL_DIR", True)) os.chdir(ld.getVar("DL_DIR", True))
if not os.path.exists(ud.donestamp) or ud.method.need_update(ud.url, ud, ld): if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
ud.method.download(ud.url, ud, ld) ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"): if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(ud.url, ud, ld) ud.method.build_mirror_data(ud, ld)
if not ud.localpath or not os.path.exists(ud.localpath): if not ud.localpath or not os.path.exists(ud.localpath):
return False return False
@ -805,10 +805,10 @@ def try_mirror_url(origud, ud, ld, check = False):
dest = os.path.join(dldir, os.path.basename(ud.localpath)) dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest): if not os.path.exists(dest):
os.symlink(ud.localpath, dest) os.symlink(ud.localpath, dest)
if not os.path.exists(origud.donestamp) or origud.method.need_update(origud.url, origud, ld): if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
origud.method.download(origud.url, origud, ld) origud.method.download(origud, ld)
if hasattr(ud.method,"build_mirror_data"): if hasattr(ud.method,"build_mirror_data"):
origud.method.build_mirror_data(origud.url, origud, ld) origud.method.build_mirror_data(origud, ld)
return None return None
# Otherwise the result is a local file:// and we symlink to it # Otherwise the result is a local file:// and we symlink to it
if not os.path.exists(origud.localpath): if not os.path.exists(origud.localpath):
@ -888,7 +888,7 @@ def srcrev_internal_helper(ud, d, name):
var = "SRCREV_%s_pn-%s" % (name, pn) var = "SRCREV_%s_pn-%s" % (name, pn)
raise FetchError("Please set %s to a valid value" % var, ud.url) raise FetchError("Please set %s to a valid value" % var, ud.url)
if rev == "AUTOINC": if rev == "AUTOINC":
rev = ud.method.latest_revision(ud.url, ud, d, name) rev = ud.method.latest_revision(ud, d, name)
return rev return rev
@ -1009,7 +1009,7 @@ class FetchData(object):
self.method = None self.method = None
for m in methods: for m in methods:
if m.supports(url, self, d): if m.supports(self, d):
self.method = m self.method = m
break break
@ -1031,7 +1031,7 @@ class FetchData(object):
self.localpath = self.parm["localpath"] self.localpath = self.parm["localpath"]
self.basename = os.path.basename(self.localpath) self.basename = os.path.basename(self.localpath)
elif self.localfile: elif self.localfile:
self.localpath = self.method.localpath(self.url, self, d) self.localpath = self.method.localpath(self, d)
dldir = d.getVar("DL_DIR", True) dldir = d.getVar("DL_DIR", True)
# Note: .done and .lock files should always be in DL_DIR whereas localpath may not be. # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
@ -1055,7 +1055,7 @@ class FetchData(object):
def setup_localpath(self, d): def setup_localpath(self, d):
if not self.localpath: if not self.localpath:
self.localpath = self.method.localpath(self.url, self, d) self.localpath = self.method.localpath(self, d)
def getSRCDate(self, d): def getSRCDate(self, d):
""" """
@ -1079,13 +1079,13 @@ class FetchMethod(object):
def __init__(self, urls = []): def __init__(self, urls = []):
self.urls = [] self.urls = []
def supports(self, url, urldata, d): def supports(self, urldata, d):
""" """
Check to see if this fetch class supports a given url. Check to see if this fetch class supports a given url.
""" """
return 0 return 0
def localpath(self, url, urldata, d): def localpath(self, urldata, d):
""" """
Return the local filename of a given url assuming a successful fetch. Return the local filename of a given url assuming a successful fetch.
Can also setup variables in urldata for use in go (saving code duplication Can also setup variables in urldata for use in go (saving code duplication
@ -1129,7 +1129,7 @@ class FetchMethod(object):
urls = property(getUrls, setUrls, None, "Urls property") urls = property(getUrls, setUrls, None, "Urls property")
def need_update(self, url, ud, d): def need_update(self, ud, d):
""" """
Force a fetch, even if localpath exists? Force a fetch, even if localpath exists?
""" """
@ -1143,7 +1143,7 @@ class FetchMethod(object):
""" """
return False return False
def download(self, url, urldata, d): def download(self, urldata, d):
""" """
Fetch urls Fetch urls
Assumes localpath was called first Assumes localpath was called first
@ -1267,13 +1267,13 @@ class FetchMethod(object):
""" """
bb.utils.remove(urldata.localpath) bb.utils.remove(urldata.localpath)
def try_premirror(self, url, urldata, d): def try_premirror(self, urldata, d):
""" """
Should premirrors be used? Should premirrors be used?
""" """
return True return True
def checkstatus(self, url, urldata, d): def checkstatus(self, urldata, d):
""" """
Check the status of a URL Check the status of a URL
Assumes localpath was called first Assumes localpath was called first
@ -1281,7 +1281,7 @@ class FetchMethod(object):
logger.info("URL %s could not be checked for status since no method exists.", url) logger.info("URL %s could not be checked for status since no method exists.", url)
return True return True
def latest_revision(self, url, ud, d, name): def latest_revision(self, ud, d, name):
""" """
Look in the cache for the latest revision, if not present ask the SCM. Look in the cache for the latest revision, if not present ask the SCM.
""" """
@ -1289,19 +1289,19 @@ class FetchMethod(object):
raise ParameterError("The fetcher for this URL does not support _latest_revision", url) raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
revs = bb.persist_data.persist('BB_URI_HEADREVS', d) revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
key = self.generate_revision_key(url, ud, d, name) key = self.generate_revision_key(ud, d, name)
try: try:
return revs[key] return revs[key]
except KeyError: except KeyError:
revs[key] = rev = self._latest_revision(url, ud, d, name) revs[key] = rev = self._latest_revision(ud, d, name)
return rev return rev
def sortable_revision(self, url, ud, d, name): def sortable_revision(self, ud, d, name):
latest_rev = self._build_revision(url, ud, d, name) latest_rev = self._build_revision(ud, d, name)
return True, str(latest_rev) return True, str(latest_rev)
def generate_revision_key(self, url, ud, d, name): def generate_revision_key(self, ud, d, name):
key = self._revision_key(url, ud, d, name) key = self._revision_key(ud, d, name)
return "%s-%s" % (key, d.getVar("PN", True) or "") return "%s-%s" % (key, d.getVar("PN", True) or "")
class Fetch(object): class Fetch(object):
@ -1372,9 +1372,9 @@ class Fetch(object):
try: try:
self.d.setVar("BB_NO_NETWORK", network) self.d.setVar("BB_NO_NETWORK", network)
if os.path.exists(ud.donestamp) and not m.need_update(u, ud, self.d): if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
localpath = ud.localpath localpath = ud.localpath
elif m.try_premirror(u, ud, self.d): elif m.try_premirror(ud, self.d):
logger.debug(1, "Trying PREMIRRORS") logger.debug(1, "Trying PREMIRRORS")
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
localpath = try_mirrors(self.d, ud, mirrors, False) localpath = try_mirrors(self.d, ud, mirrors, False)
@ -1385,12 +1385,12 @@ class Fetch(object):
os.chdir(self.d.getVar("DL_DIR", True)) os.chdir(self.d.getVar("DL_DIR", True))
firsterr = None firsterr = None
if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)): if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
try: try:
logger.debug(1, "Trying Upstream") logger.debug(1, "Trying Upstream")
m.download(u, ud, self.d) m.download(ud, self.d)
if hasattr(m, "build_mirror_data"): if hasattr(m, "build_mirror_data"):
m.build_mirror_data(u, ud, self.d) m.build_mirror_data(ud, self.d)
localpath = ud.localpath localpath = ud.localpath
# early checksum verify, so that if checksum mismatched, # early checksum verify, so that if checksum mismatched,
# fetcher still have chance to fetch from mirror # fetcher still have chance to fetch from mirror
@ -1452,7 +1452,7 @@ class Fetch(object):
if not ret: if not ret:
# Next try checking from the original uri, u # Next try checking from the original uri, u
try: try:
ret = m.checkstatus(u, ud, self.d) ret = m.checkstatus(ud, self.d)
except: except:
# Finally, try checking uri, u, from MIRRORS # Finally, try checking uri, u, from MIRRORS
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))

View File

@ -34,7 +34,7 @@ from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger from bb.fetch2 import logger
class Bzr(FetchMethod): class Bzr(FetchMethod):
def supports(self, url, ud, d): def supports(self, ud, d):
return ud.type in ['bzr'] return ud.type in ['bzr']
def urldata_init(self, ud, d): def urldata_init(self, ud, d):
@ -81,12 +81,12 @@ class Bzr(FetchMethod):
return bzrcmd return bzrcmd
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
bzrcmd = self._buildbzrcommand(ud, d, "update") bzrcmd = self._buildbzrcommand(ud, d, "update")
logger.debug(1, "BZR Update %s", loc) logger.debug(1, "BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url) bb.fetch2.check_network_access(d, bzrcmd, ud.url)
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
runfetchcmd(bzrcmd, d) runfetchcmd(bzrcmd, d)
@ -94,7 +94,7 @@ class Bzr(FetchMethod):
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch") bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url) bb.fetch2.check_network_access(d, bzrcmd, ud.url)
logger.debug(1, "BZR Checkout %s", loc) logger.debug(1, "BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir) bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir) os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", bzrcmd) logger.debug(1, "Running %s", bzrcmd)
@ -114,17 +114,17 @@ class Bzr(FetchMethod):
def supports_srcrev(self): def supports_srcrev(self):
return True return True
def _revision_key(self, url, ud, d, name): def _revision_key(self, ud, d, name):
""" """
Return a unique key for the url Return a unique key for the url
""" """
return "bzr:" + ud.pkgdir return "bzr:" + ud.pkgdir
def _latest_revision(self, url, ud, d, name): def _latest_revision(self, ud, d, name):
""" """
Return the latest upstream revision number Return the latest upstream revision number
""" """
logger.debug(2, "BZR fetcher hitting network for %s", url) logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url) bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
@ -132,12 +132,12 @@ class Bzr(FetchMethod):
return output.strip() return output.strip()
def sortable_revision(self, url, ud, d, name): def sortable_revision(self, ud, d, name):
""" """
Return a sortable revision number which in our case is the revision number Return a sortable revision number which in our case is the revision number
""" """
return False, self._build_revision(url, ud, d) return False, self._build_revision(ud, d)
def _build_revision(self, url, ud, d): def _build_revision(self, ud, d):
return ud.revision return ud.revision

View File

@ -36,7 +36,7 @@ class Cvs(FetchMethod):
""" """
Class to fetch a module or modules from cvs repositories Class to fetch a module or modules from cvs repositories
""" """
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with cvs. Check to see if a given url can be fetched with cvs.
""" """
@ -65,14 +65,14 @@ class Cvs(FetchMethod):
ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
def need_update(self, url, ud, d): def need_update(self, ud, d):
if (ud.date == "now"): if (ud.date == "now"):
return True return True
if not os.path.exists(ud.localpath): if not os.path.exists(ud.localpath):
return True return True
return False return False
def download(self, loc, ud, d): def download(self, ud, d):
method = ud.parm.get('method', 'pserver') method = ud.parm.get('method', 'pserver')
localdir = ud.parm.get('localdir', ud.module) localdir = ud.parm.get('localdir', ud.module)
@ -124,13 +124,13 @@ class Cvs(FetchMethod):
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
moddir = os.path.join(pkgdir, localdir) moddir = os.path.join(pkgdir, localdir)
if os.access(os.path.join(moddir, 'CVS'), os.R_OK): if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
logger.info("Update " + loc) logger.info("Update " + ud.url)
bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url) bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
# update sources there # update sources there
os.chdir(moddir) os.chdir(moddir)
cmd = cvsupdatecmd cmd = cvsupdatecmd
else: else:
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
# check out sources there # check out sources there
bb.utils.mkdirhier(pkgdir) bb.utils.mkdirhier(pkgdir)
os.chdir(pkgdir) os.chdir(pkgdir)

View File

@ -73,7 +73,7 @@ class Git(FetchMethod):
def init(self, d): def init(self, d):
pass pass
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with git. Check to see if a given url can be fetched with git.
""" """
@ -142,10 +142,10 @@ class Git(FetchMethod):
ud.localfile = ud.clonedir ud.localfile = ud.clonedir
def localpath(self, url, ud, d): def localpath(self, ud, d):
return ud.clonedir return ud.clonedir
def need_update(self, u, ud, d): def need_update(self, ud, d):
if not os.path.exists(ud.clonedir): if not os.path.exists(ud.clonedir):
return True return True
os.chdir(ud.clonedir) os.chdir(ud.clonedir)
@ -156,7 +156,7 @@ class Git(FetchMethod):
return True return True
return False return False
def try_premirror(self, u, ud, d): def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors # If we don't do this, updating an existing checkout with only premirrors
# is not possible # is not possible
if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
@ -165,7 +165,7 @@ class Git(FetchMethod):
return False return False
return True return True
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
if ud.user: if ud.user:
@ -214,7 +214,7 @@ class Git(FetchMethod):
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d) runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
ud.repochanged = True ud.repochanged = True
def build_mirror_data(self, url, ud, d): def build_mirror_data(self, ud, d):
# Generate a mirror tarball if needed # Generate a mirror tarball if needed
if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)): if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
# it's possible that this symlink points to read-only filesystem with PREMIRROR # it's possible that this symlink points to read-only filesystem with PREMIRROR
@ -292,13 +292,13 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output)) raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0" return output.split()[0] != "0"
def _revision_key(self, url, ud, d, name): def _revision_key(self, ud, d, name):
""" """
Return a unique key for the url Return a unique key for the url
""" """
return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name] return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
def _latest_revision(self, url, ud, d, name): def _latest_revision(self, ud, d, name):
""" """
Compute the HEAD revision for the url Compute the HEAD revision for the url
""" """
@ -314,14 +314,14 @@ class Git(FetchMethod):
bb.fetch2.check_network_access(d, cmd) bb.fetch2.check_network_access(d, cmd)
output = runfetchcmd(cmd, d, True) output = runfetchcmd(cmd, d, True)
if not output: if not output:
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url) raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
return output.split()[0] return output.split()[0]
def _build_revision(self, url, ud, d, name): def _build_revision(self, ud, d, name):
return ud.revisions[name] return ud.revisions[name]
def checkstatus(self, uri, ud, d): def checkstatus(self, ud, d):
fetchcmd = "%s ls-remote %s" % (ud.basecmd, uri) fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
try: try:
runfetchcmd(fetchcmd, d, quiet=True) runfetchcmd(fetchcmd, d, quiet=True)
return True return True

View File

@ -27,7 +27,7 @@ from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger from bb.fetch2 import logger
class GitSM(Git): class GitSM(Git):
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with git. Check to see if a given url can be fetched with git.
""" """
@ -42,7 +42,7 @@ class GitSM(Git):
pass pass
return False return False
def update_submodules(self, u, ud, d): def update_submodules(self, ud, d):
# We have to convert bare -> full repo, do the submodule bit, then convert back # We have to convert bare -> full repo, do the submodule bit, then convert back
tmpclonedir = ud.clonedir + ".tmp" tmpclonedir = ud.clonedir + ".tmp"
gitdir = tmpclonedir + os.sep + ".git" gitdir = tmpclonedir + os.sep + ".git"
@ -58,13 +58,13 @@ class GitSM(Git):
os.rename(gitdir, ud.clonedir,) os.rename(gitdir, ud.clonedir,)
bb.utils.remove(tmpclonedir, True) bb.utils.remove(tmpclonedir, True)
def download(self, loc, ud, d): def download(self, ud, d):
Git.download(self, loc, ud, d) Git.download(self, ud, d)
os.chdir(ud.clonedir) os.chdir(ud.clonedir)
submodules = self.uses_submodules(ud, d) submodules = self.uses_submodules(ud, d)
if submodules: if submodules:
self.update_submodules(loc, ud, d) self.update_submodules(ud, d)
def unpack(self, ud, destdir, d): def unpack(self, ud, destdir, d):
Git.unpack(self, ud, destdir, d) Git.unpack(self, ud, destdir, d)

View File

@ -37,7 +37,7 @@ from bb.fetch2 import logger
class Hg(FetchMethod): class Hg(FetchMethod):
"""Class to fetch from mercurial repositories""" """Class to fetch from mercurial repositories"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with mercurial. Check to see if a given url can be fetched with mercurial.
""" """
@ -66,7 +66,7 @@ class Hg(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
def need_update(self, url, ud, d): def need_update(self, ud, d):
revTag = ud.parm.get('rev', 'tip') revTag = ud.parm.get('rev', 'tip')
if revTag == "tip": if revTag == "tip":
return True return True
@ -126,14 +126,14 @@ class Hg(FetchMethod):
return cmd return cmd
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
updatecmd = self._buildhgcommand(ud, d, "pull") updatecmd = self._buildhgcommand(ud, d, "pull")
logger.info("Update " + loc) logger.info("Update " + ud.url)
# update sources there # update sources there
os.chdir(ud.moddir) os.chdir(ud.moddir)
logger.debug(1, "Running %s", updatecmd) logger.debug(1, "Running %s", updatecmd)
@ -142,7 +142,7 @@ class Hg(FetchMethod):
else: else:
fetchcmd = self._buildhgcommand(ud, d, "fetch") fetchcmd = self._buildhgcommand(ud, d, "fetch")
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
# check out sources there # check out sources there
bb.utils.mkdirhier(ud.pkgdir) bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir) os.chdir(ud.pkgdir)
@ -169,7 +169,7 @@ class Hg(FetchMethod):
def supports_srcrev(self): def supports_srcrev(self):
return True return True
def _latest_revision(self, url, ud, d, name): def _latest_revision(self, ud, d, name):
""" """
Compute tip revision for the url Compute tip revision for the url
""" """
@ -177,10 +177,10 @@ class Hg(FetchMethod):
output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
return output.strip() return output.strip()
def _build_revision(self, url, ud, d, name): def _build_revision(self, ud, d, name):
return ud.revision return ud.revision
def _revision_key(self, url, ud, d, name): def _revision_key(self, ud, d, name):
""" """
Return a unique key for the url Return a unique key for the url
""" """

View File

@ -34,7 +34,7 @@ from bb.fetch2 import FetchMethod, FetchError
from bb.fetch2 import logger from bb.fetch2 import logger
class Local(FetchMethod): class Local(FetchMethod):
def supports(self, url, urldata, d): def supports(self, urldata, d):
""" """
Check to see if a given url represents a local fetch. Check to see if a given url represents a local fetch.
""" """
@ -47,7 +47,7 @@ class Local(FetchMethod):
ud.basepath = ud.decodedurl ud.basepath = ud.decodedurl
return return
def localpath(self, url, urldata, d): def localpath(self, urldata, d):
""" """
Return the local filename of a given url assuming a successful fetch. Return the local filename of a given url assuming a successful fetch.
""" """
@ -75,14 +75,14 @@ class Local(FetchMethod):
return dldirfile return dldirfile
return newpath return newpath
def need_update(self, url, ud, d): def need_update(self, ud, d):
if url.find("*") != -1: if ud.url.find("*") != -1:
return False return False
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
return False return False
return True return True
def download(self, url, urldata, d): def download(self, urldata, d):
"""Fetch urls (no-op for Local method)""" """Fetch urls (no-op for Local method)"""
# no need to fetch local files, we'll deal with them in place. # no need to fetch local files, we'll deal with them in place.
if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
@ -95,17 +95,17 @@ class Local(FetchMethod):
locations.append(filesdir) locations.append(filesdir)
locations.append(d.getVar("DL_DIR", True)) locations.append(d.getVar("DL_DIR", True))
msg = "Unable to find file " + url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg) raise FetchError(msg)
return True return True
def checkstatus(self, url, urldata, d): def checkstatus(self, urldata, d):
""" """
Check the status of the url Check the status of the url
""" """
if urldata.localpath.find("*") != -1: if urldata.localpath.find("*") != -1:
logger.info("URL %s looks like a glob and was therefore not checked.", url) logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
return True return True
if os.path.exists(urldata.localpath): if os.path.exists(urldata.localpath):
return True return True

View File

@ -20,7 +20,7 @@ class Osc(FetchMethod):
"""Class to fetch a module or modules from Opensuse build server """Class to fetch a module or modules from Opensuse build server
repositories.""" repositories."""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with osc. Check to see if a given url can be fetched with osc.
""" """
@ -77,7 +77,7 @@ class Osc(FetchMethod):
return osccmd return osccmd
def download(self, loc, ud, d): def download(self, ud, d):
""" """
Fetch url Fetch url
""" """
@ -86,7 +86,7 @@ class Osc(FetchMethod):
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update") oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ loc) logger.info("Update "+ ud.url)
# update sources there # update sources there
os.chdir(ud.moddir) os.chdir(ud.moddir)
logger.debug(1, "Running %s", oscupdatecmd) logger.debug(1, "Running %s", oscupdatecmd)
@ -94,7 +94,7 @@ class Osc(FetchMethod):
runfetchcmd(oscupdatecmd, d) runfetchcmd(oscupdatecmd, d)
else: else:
oscfetchcmd = self._buildosccommand(ud, d, "fetch") oscfetchcmd = self._buildosccommand(ud, d, "fetch")
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
# check out sources there # check out sources there
bb.utils.mkdirhier(ud.pkgdir) bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir) os.chdir(ud.pkgdir)

View File

@ -37,7 +37,7 @@ from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd from bb.fetch2 import runfetchcmd
class Perforce(FetchMethod): class Perforce(FetchMethod):
def supports(self, url, ud, d): def supports(self, ud, d):
return ud.type in ['p4'] return ud.type in ['p4']
def doparse(url, d): def doparse(url, d):
@ -120,12 +120,12 @@ class Perforce(FetchMethod):
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d) ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
def download(self, loc, ud, d): def download(self, ud, d):
""" """
Fetch urls Fetch urls
""" """
(host, depot, user, pswd, parm) = Perforce.doparse(loc, d) (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
if depot.find('/...') != -1: if depot.find('/...') != -1:
path = depot[:depot.find('/...')] path = depot[:depot.find('/...')]
@ -158,7 +158,7 @@ class Perforce(FetchMethod):
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false") tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip() tmpfile = tmpfile.strip()
if not tmpfile: if not tmpfile:
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
if "label" in parm: if "label" in parm:
depot = "%s@%s" % (depot, parm["label"]) depot = "%s@%s" % (depot, parm["label"])
@ -167,13 +167,13 @@ class Perforce(FetchMethod):
depot = "%s@%s" % (depot, cset) depot = "%s@%s" % (depot, cset)
os.chdir(tmpfile) os.chdir(tmpfile)
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
logger.info("%s%s files %s", p4cmd, p4opt, depot) logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot)) p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
p4file = [f.rstrip() for f in p4file.splitlines()] p4file = [f.rstrip() for f in p4file.splitlines()]
if not p4file: if not p4file:
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc) raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
count = 0 count = 0
@ -191,7 +191,7 @@ class Perforce(FetchMethod):
if count == 0: if count == 0:
logger.error() logger.error()
raise FetchError("Fetch: No files gathered from the P4 fetch", loc) raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath]) runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
# cleanup # cleanup

View File

@ -31,7 +31,7 @@ from bb.fetch2 import runfetchcmd
class Repo(FetchMethod): class Repo(FetchMethod):
"""Class to fetch a module or modules from repo (git) repositories""" """Class to fetch a module or modules from repo (git) repositories"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with repo. Check to see if a given url can be fetched with repo.
""" """
@ -53,7 +53,7 @@ class Repo(FetchMethod):
ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
@ -91,8 +91,8 @@ class Repo(FetchMethod):
def supports_srcrev(self): def supports_srcrev(self):
return False return False
def _build_revision(self, url, ud, d): def _build_revision(self, ud, d):
return ud.manifest return ud.manifest
def _want_sortable_revision(self, url, ud, d): def _want_sortable_revision(self, ud, d):
return False return False

View File

@ -72,7 +72,7 @@ from bb.fetch2 import runfetchcmd
class SFTP(FetchMethod): class SFTP(FetchMethod):
"""Class to fetch urls via 'sftp'""" """Class to fetch urls via 'sftp'"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with sftp. Check to see if a given url can be fetched with sftp.
""" """
@ -95,10 +95,10 @@ class SFTP(FetchMethod):
ud.localfile = data.expand(urllib.unquote(ud.basename), d) ud.localfile = data.expand(urllib.unquote(ud.basename), d)
def download(self, uri, ud, d): def download(self, ud, d):
"""Fetch urls""" """Fetch urls"""
urlo = URI(uri) urlo = URI(ud.url)
basecmd = 'sftp -oPasswordAuthentication=no' basecmd = 'sftp -oPasswordAuthentication=no'
port = '' port = ''
if urlo.port: if urlo.port:
@ -124,6 +124,6 @@ class SFTP(FetchMethod):
cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote), cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
commands.mkarg(lpath)) commands.mkarg(lpath))
bb.fetch2.check_network_access(d, cmd, uri) bb.fetch2.check_network_access(d, cmd, ud.url)
runfetchcmd(cmd, d) runfetchcmd(cmd, d)
return True return True

View File

@ -72,8 +72,8 @@ __pattern__ = re.compile(r'''
class SSH(FetchMethod): class SSH(FetchMethod):
'''Class to fetch a module or modules via Secure Shell''' '''Class to fetch a module or modules via Secure Shell'''
def supports(self, url, urldata, d): def supports(self, urldata, d):
return __pattern__.match(url) != None return __pattern__.match(urldata.url) != None
def supports_checksum(self, urldata): def supports_checksum(self, urldata):
return False return False
@ -89,10 +89,10 @@ class SSH(FetchMethod):
host = m.group('host') host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path)) urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
def download(self, url, urldata, d): def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True) dldir = d.getVar('DL_DIR', True)
m = __pattern__.match(url) m = __pattern__.match(urldata.url)
path = m.group('path') path = m.group('path')
host = m.group('host') host = m.group('host')
port = m.group('port') port = m.group('port')

View File

@ -37,7 +37,7 @@ from bb.fetch2 import runfetchcmd
class Svk(FetchMethod): class Svk(FetchMethod):
"""Class to fetch a module or modules from svk repositories""" """Class to fetch a module or modules from svk repositories"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with svk. Check to see if a given url can be fetched with svk.
""" """
@ -54,14 +54,14 @@ class Svk(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
def need_update(self, url, ud, d): def need_update(self, ud, d):
if ud.date == "now": if ud.date == "now":
return True return True
if not os.path.exists(ud.localpath): if not os.path.exists(ud.localpath):
return True return True
return False return False
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch urls""" """Fetch urls"""
svkroot = ud.host + ud.path svkroot = ud.host + ud.path
@ -81,11 +81,11 @@ class Svk(FetchMethod):
tmpfile = tmpfile.strip() tmpfile = tmpfile.strip()
if not tmpfile: if not tmpfile:
logger.error() logger.error()
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc) raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
# check out sources there # check out sources there
os.chdir(tmpfile) os.chdir(tmpfile)
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
logger.debug(1, "Running %s", svkcmd) logger.debug(1, "Running %s", svkcmd)
runfetchcmd(svkcmd, d, cleanup = [tmpfile]) runfetchcmd(svkcmd, d, cleanup = [tmpfile])

View File

@ -37,7 +37,7 @@ from bb.fetch2 import logger
class Svn(FetchMethod): class Svn(FetchMethod):
"""Class to fetch a module or modules from svn repositories""" """Class to fetch a module or modules from svn repositories"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with svn. Check to see if a given url can be fetched with svn.
""" """
@ -112,14 +112,14 @@ class Svn(FetchMethod):
return svncmd return svncmd
def download(self, loc, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
svnupdatecmd = self._buildsvncommand(ud, d, "update") svnupdatecmd = self._buildsvncommand(ud, d, "update")
logger.info("Update " + loc) logger.info("Update " + ud.url)
# update sources there # update sources there
os.chdir(ud.moddir) os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format # We need to attempt to run svn upgrade first in case its an older working format
@ -132,7 +132,7 @@ class Svn(FetchMethod):
runfetchcmd(svnupdatecmd, d) runfetchcmd(svnupdatecmd, d)
else: else:
svnfetchcmd = self._buildsvncommand(ud, d, "fetch") svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
logger.info("Fetch " + loc) logger.info("Fetch " + ud.url)
# check out sources there # check out sources there
bb.utils.mkdirhier(ud.pkgdir) bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir) os.chdir(ud.pkgdir)
@ -160,13 +160,13 @@ class Svn(FetchMethod):
def supports_srcrev(self): def supports_srcrev(self):
return True return True
def _revision_key(self, url, ud, d, name): def _revision_key(self, ud, d, name):
""" """
Return a unique key for the url Return a unique key for the url
""" """
return "svn:" + ud.moddir return "svn:" + ud.moddir
def _latest_revision(self, url, ud, d, name): def _latest_revision(self, ud, d, name):
""" """
Return the latest upstream revision number Return the latest upstream revision number
""" """
@ -180,12 +180,12 @@ class Svn(FetchMethod):
return revision return revision
def sortable_revision(self, url, ud, d, name): def sortable_revision(self, ud, d, name):
""" """
Return a sortable revision number which in our case is the revision number Return a sortable revision number which in our case is the revision number
""" """
return False, self._build_revision(url, ud, d) return False, self._build_revision(ud, d)
def _build_revision(self, url, ud, d): def _build_revision(self, ud, d):
return ud.revision return ud.revision

View File

@ -37,7 +37,7 @@ from bb.fetch2 import runfetchcmd
class Wget(FetchMethod): class Wget(FetchMethod):
"""Class to fetch urls via 'wget'""" """Class to fetch urls via 'wget'"""
def supports(self, url, ud, d): def supports(self, ud, d):
""" """
Check to see if a given url can be fetched with wget. Check to see if a given url can be fetched with wget.
""" """
@ -58,7 +58,7 @@ class Wget(FetchMethod):
ud.localfile = data.expand(urllib.unquote(ud.basename), d) ud.localfile = data.expand(urllib.unquote(ud.basename), d)
def download(self, uri, ud, d, checkonly = False): def download(self, ud, d, checkonly = False):
"""Fetch urls""" """Fetch urls"""
basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate" basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
@ -76,7 +76,7 @@ class Wget(FetchMethod):
else: else:
fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'") fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")
uri = uri.split(";")[0] uri = ud.url.split(";")[0]
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename) fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
@ -93,5 +93,5 @@ class Wget(FetchMethod):
return True return True
def checkstatus(self, uri, ud, d): def checkstatus(self, ud, d):
return self.download(uri, ud, d, True) return self.download(ud, d, True)