mirror of https://git.yoctoproject.org/poky
synced 2026-02-01 14:28:44 +01:00

Compare commits: uninative-... to uninative-... (75 commits)
Commit SHA1s:
0ba0a534a3 f13c22358a fa7db24367 3c0919821b d41e3e7d4c 064e203987 9294bc4bb4 c2580d3a78
4094b0bd50 7a89d2d2ed ede47ac147 0b9711efcb 7f4d057585 e1691ae855 75f87db413 7283a0b3b6
df90345d33 a30a06d9c1 40ecec326e 73160aac06 94f34b951b 44176bd385 3b559bb16d f5188da2f1
34ea1433fc 7747c98bf2 d6b4d57e62 73befa8f41 29e280f7ee 93775123c5 11d23de584 485af44c14
c1335d558f 2f82cf5a42 9b106a2f0b b85d2a236c 95204ac052 bf34676143 d8fe22e05f e789354511
612c8b8b12 cdd670b3cf 35c4fbe550 6131fb0e1b 9c7f1052f0 c43b253bc5 bf64a62bfd c5d80f154d
f5ef0e12d0 525493e3ef 88cb39cc3c 20f926f086 bea9c8b988 66dab7e17b bdad4025c7 1dbef3ce98
c9299cea83 915a6afc3c 1729dcf527 e1ab06f14b 43421f57bc 987aa92017 2ffbb020fe adcd9608a7
a8a468efac 1d2fe91db5 2fcbd0f115 c0ec68956c b74de5d19a 51b95cf1e7 b535616586 071f23ad79
e6be41a204 9000f80336 7ee0c2c8cb
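The unified diff below aggregates those commits. The recurring changes: the __version__ bump from 1.49.0 to 1.49.1, replacement of numeric-level logger.debug(N, ...) calls with the new debug()/debug2()/debug3() helpers, -u/--upstream and -r/--read-only options for the hash equivalence server, partial-write handling on the worker event pipe, lookaheads added to __func_start_regexp__, and 'mc:' multiconfig prefixes that now require at least two colons to be treated as such.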
@@ -26,7 +26,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
 if sys.getfilesystemencoding() != "utf-8":
     sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\nPython can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
 
-__version__ = "1.49.0"
+__version__ = "1.49.1"
 
 if __name__ == "__main__":
     if __version__ != bb.__version__:
@@ -151,9 +151,6 @@ def main():
     func = getattr(args, 'func', None)
     if func:
         client = hashserv.create_client(args.address)
-        # Try to establish a connection to the server now to detect failures
-        # early
-        client.connect()
 
         return func(args, client)
 
@@ -30,9 +30,11 @@ def main():
             "--bind [::1]:8686"'''
     )
 
-    parser.add_argument('--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
-    parser.add_argument('--database', default='./hashserv.db', help='Database file (default "%(default)s")')
-    parser.add_argument('--log', default='WARNING', help='Set logging level')
+    parser.add_argument('-b', '--bind', default=DEFAULT_BIND, help='Bind address (default "%(default)s")')
+    parser.add_argument('-d', '--database', default='./hashserv.db', help='Database file (default "%(default)s")')
+    parser.add_argument('-l', '--log', default='WARNING', help='Set logging level')
+    parser.add_argument('-u', '--upstream', help='Upstream hashserv to pull hashes from')
+    parser.add_argument('-r', '--read-only', action='store_true', help='Disallow write operations from clients')
 
     args = parser.parse_args()
 

@@ -47,7 +49,7 @@ def main():
     console.setLevel(level)
     logger.addHandler(console)
 
-    server = hashserv.create_server(args.bind, args.database)
+    server = hashserv.create_server(args.bind, args.database, upstream=args.upstream, read_only=args.read_only)
     server.serve_forever()
     return 0
 
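A minimal sketch of how the two new options reach the server, based on the hunks above (the bind address is the illustrative value from the help epilog, not a stated default):

```python
import hashserv

# Mirrors the updated call in the server's main(): the new keyword arguments
# come straight from the -u/--upstream and -r/--read-only flags.
server = hashserv.create_server(
    "[::1]:8686",      # bind address, as in the "--bind [::1]:8686" example text
    "./hashserv.db",   # database file (the argparse default above)
    upstream=None,     # optional upstream hashserv to pull hashes from
    read_only=False,   # when True, client write operations are disallowed
)
server.serve_forever()
```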
@@ -118,7 +118,9 @@ def worker_child_fire(event, d):
     data = b"<event>" + pickle.dumps(event) + b"</event>"
     try:
         worker_pipe_lock.acquire()
-        worker_pipe.write(data)
+        while(len(data)):
+            written = worker_pipe.write(data)
+            data = data[written:]
         worker_pipe_lock.release()
     except IOError:
         sigterm_handler(None, None)

@@ -167,7 +169,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, taskha
         fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
         for p in fakedirs:
             bb.utils.mkdirhier(p)
-        logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
+        logger.debug2('Running %s:%s under fakeroot, fakedirs: %s' %
                         (fn, taskname, ', '.join(fakedirs)))
     else:
         envvars = (workerdata["fakerootnoenv"][fn] or "").split()

@@ -321,7 +323,9 @@ class runQueueWorkerPipe():
         end = len(self.queue)
         index = self.queue.find(b"</event>")
         while index != -1:
-            worker_fire_prepickled(self.queue[:index+8])
+            msg = self.queue[:index+8]
+            assert msg.startswith(b"<event>") and msg.count(b"<event>") == 1
+            worker_fire_prepickled(msg)
             self.queue = self.queue[index+8:]
             index = self.queue.find(b"</event>")
         return (end > start)
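The two worker hunks above pair up: an event payload written to the worker pipe may be consumed only partially by a single write() call, so the sender now loops until the whole payload is flushed, and the reader asserts that each extracted <event>...</event> frame contains exactly one opening tag. A standalone sketch of the sender-side pattern (the pipe object is hypothetical and stands in for worker_pipe):

```python
def write_all(pipe, data: bytes) -> None:
    # A single write() on a pipe can be short, consuming only part of the
    # payload; keep writing the remainder until nothing is left. This assumes
    # write() returns the number of bytes written, as file-like pipes do.
    while len(data):
        written = pipe.write(data)
        data = data[written:]
```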
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "1.49.0"
+__version__ = "1.49.1"
 
 import sys
 if sys.version_info < (3, 5, 0):

@@ -21,8 +21,8 @@ class BBHandledException(Exception):
     The big dilemma for generic bitbake code is what information to give the user
     when an exception occurs. Any exception inheriting this base exception class
     has already provided information to the user via some 'fired' message type such as
-    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
-    encounters an exception derived from this class, no backtrace or other information
+    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
+    encounters an exception derived from this class, no backtrace or other information
     will be given to the user, its assumed the earlier event provided the relevant information.
     """
     pass

@@ -42,7 +42,16 @@ class BBLoggerMixin(object):
 
     def setup_bblogger(self, name):
         if name.split(".")[0] == "BitBake":
-            self.debug = self.bbdebug
+            self.debug = self._debug_helper
+
+    def _debug_helper(self, *args, **kwargs):
+        return self.bbdebug(1, *args, **kwargs)
+
+    def debug2(self, *args, **kwargs):
+        return self.bbdebug(2, *args, **kwargs)
+
+    def debug3(self, *args, **kwargs):
+        return self.bbdebug(3, *args, **kwargs)
 
     def bbdebug(self, level, msg, *args, **kwargs):
         loglevel = logging.DEBUG - level + 1
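The bbdebug() body above maps BitBake's numeric debug level onto the stdlib scale with logging.DEBUG - level + 1, so the new helpers land on descending stdlib levels; a quick check of that arithmetic:

```python
import logging

# debug() -> BitBake level 1, debug2() -> level 2, debug3() -> level 3
for bb_level in (1, 2, 3):
    print(bb_level, logging.DEBUG - bb_level + 1)
# prints: 1 10, 2 9, 3 8  (DEBUG is 10; higher BitBake levels are quieter)
```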
@@ -128,7 +137,7 @@ def debug(lvl, *args):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
         lvl = 1
-    mainlogger.debug(lvl, ''.join(args))
+    mainlogger.bbdebug(lvl, ''.join(args))
 
 def note(*args):
     mainlogger.info(''.join(args))
@@ -583,7 +583,7 @@ def _exec_task(fn, task, d, quieterr):
         logger.error("No such task: %s" % task)
         return 1
 
-    logger.debug(1, "Executing task %s", task)
+    logger.debug("Executing task %s", task)
 
     localdata = _task_data(fn, task, d)
     tempdir = localdata.getVar('T')

@@ -596,7 +596,7 @@ def _exec_task(fn, task, d, quieterr):
             curnice = os.nice(0)
             nice = int(nice) - curnice
             newnice = os.nice(nice)
-            logger.debug(1, "Renice to %s " % newnice)
+            logger.debug("Renice to %s " % newnice)
         ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
         if ionice:
             try:

@@ -720,7 +720,7 @@ def _exec_task(fn, task, d, quieterr):
 
     logfile.close()
     if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-        logger.debug(2, "Zero size logfn %s, removing", logfn)
+        logger.debug2("Zero size logfn %s, removing", logfn)
         bb.utils.remove(logfn)
         bb.utils.remove(loglink)
     event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
@@ -215,7 +215,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
         if not self.not_world:
             cachedata.possible_world.append(fn)
         #else:
-        #    logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)
+        #    logger.debug2("EXCLUDE FROM WORLD: %s", fn)
 
         # create a collection of all targets for sanity checking
         # tasks, such as upstream versions, license, and tools for

@@ -238,7 +238,7 @@ def virtualfn2realfn(virtualfn):
     Convert a virtual file name to a real one + the associated subclass keyword
     """
     mc = ""
-    if virtualfn.startswith('mc:'):
+    if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
         elems = virtualfn.split(':')
         mc = elems[1]
         virtualfn = ":".join(elems[2:])

@@ -268,7 +268,7 @@ def variant2virtual(realfn, variant):
     """
     if variant == "":
         return realfn
-    if variant.startswith("mc:"):
+    if variant.startswith("mc:") and variant.count(':') >= 2:
         elems = variant.split(":")
         if elems[2]:
             return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn

@@ -323,7 +323,7 @@ class NoCache(object):
         Return a complete set of data for fn.
         To do this, we need to parse the file.
         """
-        logger.debug(1, "Parsing %s (full)" % virtualfn)
+        logger.debug("Parsing %s (full)" % virtualfn)
         (fn, virtual, mc) = virtualfn2realfn(virtualfn)
         bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
         return bb_data[virtual]

@@ -400,7 +400,7 @@ class Cache(NoCache):
 
         self.cachefile = self.getCacheFile("bb_cache.dat")
 
-        self.logger.debug(1, "Cache dir: %s", self.cachedir)
+        self.logger.debug("Cache dir: %s", self.cachedir)
         bb.utils.mkdirhier(self.cachedir)
 
         cache_ok = True

@@ -408,7 +408,7 @@ class Cache(NoCache):
         for cache_class in self.caches_array:
             cachefile = self.getCacheFile(cache_class.cachefile)
             cache_exists = os.path.exists(cachefile)
-            self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+            self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
             cache_ok = cache_ok and cache_exists
             cache_class.init_cacheData(self)
         if cache_ok:

@@ -416,7 +416,7 @@ class Cache(NoCache):
         elif os.path.isfile(self.cachefile):
             self.logger.info("Out of date cache found, rebuilding...")
         else:
-            self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+            self.logger.debug("Cache file %s not found, building..." % self.cachefile)
 
         # We don't use the symlink, its just for debugging convinience
         if self.mc:

@@ -453,7 +453,7 @@ class Cache(NoCache):
 
         for cache_class in self.caches_array:
             cachefile = self.getCacheFile(cache_class.cachefile)
-            self.logger.debug(1, 'Loading cache file: %s' % cachefile)
+            self.logger.debug('Loading cache file: %s' % cachefile)
             with open(cachefile, "rb") as cachefile:
                 pickled = pickle.Unpickler(cachefile)
                 # Check cache version information

@@ -500,7 +500,7 @@ class Cache(NoCache):
 
     def parse(self, filename, appends):
         """Parse the specified filename, returning the recipe information"""
-        self.logger.debug(1, "Parsing %s", filename)
+        self.logger.debug("Parsing %s", filename)
         infos = []
         datastores = self.load_bbfile(filename, appends, mc=self.mc)
         depends = []

@@ -554,7 +554,7 @@ class Cache(NoCache):
         cached, infos = self.load(fn, appends)
         for virtualfn, info_array in infos:
             if info_array[0].skipped:
                self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
                 skipped += 1
             else:
                 self.add_info(virtualfn, info_array, cacheData, not cached)

@@ -590,21 +590,21 @@ class Cache(NoCache):
 
         # File isn't in depends_cache
         if not fn in self.depends_cache:
-            self.logger.debug(2, "%s is not cached", fn)
+            self.logger.debug2("%s is not cached", fn)
             return False
 
         mtime = bb.parse.cached_mtime_noerror(fn)
 
         # Check file still exists
         if mtime == 0:
-            self.logger.debug(2, "%s no longer exists", fn)
+            self.logger.debug2("%s no longer exists", fn)
             self.remove(fn)
             return False
 
         info_array = self.depends_cache[fn]
         # Check the file's timestamp
         if mtime != info_array[0].timestamp:
-            self.logger.debug(2, "%s changed", fn)
+            self.logger.debug2("%s changed", fn)
             self.remove(fn)
             return False
 

@@ -615,13 +615,13 @@ class Cache(NoCache):
             fmtime = bb.parse.cached_mtime_noerror(f)
             # Check if file still exists
             if old_mtime != 0 and fmtime == 0:
-                self.logger.debug(2, "%s's dependency %s was removed",
+                self.logger.debug2("%s's dependency %s was removed",
                         fn, f)
                 self.remove(fn)
                 return False
 
             if (fmtime != old_mtime):
-                self.logger.debug(2, "%s's dependency %s changed",
+                self.logger.debug2("%s's dependency %s changed",
                         fn, f)
                 self.remove(fn)
                 return False

@@ -638,14 +638,14 @@ class Cache(NoCache):
                     continue
                 f, exist = f.split(":")
                 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
-                    self.logger.debug(2, "%s's file checksum list file %s changed",
+                    self.logger.debug2("%s's file checksum list file %s changed",
                             fn, f)
                     self.remove(fn)
                     return False
 
         if tuple(appends) != tuple(info_array[0].appends):
-            self.logger.debug(2, "appends for %s changed", fn)
-            self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+            self.logger.debug2("appends for %s changed", fn)
+            self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
             self.remove(fn)
             return False
 

@@ -654,10 +654,10 @@ class Cache(NoCache):
             virtualfn = variant2virtual(fn, cls)
             self.clean.add(virtualfn)
             if virtualfn not in self.depends_cache:
-                self.logger.debug(2, "%s is not cached", virtualfn)
+                self.logger.debug2("%s is not cached", virtualfn)
                 invalid = True
             elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
-                self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
+                self.logger.debug2("Extra caches missing for %s?" % virtualfn)
                 invalid = True
 
         # If any one of the variants is not present, mark as invalid for all

@@ -665,10 +665,10 @@ class Cache(NoCache):
             for cls in info_array[0].variants:
                 virtualfn = variant2virtual(fn, cls)
                 if virtualfn in self.clean:
-                    self.logger.debug(2, "Removing %s from cache", virtualfn)
+                    self.logger.debug2("Removing %s from cache", virtualfn)
                     self.clean.remove(virtualfn)
             if fn in self.clean:
-                self.logger.debug(2, "Marking %s as not clean", fn)
+                self.logger.debug2("Marking %s as not clean", fn)
                 self.clean.remove(fn)
             return False

@@ -681,10 +681,10 @@ class Cache(NoCache):
         Called from the parser in error cases
         """
         if fn in self.depends_cache:
-            self.logger.debug(1, "Removing %s from cache", fn)
+            self.logger.debug("Removing %s from cache", fn)
             del self.depends_cache[fn]
         if fn in self.clean:
-            self.logger.debug(1, "Marking %s as unclean", fn)
+            self.logger.debug("Marking %s as unclean", fn)
             self.clean.remove(fn)
 
     def sync(self):

@@ -697,13 +697,13 @@ class Cache(NoCache):
             return
 
         if self.cacheclean:
-            self.logger.debug(2, "Cache is clean, not saving.")
+            self.logger.debug2("Cache is clean, not saving.")
             return
 
         for cache_class in self.caches_array:
             cache_class_name = cache_class.__name__
             cachefile = self.getCacheFile(cache_class.cachefile)
-            self.logger.debug(2, "Writing %s", cachefile)
+            self.logger.debug2("Writing %s", cachefile)
             with open(cachefile, "wb") as f:
                 p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                 p.dump(__cache_version__)
@@ -879,7 +879,7 @@ class MultiProcessCache(object):
         bb.utils.mkdirhier(cachedir)
         self.cachefile = os.path.join(cachedir,
                                       cache_file_name or self.__class__.cache_file_name)
-        logger.debug(1, "Using cache in '%s'", self.cachefile)
+        logger.debug("Using cache in '%s'", self.cachefile)
 
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 

@@ -985,7 +985,7 @@ class SimpleCache(object):
         bb.utils.mkdirhier(cachedir)
         self.cachefile = os.path.join(cachedir,
                                       cache_file_name or self.__class__.cache_file_name)
-        logger.debug(1, "Using cache in '%s'", self.cachefile)
+        logger.debug("Using cache in '%s'", self.cachefile)
 
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 
@@ -411,6 +411,8 @@ class BBCooker:
         self.data.disableTracking()
 
     def parseConfiguration(self):
+        self.updateCacheSync()
+
         # Change nice level if we're asked to
         nice = self.data.getVar("BB_NICE_LEVEL")
         if nice:

@@ -441,7 +443,7 @@ class BBCooker:
                     continue
                 except AttributeError:
                     pass
-                logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
+                logger.debug("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
                 clean = False
             if hasattr(self.configuration, o):

@@ -468,17 +470,17 @@ class BBCooker:
 
         for k in bb.utils.approved_variables():
             if k in environment and k not in self.configuration.env:
-                logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
+                logger.debug("Updating new environment variable %s to %s" % (k, environment[k]))
                 self.configuration.env[k] = environment[k]
                 clean = False
             if k in self.configuration.env and k not in environment:
-                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
+                logger.debug("Updating environment variable %s (deleted)" % (k))
                 del self.configuration.env[k]
                 clean = False
             if k not in self.configuration.env and k not in environment:
                 continue
             if environment[k] != self.configuration.env[k]:
-                logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
+                logger.debug("Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
                 self.configuration.env[k] = environment[k]
                 clean = False
 

@@ -486,7 +488,7 @@ class BBCooker:
             self.configuration.env = environment
 
         if not clean:
-            logger.debug(1, "Base environment change, triggering reparse")
+            logger.debug("Base environment change, triggering reparse")
             self.reset()
 
     def runCommands(self, server, data, abort):

@@ -614,7 +616,7 @@ class BBCooker:
         # Replace string such as "mc:*:bash"
        # into "mc:A:bash mc:B:bash bash"
         for k in targetlist:
-            if k.startswith("mc:"):
+            if k.startswith("mc:") and k.count(':') >= 2:
                 if wildcard:
                     bb.fatal('multiconfig conflict')
                 if k.split(":")[1] == "*":

@@ -648,7 +650,7 @@ class BBCooker:
         for k in fulltargetlist:
             origk = k
             mc = ""
-            if k.startswith("mc:"):
+            if k.startswith("mc:") and k.count(':') >= 2:
                 mc = k.split(":")[1]
                 k = ":".join(k.split(":")[2:])
             ktask = task

@@ -697,7 +699,7 @@ class BBCooker:
                     if depmc not in self.multiconfigs:
                         bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named configuration %s" % (k,depmc))
                     else:
-                        logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
+                        logger.debug("Adding providers for multiconfig dependency %s" % l[3])
                         taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
                         seen.add(k)
                         new = True

@@ -1553,7 +1555,7 @@ class BBCooker:
             self.inotify_modified_files = []
 
             if not self.baseconfig_valid:
-                logger.debug(1, "Reloading base configuration data")
+                logger.debug("Reloading base configuration data")
                 self.initConfigurationData()
                 self.handlePRServ()
 
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_decoded = list(decodeurl(ud.url))
     uri_find_decoded = list(decodeurl(uri_find))
     uri_replace_decoded = list(decodeurl(uri_replace))
-    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+    logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]

@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     result = encodeurl(result_decoded)
     if result == ud.url:
         return None
-    logger.debug(2, "For url %s returning %s" % (ud.url, result))
+    logger.debug2("For url %s returning %s" % (ud.url, result))
     return result
 
 methods = []

@@ -499,9 +499,9 @@ def fetcher_init(d):
     # When to drop SCM head revisions controlled by user policy
     srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
-        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
-        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+        logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
         revs.clear()
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
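The policy branch above accepts exactly two values, "cache" and "clear"; a minimal sketch of the same dispatch (revs here is a plain dict standing in for BitBake's persistent SRCREV store):

```python
def apply_srcrev_policy(srcrev_policy, revs):
    if srcrev_policy == "cache":
        pass                  # keep cached SCM head revisions
    elif srcrev_policy == "clear":
        revs.clear()          # drop cached revisions so they are re-resolved
    else:
        raise ValueError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

apply_srcrev_policy("clear", {"git://example": "deadbeef"})  # illustrative data
```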
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         cmd = 'export PSEUDO_DISABLED=1; ' + cmd
 
     if workdir:
-        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+        logger.debug("Running '%s' in %s" % (cmd, workdir))
     else:
-        logger.debug(1, "Running %s", cmd)
+        logger.debug("Running %s", cmd)
 
     success = False
     error_message = ""

@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
     elif not trusted_network(d, url):
         raise UntrustedUrl(url, info)
     else:
-        logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+        logger.debug("Fetcher accessed the network with the command %s" % info)
 
 def build_mirroruris(origud, mirrors, ld):
     uris = []

@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
                 continue
 
             if not trusted_network(ld, newuri):
-                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+                logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
                 continue
 
             # Create a local copy of the mirrors minus the current line

@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld):
                 newud = FetchData(newuri, ld)
                 newud.setup_localpath(ld)
             except bb.fetch2.BBFetchException as e:
-                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
-                logger.debug(1, str(e))
+                logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+                logger.debug(str(e))
                 try:
                     # setup_localpath of file:// urls may fail, we should still see
                     # if mirrors of the url exist

@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         elif isinstance(e, NoChecksumError):
             raise
         else:
-            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
-            logger.debug(1, str(e))
+            logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+            logger.debug(str(e))
             try:
                 ud.method.clean(ud, ld)
             except UnboundLocalError:

@@ -1688,7 +1688,7 @@ class Fetch(object):
             if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                 done = True
             elif m.try_premirror(ud, self.d):
-                logger.debug(1, "Trying PREMIRRORS")
+                logger.debug("Trying PREMIRRORS")
                 mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                 done = m.try_mirrors(self, ud, self.d, mirrors)
                 if done:

@@ -1698,7 +1698,7 @@ class Fetch(object):
                         m.update_donestamp(ud, self.d)
                     except ChecksumError as e:
                         logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
-                        logger.debug(1, str(e))
+                        logger.debug(str(e))
                         done = False
 
             if premirroronly:

@@ -1710,7 +1710,7 @@ class Fetch(object):
                 try:
                     if not trusted_network(self.d, ud.url):
                         raise UntrustedUrl(ud.url)
-                    logger.debug(1, "Trying Upstream")
+                    logger.debug("Trying Upstream")
                     m.download(ud, self.d)
                     if hasattr(m, "build_mirror_data"):
                         m.build_mirror_data(ud, self.d)

@@ -1725,19 +1725,19 @@ class Fetch(object):
                 except BBFetchException as e:
                     if isinstance(e, ChecksumError):
                         logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
-                        logger.debug(1, str(e))
+                        logger.debug(str(e))
                         if os.path.exists(ud.localpath):
                             rename_bad_checksum(ud, e.checksum)
                     elif isinstance(e, NoChecksumError):
                         raise
                     else:
                         logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
-                        logger.debug(1, str(e))
+                        logger.debug(str(e))
                         firsterr = e
                         # Remove any incomplete fetch
                         if not verified_stamp:
                             m.clean(ud, self.d)
-                        logger.debug(1, "Trying MIRRORS")
+                        logger.debug("Trying MIRRORS")
                         mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         done = m.try_mirrors(self, ud, self.d, mirrors)
 

@@ -1774,7 +1774,7 @@ class Fetch(object):
             ud = self.ud[u]
             ud.setup_localpath(self.d)
             m = ud.method
-            logger.debug(1, "Testing URL %s", u)
+            logger.debug("Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
             mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = m.try_mirrors(self, ud, self.d, mirrors, True)
@@ -74,16 +74,16 @@ class Bzr(FetchMethod):
 
         if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
             bzrcmd = self._buildbzrcommand(ud, d, "update")
-            logger.debug(1, "BZR Update %s", ud.url)
+            logger.debug("BZR Update %s", ud.url)
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
             runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
         else:
             bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
             bzrcmd = self._buildbzrcommand(ud, d, "fetch")
             bb.fetch2.check_network_access(d, bzrcmd, ud.url)
-            logger.debug(1, "BZR Checkout %s", ud.url)
+            logger.debug("BZR Checkout %s", ud.url)
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", bzrcmd)
+            logger.debug("Running %s", bzrcmd)
             runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
 
         scmdata = ud.parm.get("scmdata", "")

@@ -109,7 +109,7 @@ class Bzr(FetchMethod):
         """
         Return the latest upstream revision number
         """
-        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+        logger.debug2("BZR fetcher hitting network for %s", ud.url)
 
         bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
 
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod):
         return ud.type in ['ccrc']
 
     def debug(self, msg):
-        logger.debug(1, "ClearCase: %s", msg)
+        logger.debug("ClearCase: %s", msg)
 
     def urldata_init(self, ud, d):
         """
@@ -109,7 +109,7 @@ class Cvs(FetchMethod):
             cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
 
         # create module directory
-        logger.debug(2, "Fetch: checking for module directory")
+        logger.debug2("Fetch: checking for module directory")
         moddir = os.path.join(ud.pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):

@@ -123,7 +123,7 @@ class Cvs(FetchMethod):
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
             workdir = ud.pkgdir
-            logger.debug(1, "Running %s", cvscmd)
+            logger.debug("Running %s", cvscmd)
             bb.fetch2.check_network_access(d, cvscmd, ud.url)
             cmd = cvscmd
 
@@ -78,7 +78,7 @@ class GitSM(Git):
                 module_hash = ""
 
             if not module_hash:
-                logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+                logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
                 continue
 
             submodules.append(m)

@@ -179,7 +179,7 @@ class GitSM(Git):
                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
         if len(need_update_list) > 0:
-            logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
+            logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
             return True
 
         return False
@@ -150,7 +150,7 @@ class Hg(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         # If the checkout doesn't exist and the mirror tarball does, extract it
         if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):

@@ -160,7 +160,7 @@ class Hg(FetchMethod):
         if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
             # Found the source, check whether need pull
             updatecmd = self._buildhgcommand(ud, d, "update")
-            logger.debug(1, "Running %s", updatecmd)
+            logger.debug("Running %s", updatecmd)
             try:
                 runfetchcmd(updatecmd, d, workdir=ud.moddir)
             except bb.fetch2.FetchError:

@@ -168,7 +168,7 @@ class Hg(FetchMethod):
                 pullcmd = self._buildhgcommand(ud, d, "pull")
                 logger.info("Pulling " + ud.url)
                 # update sources there
-                logger.debug(1, "Running %s", pullcmd)
+                logger.debug("Running %s", pullcmd)
                 bb.fetch2.check_network_access(d, pullcmd, ud.url)
                 runfetchcmd(pullcmd, d, workdir=ud.moddir)
             try:

@@ -183,14 +183,14 @@ class Hg(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", fetchcmd)
+            logger.debug("Running %s", fetchcmd)
             bb.fetch2.check_network_access(d, fetchcmd, ud.url)
             runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
 
         # Even when we clone (fetch), we still need to update as hg's clone
         # won't checkout the specified revision if its on a branch
         updatecmd = self._buildhgcommand(ud, d, "update")
-        logger.debug(1, "Running %s", updatecmd)
+        logger.debug("Running %s", updatecmd)
         runfetchcmd(updatecmd, d, workdir=ud.moddir)
 
     def clean(self, ud, d):

@@ -247,9 +247,9 @@ class Hg(FetchMethod):
         if scmdata != "nokeep":
             proto = ud.parm.get('protocol', 'http')
             if not os.access(os.path.join(codir, '.hg'), os.R_OK):
-                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+                logger.debug2("Unpack: creating new hg repository in '" + codir + "'")
                 runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
-            logger.debug(2, "Unpack: updating source in '" + codir + "'")
+            logger.debug2("Unpack: updating source in '" + codir + "'")
             if ud.user and ud.pswd:
                 runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
             else:

@@ -259,5 +259,5 @@ class Hg(FetchMethod):
             else:
                 runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
         else:
-            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+            logger.debug2("Unpack: extracting source to '" + codir + "'")
             runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
@@ -54,12 +54,12 @@ class Local(FetchMethod):
             return [path]
         filespath = d.getVar('FILESPATH')
         if filespath:
-            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
+            logger.debug2("Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
             searched.extend(hist)
         if not os.path.exists(newpath):
             dldirfile = os.path.join(d.getVar("DL_DIR"), path)
-            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+            logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
             bb.utils.mkdirhier(os.path.dirname(dldirfile))
             searched.append(dldirfile)
             return searched
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
         Fetch url
         """
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
-            logger.debug(1, "Running %s", oscupdatecmd)
+            logger.debug("Running %s", oscupdatecmd)
             bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
             runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
         else:

@@ -98,7 +98,7 @@ class Osc(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", oscfetchcmd)
+            logger.debug("Running %s", oscfetchcmd)
             bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
             runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
 
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
         p4port = d.getVar('P4PORT')
 
         if p4port:
-            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+            logger.debug('Using recipe provided P4PORT: %s' % p4port)
             ud.host = p4port
         else:
-            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+            logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
             ud.usingp4config = True
             p4cmd = '%s info | grep "Server address"' % ud.basecmd
             bb.fetch2.check_network_access(d, p4cmd, ud.url)
             ud.host = runfetchcmd(p4cmd, d, True)
             ud.host = ud.host.split(': ')[1].strip()
-            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+            logger.debug('Determined P4PORT to be: %s' % ud.host)
             if not ud.host:
                 raise FetchError('Could not determine P4PORT from P4CONFIG')
 

@@ -208,7 +208,7 @@ class Perforce(FetchMethod):
         for filename in p4fileslist:
             item = filename.split(' - ')
             lastaction = item[1].split()
-            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+            logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
             if lastaction[0] == 'delete':
                 continue
             filelist.append(item[0])

@@ -255,7 +255,7 @@ class Perforce(FetchMethod):
             raise FetchError('Could not determine the latest perforce changelist')
 
         tipcset = tip.split(' ')[1]
-        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+        logger.debug('p4 tip found to be changelist %s' % tipcset)
         return tipcset
 
     def sortable_revision(self, ud, d, name):
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
         """Fetch url"""
 
         if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
-            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+            logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
             return
 
         repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
     def download(self, ud, d):
         """Fetch url"""
 
-        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
         lf = bb.utils.lockfile(ud.svnlock)
 

@@ -129,7 +129,7 @@ class Svn(FetchMethod):
                 runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
             except FetchError:
                 pass
-            logger.debug(1, "Running %s", svncmd)
+            logger.debug("Running %s", svncmd)
             bb.fetch2.check_network_access(d, svncmd, ud.url)
             runfetchcmd(svncmd, d, workdir=ud.moddir)
         else:

@@ -137,7 +137,7 @@ class Svn(FetchMethod):
             logger.info("Fetch " + ud.url)
             # check out sources there
             bb.utils.mkdirhier(ud.pkgdir)
-            logger.debug(1, "Running %s", svncmd)
+            logger.debug("Running %s", svncmd)
             bb.fetch2.check_network_access(d, svncmd, ud.url)
             runfetchcmd(svncmd, d, workdir=ud.pkgdir)
 
@@ -88,7 +88,7 @@ class Wget(FetchMethod):
 
         progresshandler = WgetProgressHandler(d)
 
-        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command, ud.url)
         runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
 

@@ -326,11 +326,11 @@ class Wget(FetchMethod):
                 pass
         except urllib.error.URLError as e:
             if try_again:
-                logger.debug(2, "checkstatus: trying again")
+                logger.debug2("checkstatus: trying again")
                 return self.checkstatus(fetch, ud, d, False)
             else:
                 # debug for now to avoid spamming the logs in e.g. remote sstate searches
-                logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+                logger.debug2("checkstatus() urlopen failed: %s" % e)
                 return False
         return True
 
@@ -71,7 +71,7 @@ def update_mtime(f):
 
 def update_cache(f):
     if f in __mtime_cache:
-        logger.debug(1, "Updating mtime cache for %s" % f)
+        logger.debug("Updating mtime cache for %s" % f)
         update_mtime(f)
 
 def clear_cache():
@@ -34,7 +34,7 @@ class IncludeNode(AstNode):
         Include the file and evaluate the statements
         """
         s = data.expand(self.what_file)
-        logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
+        logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
 
         # TODO: Cache those includes... maybe not here though
         if self.force:

@@ -376,7 +376,7 @@ def _create_variants(datastores, names, function, onlyfinalise):
 def multi_finalize(fn, d):
     appends = (d.getVar("__BBAPPEND") or "").split()
     for append in appends:
-        logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
+        logger.debug("Appending .bbappend file %s to %s", append, fn)
         bb.parse.BBHandler.handle(append, d, True)
 
     onlyfinalise = d.getVar("__ONLYFINALISE", False)
@@ -22,7 +22,7 @@ from .ConfHandler import include, init
 # For compatibility
 bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
 
-__func_start_regexp__ = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
+__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
 __inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
 __export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
 __addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
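The only change in this hunk is the pair of lookaheads added to __func_start_regexp__: "python" and "fakeroot" now count as keywords only when followed by whitespace (or "(" for python), so a shell function whose name merely starts with "python" is no longer mis-split. A small demonstration:

```python
import re

old = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")
new = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")

line = "pythonfunc() {"
print(old.match(line).group('py', 'func'))  # ('python', 'func'): name wrongly split
print(new.match(line).group('py', 'func'))  # (None, 'pythonfunc'): parsed as one name
```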
@@ -60,7 +60,7 @@ def inherit(files, fn, lineno, d):
         file = abs_fn
 
         if not file in __inherit_cache:
-            logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
+            logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
             __inherit_cache.append( file )
             d.setVar('__inherit_cache', __inherit_cache)
             include(fn, file, lineno, d, "inherit")
@@ -95,7 +95,7 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
         if exc.errno == errno.ENOENT:
             if error_out:
                 raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
-            logger.debug(2, "CONF file '%s' not found", fn)
+            logger.debug2("CONF file '%s' not found", fn)
         else:
             if error_out:
                 raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
@@ -248,7 +248,7 @@ class PersistData(object):
                       stacklevel=2)
 
         self.data = persist(d)
-        logger.debug(1, "Using '%s' as the persistent data cache",
+        logger.debug("Using '%s' as the persistent data cache",
                      self.data.filename)
 
     def addDomain(self, domain):
@@ -165,7 +165,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
             available_vers.sort()
             logger.warn("versions of %s available: %s", pn, ' '.join(available_vers))
         else:
-            logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
+            logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
 
     return (preferred_ver, preferred_file)
 

@@ -232,7 +232,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
             pkg_pn[pn] = []
         pkg_pn[pn].append(p)
 
-    logger.debug(1, "providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
+    logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
 
     # First add PREFERRED_VERSIONS
     for pn in sorted(pkg_pn):

@@ -291,7 +291,7 @@ def filterProviders(providers, item, cfgData, dataCache):
             foundUnique = True
             break
 
-    logger.debug(1, "sorted providers for %s are: %s", item, eligible)
+    logger.debug("sorted providers for %s are: %s", item, eligible)
 
     return eligible, foundUnique
 

@@ -333,7 +333,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
             provides = dataCache.pn_provides[pn]
             for provide in provides:
                 prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide)
-                #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
+                #logger.debug("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
                 if prefervar in pns and pns[prefervar] not in preferred:
                     var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
                     logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)

@@ -349,7 +349,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
     if numberPreferred > 1:
         logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
 
-    logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)
+    logger.debug("sorted runtime providers for %s are: %s", item, eligible)
 
     return eligible, numberPreferred
 

@@ -384,7 +384,7 @@ def getRuntimeProviders(dataCache, rdepend):
             regexp_cache[pattern] = regexp
         if regexp.match(rdepend):
             rproviders += dataCache.packages_dynamic[pattern]
-            logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)
+            logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
 
     return rproviders
 

@@ -396,22 +396,22 @@ def buildWorldTargetList(dataCache, task=None):
     if dataCache.world_target:
         return
 
-    logger.debug(1, "collating packages for \"world\"")
+    logger.debug("collating packages for \"world\"")
     for f in dataCache.possible_world:
         terminal = True
         pn = dataCache.pkg_fn[f]
         if task and task not in dataCache.task_deps[f]['tasks']:
-            logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task)
+            logger.debug2("World build skipping %s as task %s doesn't exist", f, task)
             terminal = False
 
         for p in dataCache.pn_provides[pn]:
             if p.startswith('virtual/'):
-                logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
+                logger.debug2("World build skipping %s due to %s provider starting with virtual/", f, p)
                 terminal = False
                 break
             for pf in dataCache.providers[p]:
                 if dataCache.pkg_fn[pf] != pn:
-                    logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
+                    logger.debug2("World build skipping %s due to both us and %s providing %s", f, pf, p)
                     terminal = False
                     break
         if terminal:
@@ -38,7 +38,7 @@ def taskname_from_tid(tid):
     return tid.rsplit(":", 1)[1]
 
 def mc_from_tid(tid):
-    if tid.startswith('mc:'):
+    if tid.startswith('mc:') and tid.count(':') >= 2:
         return tid.split(':')[1]
     return ""
 

@@ -47,13 +47,13 @@ def split_tid(tid):
     return (mc, fn, taskname)
 
 def split_mc(n):
-    if n.startswith("mc:"):
+    if n.startswith("mc:") and n.count(':') >= 2:
         _, mc, n = n.split(":", 2)
         return (mc, n)
     return ('', n)
 
 def split_tid_mcfn(tid):
-    if tid.startswith('mc:'):
+    if tid.startswith('mc:') and tid.count(':') >= 2:
         elems = tid.split(':')
         mc = elems[1]
         fn = ":".join(elems[2:-1])
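The three runqueue helpers above get the same guard as the cooker and cache code earlier: an "mc:" prefix only counts when at least two colons are present. With split_mc() exactly as in the new version of the hunk:

```python
def split_mc(n):
    if n.startswith("mc:") and n.count(':') >= 2:
        _, mc, n = n.split(":", 2)
        return (mc, n)
    return ('', n)

print(split_mc("mc:musl:bash"))  # ('musl', 'bash')
print(split_mc("mc:bash"))       # ('', 'mc:bash'); previously raised ValueError,
                                 # since split(":", 2) yields only two elements
```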
@@ -544,8 +544,8 @@ class RunQueueData:
|
||||
for tid in self.runtaskentries:
|
||||
if task_done[tid] is False or deps_left[tid] != 0:
|
||||
problem_tasks.append(tid)
|
||||
logger.debug(2, "Task %s is not buildable", tid)
|
||||
logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
|
||||
logger.debug2("Task %s is not buildable", tid)
|
||||
logger.debug2("(Complete marker was %s and the remaining dependency count was %s)\n", task_done[tid], deps_left[tid])
|
||||
self.runtaskentries[tid].weight = weight[tid]
|
||||
|
||||
if problem_tasks:
|
||||
@@ -643,7 +643,7 @@ class RunQueueData:
|
||||
(mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
|
||||
#runtid = build_tid(mc, fn, taskname)
|
||||
|
||||
#logger.debug(2, "Processing %s,%s:%s", mc, fn, taskname)
|
||||
#logger.debug2("Processing %s,%s:%s", mc, fn, taskname)
|
||||
|
||||
depends = set()
|
||||
task_deps = self.dataCaches[mc].task_deps[taskfn]
|
||||
@@ -1199,9 +1199,9 @@ class RunQueueData:
|
||||
"""
|
||||
Dump some debug information on the internal data structures
|
||||
"""
|
||||
logger.debug(3, "run_tasks:")
|
||||
logger.debug3("run_tasks:")
|
||||
for tid in self.runtaskentries:
|
||||
logger.debug(3, " %s: %s Deps %s RevDeps %s", tid,
|
||||
logger.debug3(" %s: %s Deps %s RevDeps %s", tid,
|
||||
self.runtaskentries[tid].weight,
|
||||
self.runtaskentries[tid].depends,
|
||||
self.runtaskentries[tid].revdeps)
|
||||
@@ -1238,7 +1238,7 @@ class RunQueue:
|
||||
self.fakeworker = {}
|
||||
|
||||
def _start_worker(self, mc, fakeroot = False, rqexec = None):
|
||||
logger.debug(1, "Starting bitbake-worker")
|
||||
logger.debug("Starting bitbake-worker")
|
||||
magic = "decafbad"
|
||||
if self.cooker.configuration.profile:
|
||||
magic = "decafbadbad"
|
||||
@@ -1283,7 +1283,7 @@ class RunQueue:
|
||||
def _teardown_worker(self, worker):
|
||||
if not worker:
|
||||
return
|
||||
logger.debug(1, "Teardown for bitbake-worker")
|
||||
logger.debug("Teardown for bitbake-worker")
|
||||
try:
|
||||
worker.process.stdin.write(b"<quit></quit>")
|
||||
worker.process.stdin.flush()
|
||||
@@ -1356,12 +1356,12 @@ class RunQueue:
|
||||
|
||||
# If the stamp is missing, it's not current
|
||||
if not os.access(stampfile, os.F_OK):
|
||||
logger.debug(2, "Stampfile %s not available", stampfile)
|
||||
logger.debug2("Stampfile %s not available", stampfile)
|
||||
return False
|
||||
# If it's a 'nostamp' task, it's not current
|
||||
taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
|
||||
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
|
||||
logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
|
||||
logger.debug2("%s.%s is nostamp\n", fn, taskname)
|
||||
return False
|
||||
|
||||
if taskname != "do_setscene" and taskname.endswith("_setscene"):
|
||||
@@ -1385,18 +1385,18 @@ class RunQueue:
|
||||
continue
|
||||
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
|
||||
if not t2:
|
||||
logger.debug(2, 'Stampfile %s does not exist', stampfile2)
|
||||
logger.debug2('Stampfile %s does not exist', stampfile2)
|
||||
iscurrent = False
|
||||
break
|
||||
if t1 < t2:
|
||||
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
|
||||
logger.debug2('Stampfile %s < %s', stampfile, stampfile2)
|
||||
iscurrent = False
|
||||
break
|
||||
if recurse and iscurrent:
|
||||
if dep in cache:
|
||||
iscurrent = cache[dep]
|
||||
if not iscurrent:
|
||||
logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
|
||||
logger.debug2('Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
|
||||
else:
|
||||
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
|
||||
cache[dep] = iscurrent
|
||||
@@ -1761,7 +1761,7 @@ class RunQueueExecute:
|
||||
for scheduler in schedulers:
|
||||
if self.scheduler == scheduler.name:
|
||||
self.sched = scheduler(self, self.rqdata)
|
||||
logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
|
||||
logger.debug("Using runqueue scheduler '%s'", scheduler.name)
|
||||
break
|
||||
else:
|
||||
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
|
||||
@@ -1899,7 +1899,7 @@ class RunQueueExecute:
|
||||
break
|
||||
if alldeps:
|
||||
self.setbuildable(revdep)
|
||||
logger.debug(1, "Marking task %s as buildable", revdep)
|
||||
logger.debug("Marking task %s as buildable", revdep)
|
||||
|
||||
def task_complete(self, task):
|
||||
self.stats.taskCompleted()
|
||||
@@ -1929,7 +1929,7 @@ class RunQueueExecute:
|
||||
def summarise_scenequeue_errors(self):
|
||||
err = False
|
||||
if not self.sqdone:
|
||||
logger.debug(1, 'We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
|
||||
logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
|
||||
completeevent = sceneQueueComplete(self.sq_stats, self.rq)
|
||||
bb.event.fire(completeevent, self.cfgData)
|
||||
if self.sq_deferred:
|
||||
@@ -1986,7 +1986,7 @@ class RunQueueExecute:
|
||||
if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
|
||||
if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
|
||||
if nexttask not in self.rqdata.target_tids:
logger.debug(2, "Skipping setscene for task %s" % nexttask)
logger.debug2("Skipping setscene for task %s" % nexttask)
self.sq_task_skip(nexttask)
self.scenequeue_notneeded.add(nexttask)
if nexttask in self.sq_deferred:
@@ -1999,28 +1999,28 @@ class RunQueueExecute:
if nexttask in self.sq_deferred:
if self.sq_deferred[nexttask] not in self.runq_complete:
continue
logger.debug(1, "Task %s no longer deferred" % nexttask)
logger.debug("Task %s no longer deferred" % nexttask)
del self.sq_deferred[nexttask]
valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
if not valid:
logger.debug(1, "%s didn't become valid, skipping setscene" % nexttask)
logger.debug("%s didn't become valid, skipping setscene" % nexttask)
self.sq_task_failoutright(nexttask)
return True
else:
self.sqdata.outrightfail.remove(nexttask)
if nexttask in self.sqdata.outrightfail:
logger.debug(2, 'No package found, so skipping setscene task %s', nexttask)
logger.debug2('No package found, so skipping setscene task %s', nexttask)
self.sq_task_failoutright(nexttask)
return True
if nexttask in self.sqdata.unskippable:
logger.debug(2, "Setscene task %s is unskippable" % nexttask)
logger.debug2("Setscene task %s is unskippable" % nexttask)
task = nexttask
break
if task is not None:
(mc, fn, taskname, taskfn) = split_tid_mcfn(task)
taskname = taskname + "_setscene"
if self.rq.check_stamp_task(task, taskname_from_tid(task), recurse = True, cache=self.stampcache):
logger.debug(2, 'Stamp for underlying task %s is current, so skipping setscene variant', task)
logger.debug2('Stamp for underlying task %s is current, so skipping setscene variant', task)
self.sq_task_failoutright(task)
return True

@@ -2030,12 +2030,12 @@ class RunQueueExecute:
return True

if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
logger.debug(2, 'Setscene stamp current task %s, so skip it and its dependencies', task)
logger.debug2('Setscene stamp current task %s, so skip it and its dependencies', task)
self.sq_task_skip(task)
return True

if self.cooker.configuration.skipsetscene:
logger.debug(2, 'No setscene tasks should be executed. Skipping %s', task)
logger.debug2('No setscene tasks should be executed. Skipping %s', task)
self.sq_task_failoutright(task)
return True

@@ -2097,12 +2097,12 @@ class RunQueueExecute:
return True

if task in self.tasks_covered:
logger.debug(2, "Setscene covered task %s", task)
logger.debug2("Setscene covered task %s", task)
self.task_skip(task, "covered")
return True

if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
logger.debug(2, "Stamp current task %s", task)
logger.debug2("Stamp current task %s", task)

self.task_skip(task, "existing")
self.runq_tasksrun.add(task)
@@ -2322,7 +2322,7 @@ class RunQueueExecute:
remapped = True

if not remapped:
#logger.debug(1, "Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
#logger.debug("Task %s hash changes: %s->%s %s->%s" % (tid, orighash, newhash, origuni, newuni))
self.rqdata.runtaskentries[tid].hash = newhash
self.rqdata.runtaskentries[tid].unihash = newuni
changed.add(tid)
@@ -2337,7 +2337,7 @@ class RunQueueExecute:
for mc in self.rq.fakeworker:
self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>")

hashequiv_logger.debug(1, pprint.pformat("Tasks changed:\n%s" % (changed)))
hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))

for tid in changed:
if tid not in self.rqdata.runq_setscene_tids:
@@ -2356,7 +2356,7 @@ class RunQueueExecute:
# Check no tasks this covers are running
for dep in self.sqdata.sq_covered_tasks[tid]:
if dep in self.runq_running and dep not in self.runq_complete:
hashequiv_logger.debug(2, "Task %s is running which blocks setscene for %s from running" % (dep, tid))
hashequiv_logger.debug2("Task %s is running which blocks setscene for %s from running" % (dep, tid))
valid = False
break
if not valid:
@@ -2430,7 +2430,7 @@ class RunQueueExecute:

for dep in sorted(self.sqdata.sq_deps[task]):
if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]:
logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
self.sq_task_failoutright(dep)
continue
if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
@@ -2460,7 +2460,7 @@ class RunQueueExecute:
completed dependencies as buildable
"""

logger.debug(1, 'Found task %s which could be accelerated', task)
logger.debug('Found task %s which could be accelerated', task)
self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task)

@@ -2775,13 +2775,13 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
continue

if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
logger.debug(2, 'Setscene stamp current for task %s', tid)
logger.debug2('Setscene stamp current for task %s', tid)
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
continue

if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
logger.debug(2, 'Normal stamp current for task %s', tid)
logger.debug2('Normal stamp current for task %s', tid)
sqdata.stamppresent.add(tid)
sqrq.sq_task_skip(tid)
continue

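The logger.debug(2, ...) to logger.debug2(...) conversions above, and throughout the changes that follow, assume numbered debug helpers on the BitBake logger. A minimal sketch of such helpers, assuming BitBake maps its old numeric debug levels onto custom levels just below logging.DEBUG; the class names and exact level arithmetic are assumptions, not the verbatim upstream implementation:

    import logging

    class BBLoggerMixin:
        # Old call sites passed the level as the first argument, e.g.
        # logger.debug(2, msg); these helpers bake the level into the
        # method name so debug() keeps the standard logging signature.
        def debug2(self, msg, *args, **kwargs):
            self.log(logging.DEBUG - 1, msg, *args, **kwargs)

        def debug3(self, msg, *args, **kwargs):
            self.log(logging.DEBUG - 2, msg, *args, **kwargs)

    class BBLogger(BBLoggerMixin, logging.Logger):
        pass

    logging.setLoggerClass(BBLogger)
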
@@ -541,7 +541,7 @@ class SignatureGeneratorUniHashMixIn(object):
# is much more interesting, so it is reported at debug level 1
hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
hashequiv_logger.debug(2, 'No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))

@@ -615,12 +615,12 @@ class SignatureGeneratorUniHashMixIn(object):
new_unihash = data['unihash']

if new_unihash != unihash:
hashequiv_logger.debug(1, 'Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
self.set_unihash(tid, new_unihash)
d.setVar('BB_UNIHASH', new_unihash)
else:
hashequiv_logger.debug(1, 'Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
except hashserv.client.HashConnectionError as e:
bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
finally:
@@ -748,7 +748,7 @@ def clean_basepath(basepath):
if basepath[0] == '/':
return cleaned

if basepath.startswith("mc:"):
if basepath.startswith("mc:") and basepath.count(':') >= 2:
mc, mc_name, basepath = basepath.split(":", 2)
mc_suffix = ':mc:' + mc_name
else:

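The basepath.count(':') >= 2 guard added above matters because split(":", 2) must yield three fields to unpack into mc, mc_name and basepath; a string with only one colon would otherwise raise a ValueError. A small illustration with made-up paths:

    # "mc:<name>:<rest>" splits cleanly into three parts:
    mc, mc_name, rest = "mc:mc1:virtual:libc:foo.bb".split(":", 2)
    assert (mc, mc_name, rest) == ("mc", "mc1", "virtual:libc:foo.bb")

    # With fewer than two colons the three-way unpack would fail,
    # which is exactly what the new guard avoids.
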
@@ -131,7 +131,7 @@ class TaskData:
for depend in dataCache.deps[fn]:
dependids.add(depend)
self.depids[fn] = list(dependids)
logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)
logger.debug2("Added dependencies %s for %s", str(dataCache.deps[fn]), fn)

# Work out runtime dependencies
if not fn in self.rdepids:
@@ -149,9 +149,9 @@ class TaskData:
rreclist.append(rdepend)
rdependids.add(rdepend)
if rdependlist:
logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
logger.debug2("Added runtime dependencies %s for %s", str(rdependlist), fn)
if rreclist:
logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
logger.debug2("Added runtime recommendations %s for %s", str(rreclist), fn)
self.rdepids[fn] = list(rdependids)

for dep in self.depids[fn]:
@@ -378,7 +378,7 @@ class TaskData:
for fn in eligible:
if fn in self.failed_fns:
continue
logger.debug(2, "adding %s to satisfy %s", fn, item)
logger.debug2("adding %s to satisfy %s", fn, item)
self.add_build_target(fn, item)
self.add_tasks(fn, dataCache)

@@ -431,7 +431,7 @@ class TaskData:
for fn in eligible:
if fn in self.failed_fns:
continue
logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
logger.debug2("adding '%s' to satisfy runtime '%s'", fn, item)
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)

@@ -446,7 +446,7 @@ class TaskData:
return
if not missing_list:
missing_list = []
logger.debug(1, "File '%s' is unbuildable, removing...", fn)
logger.debug("File '%s' is unbuildable, removing...", fn)
self.failed_fns.append(fn)
for target in self.build_targets:
if fn in self.build_targets[target]:
@@ -526,7 +526,7 @@ class TaskData:
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
self.remove_runtarget(target)
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
logger.debug("Resolved " + str(added) + " extra dependencies")
if added == 0:
break
# self.dump_data()
@@ -549,38 +549,38 @@ class TaskData:
"""
Dump some debug information on the internal data structures
"""
logger.debug(3, "build_names:")
logger.debug(3, ", ".join(self.build_targets))
logger.debug3("build_names:")
logger.debug3(", ".join(self.build_targets))

logger.debug(3, "run_names:")
logger.debug(3, ", ".join(self.run_targets))
logger.debug3("run_names:")
logger.debug3(", ".join(self.run_targets))

logger.debug(3, "build_targets:")
logger.debug3("build_targets:")
for target in self.build_targets:
targets = "None"
if target in self.build_targets:
targets = self.build_targets[target]
logger.debug(3, " %s: %s", target, targets)
logger.debug3(" %s: %s", target, targets)

logger.debug(3, "run_targets:")
logger.debug3("run_targets:")
for target in self.run_targets:
targets = "None"
if target in self.run_targets:
targets = self.run_targets[target]
logger.debug(3, " %s: %s", target, targets)
logger.debug3(" %s: %s", target, targets)

logger.debug(3, "tasks:")
logger.debug3("tasks:")
for tid in self.taskentries:
logger.debug(3, " %s: %s %s %s",
logger.debug3(" %s: %s %s %s",
tid,
self.taskentries[tid].idepends,
self.taskentries[tid].irdepends,
self.taskentries[tid].tdepends)

logger.debug(3, "dependency ids (per fn):")
logger.debug3("dependency ids (per fn):")
for fn in self.depids:
logger.debug(3, " %s: %s", fn, self.depids[fn])
logger.debug3(" %s: %s", fn, self.depids[fn])

logger.debug(3, "runtime dependency ids (per fn):")
logger.debug3("runtime dependency ids (per fn):")
for fn in self.rdepids:
logger.debug(3, " %s: %s", fn, self.rdepids[fn])
logger.debug3(" %s: %s", fn, self.rdepids[fn])

@@ -148,14 +148,14 @@ class ORMWrapper(object):
buildrequest = None
if brbe is not None:
# Toaster-triggered build
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
logger.debug("buildinfohelper: brbe is %s" % brbe)
br, _ = brbe.split(":")
buildrequest = BuildRequest.objects.get(pk=br)
prj = buildrequest.project
else:
# CLI build
prj = Project.objects.get_or_create_default_project()
logger.debug(1, "buildinfohelper: project is not specified, defaulting to %s" % prj)
logger.debug("buildinfohelper: project is not specified, defaulting to %s" % prj)

if buildrequest is not None:
# reuse existing Build object
@@ -171,7 +171,7 @@ class ORMWrapper(object):
completed_on=now,
build_name='')

logger.debug(1, "buildinfohelper: build is created %s" % build)
logger.debug("buildinfohelper: build is created %s" % build)

if buildrequest is not None:
buildrequest.build = build
@@ -906,7 +906,7 @@ class BuildInfoHelper(object):

self.project = None

logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
logger.debug("buildinfohelper: Build info helper inited %s" % vars(self))


###################
@@ -1620,7 +1620,7 @@ class BuildInfoHelper(object):
# if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
logger.debug(1, "buildinfohelper: Saving stored event %s "
logger.debug("buildinfohelper: Saving stored event %s "
% tempevent)
self.store_log_event(tempevent,cli_backlog)
else:

@@ -609,7 +609,7 @@ def filter_environment(good_vars):
os.environ["LC_ALL"] = "en_US.UTF-8"

if removed_vars:
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
logger.debug("Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

return removed_vars

@@ -1613,12 +1613,12 @@ def export_proxies(d):

def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
logger.debug(1, 'Loading plugin %s' % name)
logger.debug('Loading plugin %s' % name)
spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
if spec:
return spec.loader.load_module()

logger.debug(1, 'Loading plugins from %s...' % pluginpath)
logger.debug('Loading plugins from %s...' % pluginpath)

expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
for ext in python_extensions)

@@ -50,10 +50,10 @@ class ActionPlugin(LayerPlugin):
if not (args.force or notadded):
try:
self.tinfoil.run_command('parseConfiguration')
except bb.tinfoil.TinfoilUIException:
except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
# Restore the back up copy of bblayers.conf
shutil.copy2(backup, bblayers_conf)
bb.fatal("Parse failure with the specified layer added")
bb.fatal("Parse failure with the specified layer added, aborting.")
else:
for item in notadded:
sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)

@@ -79,7 +79,7 @@ class LayerIndexPlugin(ActionPlugin):
branches = [args.branch]
else:
branches = (self.tinfoil.config_data.getVar('LAYERSERIES_CORENAMES') or 'master').split()
logger.debug(1, 'Trying branches: %s' % branches)
logger.debug('Trying branches: %s' % branches)

ignore_layers = []
if args.ignore:

@@ -94,10 +94,10 @@ def chunkify(msg, max_chunk):
yield "\n"


def create_server(addr, dbname, *, sync=True, upstream=None):
def create_server(addr, dbname, *, sync=True, upstream=None, read_only=False):
from . import server
db = setup_database(dbname, sync=sync)
s = server.Server(db, upstream=upstream)
s = server.Server(db, upstream=upstream, read_only=read_only)

(typ, a) = parse_address(addr)
if typ == ADDR_TYPE_UNIX:

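A hedged usage sketch of the extended create_server() signature shown above; the socket path and database file name are illustrative, and serve_forever() is assumed to behave as for the existing server entry point:

    import hashserv

    # Serve an existing equivalence database without accepting writes.
    server = hashserv.create_server("unix://./hashserv.sock",
                                    "./hashserv.db",
                                    read_only=True)
    server.serve_forever()
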
@@ -99,7 +99,7 @@ class AsyncClient(object):
l = await get_line()

m = json.loads(l)
if "chunk-stream" in m:
if m and "chunk-stream" in m:
lines = []
while True:
l = (await get_line()).rstrip("\n")
@@ -170,6 +170,12 @@ class AsyncClient(object):
{"get": {"taskhash": taskhash, "method": method, "all": all_properties}}
)

async def get_outhash(self, method, outhash, taskhash):
await self._set_mode(self.MODE_NORMAL)
return await self.send_message(
{"get-outhash": {"outhash": outhash, "taskhash": taskhash, "method": method}}
)

async def get_stats(self):
await self._set_mode(self.MODE_NORMAL)
return await self.send_message({"get-stats": None})

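A hedged sketch of driving the new get-outhash request through the async client method added above; the argument values are placeholders:

    async def find_equivalent(client, method, outhash, taskhash):
        # Returns the stored task row for a matching output hash,
        # or None if the server has no equivalent entry.
        return await client.get_outhash(method, outhash, taskhash)
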
@@ -112,6 +112,9 @@ class Stats(object):
class ClientError(Exception):
pass

class ServerError(Exception):
pass

def insert_task(cursor, data, ignore=False):
keys = sorted(data.keys())
query = '''INSERT%s INTO tasks_v2 (%s) VALUES (%s)''' % (
@@ -127,6 +130,18 @@ async def copy_from_upstream(client, db, method, taskhash):
d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
keys = sorted(d.keys())

with closing(db.cursor()) as cursor:
insert_task(cursor, d)
db.commit()

return d

async def copy_outhash_from_upstream(client, db, method, outhash, taskhash):
d = await client.get_outhash(method, outhash, taskhash)
if d is not None:
# Filter out unknown columns
d = {k: v for k, v in d.items() if k in TABLE_COLUMNS}
keys = sorted(d.keys())

with closing(db.cursor()) as cursor:
insert_task(cursor, d)
@@ -137,8 +152,22 @@ async def copy_from_upstream(client, db, method, taskhash):
class ServerClient(object):
FAST_QUERY = 'SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
ALL_QUERY = 'SELECT * FROM tasks_v2 WHERE method=:method AND taskhash=:taskhash ORDER BY created ASC LIMIT 1'
OUTHASH_QUERY = '''
-- Find tasks with a matching outhash (that is, tasks that
-- are equivalent)
SELECT * FROM tasks_v2 WHERE method=:method AND outhash=:outhash

def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream):
-- If there is an exact match on the taskhash, return it.
-- Otherwise return the oldest matching outhash of any
-- taskhash
ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
created ASC

-- Only return one row
LIMIT 1
'''

def __init__(self, reader, writer, db, request_stats, backfill_queue, upstream, read_only):
self.reader = reader
self.writer = writer
self.db = db
@@ -149,15 +178,20 @@ class ServerClient(object):

self.handlers = {
'get': self.handle_get,
'report': self.handle_report,
'report-equiv': self.handle_equivreport,
'get-outhash': self.handle_get_outhash,
'get-stream': self.handle_get_stream,
'get-stats': self.handle_get_stats,
'reset-stats': self.handle_reset_stats,
'chunk-stream': self.handle_chunk,
'backfill-wait': self.handle_backfill_wait,
}

if not read_only:
self.handlers.update({
'report': self.handle_report,
'report-equiv': self.handle_equivreport,
'reset-stats': self.handle_reset_stats,
'backfill-wait': self.handle_backfill_wait,
})

async def process_requests(self):
if self.upstream is not None:
self.upstream_client = await create_async_client(self.upstream)
@@ -282,6 +316,21 @@ class ServerClient(object):

self.write_message(d)

async def handle_get_outhash(self, request):
with closing(self.db.cursor()) as cursor:
cursor.execute(self.OUTHASH_QUERY,
{k: request[k] for k in ('method', 'outhash', 'taskhash')})

row = cursor.fetchone()

if row is not None:
logger.debug('Found equivalent outhash %s -> %s', (row['outhash'], row['unihash']))
d = {k: row[k] for k in row.keys()}
else:
d = None

self.write_message(d)

async def handle_get_stream(self, request):
self.write_message('ok')

@@ -335,23 +384,19 @@ class ServerClient(object):

async def handle_report(self, data):
with closing(self.db.cursor()) as cursor:
cursor.execute('''
-- Find tasks with a matching outhash (that is, tasks that
-- are equivalent)
SELECT taskhash, method, unihash FROM tasks_v2 WHERE method=:method AND outhash=:outhash

-- If there is an exact match on the taskhash, return it.
-- Otherwise return the oldest matching outhash of any
-- taskhash
ORDER BY CASE WHEN taskhash=:taskhash THEN 1 ELSE 2 END,
created ASC

-- Only return one row
LIMIT 1
''', {k: data[k] for k in ('method', 'outhash', 'taskhash')})
cursor.execute(self.OUTHASH_QUERY,
{k: data[k] for k in ('method', 'outhash', 'taskhash')})

row = cursor.fetchone()

if row is None and self.upstream_client:
# Try upstream
row = await copy_outhash_from_upstream(self.upstream_client,
self.db,
data['method'],
data['outhash'],
data['taskhash'])

# If no matching outhash was found, or one *was* found but it
# wasn't an exact match on the taskhash, a new entry for this
# taskhash should be added
@@ -455,7 +500,10 @@ class ServerClient(object):


class Server(object):
def __init__(self, db, loop=None, upstream=None):
def __init__(self, db, loop=None, upstream=None, read_only=False):
if upstream and read_only:
raise ServerError("Read-only hashserv cannot pull from an upstream server")

self.request_stats = Stats()
self.db = db

@@ -467,6 +515,7 @@ class Server(object):
self.close_loop = False

self.upstream = upstream
self.read_only = read_only

self._cleanup_socket = None

@@ -510,7 +559,7 @@ class Server(object):
async def handle_client(self, reader, writer):
# writer.transport.set_write_buffer_limits(0)
try:
client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream)
client = ServerClient(reader, writer, self.db, self.request_stats, self.backfill_queue, self.upstream, self.read_only)
await client.process_requests()
except Exception as e:
import traceback

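The read-only mode above works by omission: write-capable message types are only registered in self.handlers when read_only is false, so a read-only server presumably rejects 'report' the same way it rejects any unknown message type. A hedged sketch of that dispatch pattern, with illustrative names rather than the upstream error path:

    class ClientError(Exception):
        pass

    def make_dispatcher(read_handlers, write_handlers, read_only):
        # Write handlers are simply never registered on a read-only
        # server, so their message types fail like unknown ones.
        table = dict(read_handlers)
        if not read_only:
            table.update(write_handlers)

        def dispatch(msg_type, request):
            handler = table.get(msg_type)
            if handler is None:
                raise ClientError("Unsupported message type %r" % msg_type)
            return handler(request)

        return dispatch
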
@@ -6,6 +6,7 @@
#

from . import create_server, create_client
from .client import HashConnectionError
import hashlib
import logging
import multiprocessing
@@ -29,7 +30,7 @@ class HashEquivalenceTestSetup(object):

server_index = 0

def start_server(self, dbpath=None, upstream=None):
def start_server(self, dbpath=None, upstream=None, read_only=False):
self.server_index += 1
if dbpath is None:
dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
@@ -38,7 +39,10 @@ class HashEquivalenceTestSetup(object):
thread.terminate()
thread.join()

server = create_server(self.get_server_addr(self.server_index), dbpath, upstream=upstream)
server = create_server(self.get_server_addr(self.server_index),
dbpath,
upstream=upstream,
read_only=read_only)
server.dbpath = dbpath

server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index))
@@ -242,6 +246,43 @@ class HashEquivalenceCommonTests(object):
self.assertClientGetHash(side_client, taskhash4, unihash4)
self.assertClientGetHash(self.client, taskhash4, None)

# Test that reporting a unihash in the downstream is able to find a
# match which was previously reported to the upstream server
taskhash5 = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash5 = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash5 = 'f46d3fbb439bd9b921095da657a4de906510d2cd'
result = self.client.report_unihash(taskhash5, self.METHOD, outhash5, unihash5)

taskhash6 = '35788efcb8dfb0a02659d81cf2bfd695fb30fafa'
unihash6 = 'f46d3fbb439bd9b921095da657a4de906510d2ce'
result = down_client.report_unihash(taskhash6, self.METHOD, outhash5, unihash6)
self.assertEqual(result['unihash'], unihash5, 'Server failed to copy unihash from upstream')

def test_ro_server(self):
(ro_client, ro_server) = self.start_server(dbpath=self.server.dbpath, read_only=True)

# Report a hash via the read-write server
taskhash = '35788efcb8dfb0a02659d81cf2bfd695fb30faf9'
outhash = '2765d4a5884be49b28601445c2760c5f21e7e5c0ee2b7e3fce98fd7e5970796f'
unihash = 'f46d3fbb439bd9b921095da657a4de906510d2cd'

result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash)
self.assertEqual(result['unihash'], unihash, 'Server returned bad unihash')

# Check the hash via the read-only server
self.assertClientGetHash(ro_client, taskhash, unihash)

# Ensure that reporting via the read-only server fails
taskhash2 = 'c665584ee6817aa99edfc77a44dd853828279370'
outhash2 = '3c979c3db45c569f51ab7626a4651074be3a9d11a84b1db076f5b14f7d39db44'
unihash2 = '90e9bc1d1f094c51824adca7f8ea79a048d68824'

with self.assertRaises(HashConnectionError):
ro_client.report_unihash(taskhash2, self.METHOD, outhash2, unihash2)

# Ensure that the database was not modified
self.assertClientGetHash(self.client, taskhash2, None)


class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
def get_server_addr(self, server_idx):

@@ -94,7 +94,7 @@ class LayerIndex():
if not param:
continue
item = param.split('=', 1)
logger.debug(1, item)
logger.debug(item)
param_dict[item[0]] = item[1]

return param_dict
@@ -123,7 +123,7 @@ class LayerIndex():
up = urlparse(url)

if username:
logger.debug(1, "Configuring authentication for %s..." % url)
logger.debug("Configuring authentication for %s..." % url)
password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, "%s://%s" % (up.scheme, up.netloc), username, password)
handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
@@ -133,20 +133,20 @@ class LayerIndex():

urllib.request.install_opener(opener)

logger.debug(1, "Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)]))
logger.debug("Fetching %s (%s)..." % (url, ["without authentication", "with authentication"][bool(username)]))

try:
res = urlopen(Request(url, headers={'User-Agent': 'Mozilla/5.0 (bitbake/lib/layerindex)'}, unverifiable=True))
except urllib.error.HTTPError as e:
logger.debug(1, "HTTP Error: %s: %s" % (e.code, e.reason))
logger.debug(1, " Requested: %s" % (url))
logger.debug(1, " Actual: %s" % (e.geturl()))
logger.debug("HTTP Error: %s: %s" % (e.code, e.reason))
logger.debug(" Requested: %s" % (url))
logger.debug(" Actual: %s" % (e.geturl()))

if e.code == 404:
logger.debug(1, "Request not found.")
logger.debug("Request not found.")
raise LayerIndexFetchError(url, e)
else:
logger.debug(1, "Headers:\n%s" % (e.headers))
logger.debug("Headers:\n%s" % (e.headers))
raise LayerIndexFetchError(url, e)
except OSError as e:
error = 0
@@ -170,7 +170,7 @@ class LayerIndex():
raise LayerIndexFetchError(url, "Unable to fetch OSError exception: %s" % e)

finally:
logger.debug(1, "...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)]))
logger.debug("...fetching %s (%s), done." % (url, ["without authentication", "with authentication"][bool(username)]))

return res

@@ -205,14 +205,14 @@ The format of the indexURI:
if reload:
self.indexes = []

logger.debug(1, 'Loading: %s' % indexURI)
logger.debug('Loading: %s' % indexURI)

if not self.plugins:
raise LayerIndexException("No LayerIndex Plugins available")

for plugin in self.plugins:
# Check if the plugin was initialized
logger.debug(1, 'Trying %s' % plugin.__class__)
logger.debug('Trying %s' % plugin.__class__)
if not hasattr(plugin, 'type') or not plugin.type:
continue
try:
@@ -220,11 +220,11 @@ The format of the indexURI:
indexEnt = plugin.load_index(indexURI, load)
break
except LayerIndexPluginUrlError as e:
logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url))
logger.debug("%s doesn't support %s" % (plugin.type, e.url))
except NotImplementedError:
pass
else:
logger.debug(1, "No plugins support %s" % indexURI)
logger.debug("No plugins support %s" % indexURI)
raise LayerIndexException("No plugins support %s" % indexURI)

# Mark CONFIG data as something we've added...
@@ -255,19 +255,19 @@ will write out the individual elements split by layer and related components.

for plugin in self.plugins:
# Check if the plugin was initialized
logger.debug(1, 'Trying %s' % plugin.__class__)
logger.debug('Trying %s' % plugin.__class__)
if not hasattr(plugin, 'type') or not plugin.type:
continue
try:
plugin.store_index(indexURI, index)
break
except LayerIndexPluginUrlError as e:
logger.debug(1, "%s doesn't support %s" % (plugin.type, e.url))
logger.debug("%s doesn't support %s" % (plugin.type, e.url))
except NotImplementedError:
logger.debug(1, "Store not implemented in %s" % plugin.type)
logger.debug("Store not implemented in %s" % plugin.type)
pass
else:
logger.debug(1, "No plugins support %s" % indexURI)
logger.debug("No plugins support %s" % indexURI)
raise LayerIndexException("No plugins support %s" % indexURI)


@@ -292,7 +292,7 @@ layerBranches set. If not, they are effectively blank.'''
the default configuration until the first vcs_url/branch match.'''

for index in self.indexes:
logger.debug(1, ' searching %s' % index.config['DESCRIPTION'])
logger.debug(' searching %s' % index.config['DESCRIPTION'])
layerBranch = index.find_vcs_url(vcs_url, [branch])
if layerBranch:
return layerBranch
@@ -304,7 +304,7 @@ layerBranches set. If not, they are effectively blank.'''
If a branch has not been specified, we will iterate over the branches in
the default configuration until the first collection/branch match.'''

logger.debug(1, 'find_collection: %s (%s) %s' % (collection, version, branch))
logger.debug('find_collection: %s (%s) %s' % (collection, version, branch))

if branch:
branches = [branch]
@@ -312,12 +312,12 @@ layerBranches set. If not, they are effectively blank.'''
branches = None

for index in self.indexes:
logger.debug(1, ' searching %s' % index.config['DESCRIPTION'])
logger.debug(' searching %s' % index.config['DESCRIPTION'])
layerBranch = index.find_collection(collection, version, branches)
if layerBranch:
return layerBranch
else:
logger.debug(1, 'Collection %s (%s) not found for branch (%s)' % (collection, version, branch))
logger.debug('Collection %s (%s) not found for branch (%s)' % (collection, version, branch))
return None

def find_layerbranch(self, name, branch=None):
@@ -408,7 +408,7 @@ layerBranches set. If not, they are effectively blank.'''
version=deplayerbranch.version
)
if rdeplayerbranch != deplayerbranch:
logger.debug(1, 'Replaced %s:%s:%s with %s:%s:%s' % \
logger.debug('Replaced %s:%s:%s with %s:%s:%s' % \
(deplayerbranch.index.config['DESCRIPTION'],
deplayerbranch.branch.name,
deplayerbranch.layer.name,
@@ -1121,7 +1121,7 @@ class LayerBranch(LayerIndexItemObj):
@property
def branch(self):
try:
logger.debug(1, "Get branch object from branches[%s]" % (self.branch_id))
logger.debug("Get branch object from branches[%s]" % (self.branch_id))
return self.index.branches[self.branch_id]
except KeyError:
raise AttributeError('Unable to find branches in index to map branch_id %s' % self.branch_id)
@@ -1149,7 +1149,7 @@ class LayerBranch(LayerIndexItemObj):

@actual_branch.setter
def actual_branch(self, value):
logger.debug(1, "Set actual_branch to %s .. name is %s" % (value, self.branch.name))
logger.debug("Set actual_branch to %s .. name is %s" % (value, self.branch.name))
if value != self.branch.name:
self._setattr('actual_branch', value, prop=False)
else:

@@ -173,7 +173,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
else:
branches = ['HEAD']

logger.debug(1, "Loading cooker data branches %s" % branches)
logger.debug("Loading cooker data branches %s" % branches)

index = self._load_bblayers(branches=branches)

@@ -220,7 +220,7 @@ class CookerPlugin(layerindexlib.plugin.IndexPlugin):
required=required, layerbranch=layerBranchId,
dependency=depLayerBranch.layer_id)

logger.debug(1, '%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name))
logger.debug('%s requires %s' % (layerDependency.layer.name, layerDependency.dependency.name))
index.add_element("layerDependencies", [layerDependency])

return layerDependencyId

@@ -82,7 +82,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):


def load_cache(path, index, branches=[]):
logger.debug(1, 'Loading json file %s' % path)
logger.debug('Loading json file %s' % path)
with open(path, 'rt', encoding='utf-8') as f:
pindex = json.load(f)

@@ -102,7 +102,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
if newpBranch:
index.add_raw_element('branches', layerindexlib.Branch, newpBranch)
else:
logger.debug(1, 'No matching branches (%s) in index file(s)' % branches)
logger.debug('No matching branches (%s) in index file(s)' % branches)
# No matching branches.. return nothing...
return

@@ -120,7 +120,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
load_cache(up.path, index, branches)
return index

logger.debug(1, 'Loading from dir %s...' % (up.path))
logger.debug('Loading from dir %s...' % (up.path))
for (dirpath, _, filenames) in os.walk(up.path):
for filename in filenames:
if not filename.endswith('.json'):
@@ -144,7 +144,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
def _get_json_response(apiurl=None, username=None, password=None, retry=True):
assert apiurl is not None

logger.debug(1, "fetching %s" % apiurl)
logger.debug("fetching %s" % apiurl)

up = urlparse(apiurl)

@@ -163,9 +163,9 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
parsed = json.loads(res.read().decode('utf-8'))
except ConnectionResetError:
if retry:
logger.debug(1, "%s: Connection reset by peer. Retrying..." % url)
logger.debug("%s: Connection reset by peer. Retrying..." % url)
parsed = _get_json_response(apiurl=up_stripped.geturl(), username=username, password=password, retry=False)
logger.debug(1, "%s: retry successful.")
logger.debug("%s: retry successful.")
else:
raise layerindexlib.LayerIndexFetchError('%s: Connection reset by peer. Is there a firewall blocking your connection?' % apiurl)

@@ -207,25 +207,25 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
if "*" not in branches:
filter = "?filter=name:%s" % "OR".join(branches)

logger.debug(1, "Loading %s from %s" % (branches, index.apilinks['branches']))
logger.debug("Loading %s from %s" % (branches, index.apilinks['branches']))

# The link won't include username/password, so pull it from the original url
pindex['branches'] = _get_json_response(index.apilinks['branches'] + filter,
username=up.username, password=up.password)
if not pindex['branches']:
logger.debug(1, "No valid branches (%s) found at url %s." % (branch, url))
logger.debug("No valid branches (%s) found at url %s." % (branch, url))
return index
index.add_raw_element("branches", layerindexlib.Branch, pindex['branches'])

# Load all of the layerItems (these can not be easily filtered)
logger.debug(1, "Loading %s from %s" % ('layerItems', index.apilinks['layerItems']))
logger.debug("Loading %s from %s" % ('layerItems', index.apilinks['layerItems']))


# The link won't include username/password, so pull it from the original url
pindex['layerItems'] = _get_json_response(index.apilinks['layerItems'],
username=up.username, password=up.password)
if not pindex['layerItems']:
logger.debug(1, "No layers were found at url %s." % (url))
logger.debug("No layers were found at url %s." % (url))
return index
index.add_raw_element("layerItems", layerindexlib.LayerItem, pindex['layerItems'])

@@ -235,13 +235,13 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
for branch in index.branches:
filter = "?filter=branch__name:%s" % index.branches[branch].name

logger.debug(1, "Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches']))
logger.debug("Loading %s from %s" % ('layerBranches', index.apilinks['layerBranches']))

# The link won't include username/password, so pull it from the original url
pindex['layerBranches'] = _get_json_response(index.apilinks['layerBranches'] + filter,
username=up.username, password=up.password)
if not pindex['layerBranches']:
logger.debug(1, "No valid layer branches (%s) found at url %s." % (branches or "*", url))
logger.debug("No valid layer branches (%s) found at url %s." % (branches or "*", url))
return index
index.add_raw_element("layerBranches", layerindexlib.LayerBranch, pindex['layerBranches'])

@@ -256,7 +256,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
("distros", layerindexlib.Distro)]:
if lName not in load:
continue
logger.debug(1, "Loading %s from %s" % (lName, index.apilinks[lName]))
logger.debug("Loading %s from %s" % (lName, index.apilinks[lName]))

# The link won't include username/password, so pull it from the original url
pindex[lName] = _get_json_response(index.apilinks[lName] + filter,
@@ -283,7 +283,7 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
if up.scheme != 'file':
raise layerindexlib.plugin.LayerIndexPluginUrlError(self.type, url)

logger.debug(1, "Storing to %s..." % up.path)
logger.debug("Storing to %s..." % up.path)

try:
layerbranches = index.layerBranches
@@ -299,12 +299,12 @@ class RestApiPlugin(layerindexlib.plugin.IndexPlugin):
if getattr(index, objects)[obj].layerbranch_id == layerbranchid:
filtered.append(getattr(index, objects)[obj]._data)
except AttributeError:
logger.debug(1, 'No obj.layerbranch_id: %s' % objects)
logger.debug('No obj.layerbranch_id: %s' % objects)
# No simple filter method, just include it...
try:
filtered.append(getattr(index, objects)[obj]._data)
except AttributeError:
logger.debug(1, 'No obj._data: %s %s' % (objects, type(obj)))
logger.debug('No obj._data: %s %s' % (objects, type(obj)))
filtered.append(obj)
return filtered


@@ -72,7 +72,7 @@ class LayerIndexCookerTest(LayersTest):

def test_find_collection(self):
def _check(collection, expected):
self.logger.debug(1, "Looking for collection %s..." % collection)
self.logger.debug("Looking for collection %s..." % collection)
result = self.layerindex.find_collection(collection)
if expected:
self.assertIsNotNone(result, msg="Did not find %s when it shouldn't be there" % collection)
@@ -91,7 +91,7 @@ class LayerIndexCookerTest(LayersTest):

def test_find_layerbranch(self):
def _check(name, expected):
self.logger.debug(1, "Looking for layerbranch %s..." % name)
self.logger.debug("Looking for layerbranch %s..." % name)
result = self.layerindex.find_layerbranch(name)
if expected:
self.assertIsNotNone(result, msg="Did not find %s when it shouldn't be there" % collection)

@@ -57,11 +57,11 @@ class LayerIndexWebRestApiTest(LayersTest):
type in self.layerindex.indexes[0].config['local']:
continue
for id in getattr(self.layerindex.indexes[0], type):
self.logger.debug(1, "type %s" % (type))
self.logger.debug("type %s" % (type))

self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number not in reloaded index")

self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id]))
self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id]))

self.assertEqual(getattr(self.layerindex.indexes[0], type)[id], getattr(reload.indexes[0], type)[id], msg="Reloaded contents different")

@@ -80,11 +80,11 @@ class LayerIndexWebRestApiTest(LayersTest):
type in self.layerindex.indexes[0].config['local']:
continue
for id in getattr(self.layerindex.indexes[0] ,type):
self.logger.debug(1, "type %s" % (type))
self.logger.debug("type %s" % (type))

self.assertTrue(id in getattr(reload.indexes[0], type), msg="Id number missing from reloaded data")

self.logger.debug(1, "%s ? %s" % (getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id]))
self.logger.debug("%s ? %s" % (getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id]))

self.assertEqual(getattr(self.layerindex.indexes[0] ,type)[id], getattr(reload.indexes[0], type)[id], msg="reloaded data does not match original")

@@ -111,14 +111,14 @@ class LayerIndexWebRestApiTest(LayersTest):
if dep.layer.name == 'meta-python':
break
else:
self.logger.debug(1, "meta-python was not found")
self.logger.debug("meta-python was not found")
raise self.failureException

# Only check the first element...
break
else:
# Empty list, this is bad.
self.logger.debug(1, "Empty list of dependencies")
self.logger.debug("Empty list of dependencies")
self.assertIsNotNone(first, msg="Empty list of dependencies")

# Last dep should be the requested item
@@ -128,7 +128,7 @@ class LayerIndexWebRestApiTest(LayersTest):
@skipIfNoNetwork()
def test_find_collection(self):
def _check(collection, expected):
self.logger.debug(1, "Looking for collection %s..." % collection)
self.logger.debug("Looking for collection %s..." % collection)
result = self.layerindex.find_collection(collection)
if expected:
self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection)
@@ -148,11 +148,11 @@ class LayerIndexWebRestApiTest(LayersTest):
@skipIfNoNetwork()
def test_find_layerbranch(self):
def _check(name, expected):
self.logger.debug(1, "Looking for layerbranch %s..." % name)
self.logger.debug("Looking for layerbranch %s..." % name)

for index in self.layerindex.indexes:
for layerbranchid in index.layerBranches:
self.logger.debug(1, "Present: %s" % index.layerBranches[layerbranchid].layer.name)
self.logger.debug("Present: %s" % index.layerBranches[layerbranchid].layer.name)
result = self.layerindex.find_layerbranch(name)
if expected:
self.assertIsNotNone(result, msg="Did not find %s when it should be there" % collection)

@@ -17,7 +17,7 @@ def autotools_dep_prepend(d):
and not d.getVar('INHIBIT_DEFAULT_DEPS'):
deps += 'libtool-cross '

return deps + 'gnu-config-native '
return deps

DEPENDS_prepend = "${@autotools_dep_prepend(d)} "

@@ -30,7 +30,7 @@ inherit siteinfo
export CONFIG_SITE

acpaths ?= "default"
EXTRA_AUTORECONF = "--exclude=autopoint"
EXTRA_AUTORECONF = "--exclude=autopoint --exclude=gtkdocize"

export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}"

@@ -215,21 +215,13 @@ autotools_do_configure() {
PRUNE_M4="$PRUNE_M4 gettext.m4 iconv.m4 lib-ld.m4 lib-link.m4 lib-prefix.m4 nls.m4 po.m4 progtest.m4"
fi
mkdir -p m4
if grep -q "^[[:space:]]*[AI][CT]_PROG_INTLTOOL" $CONFIGURE_AC; then
if ! echo "${DEPENDS}" | grep -q intltool-native; then
bbwarn "Missing DEPENDS on intltool-native"
fi
PRUNE_M4="$PRUNE_M4 intltool.m4"
bbnote Executing intltoolize --copy --force --automake
intltoolize --copy --force --automake
fi

for i in $PRUNE_M4; do
find ${S} -ignore_readdir_race -name $i -delete
done

bbnote Executing ACLOCAL=\"$ACLOCAL\" autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths
ACLOCAL="$ACLOCAL" autoreconf -Wcross --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || die "autoreconf execution failed."
ACLOCAL="$ACLOCAL" autoreconf -Wcross -Wno-obsolete --verbose --install --force ${EXTRA_AUTORECONF} $acpaths || die "autoreconf execution failed."
cd $olddir
fi
if [ -e ${CONFIGURE_SCRIPT} ]; then

@@ -53,6 +53,13 @@ CVE_CHECK_PN_WHITELIST ?= ""
#
CVE_CHECK_WHITELIST ?= ""

# Layers to be excluded
CVE_CHECK_LAYER_EXCLUDELIST ??= ""

# Layers to be included
CVE_CHECK_LAYER_INCLUDELIST ??= ""


# set to "alphabetical" for version using single alphabetical character as increament release
CVE_VERSION_SUFFIX ??= ""

@@ -334,7 +341,20 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
CVE manifest if enabled.
"""


cve_file = d.getVar("CVE_CHECK_LOG")
fdir_name = d.getVar("FILE_DIRNAME")
layer = fdir_name.split("/")[-3]

include_layers = d.getVar("CVE_CHECK_LAYER_INCLUDELIST").split()
exclude_layers = d.getVar("CVE_CHECK_LAYER_EXCLUDELIST").split()

if exclude_layers and layer in exclude_layers:
return

if include_layers and layer not in include_layers:
return

nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
write_string = ""
unpatched_cves = []
@@ -344,6 +364,7 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
is_patched = cve in patched
if is_patched and (d.getVar("CVE_CHECK_REPORT_PATCHED") != "1"):
continue
write_string += "LAYER: %s\n" % layer
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
write_string += "CVE: %s\n" % cve

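A hedged local.conf sketch of the new cve-check layer filters; the layer names are illustrative. Note that the layer is derived from FILE_DIRNAME, i.e. the third-from-last component of the recipe's directory path:

    # Only report CVEs for recipes from these layers...
    CVE_CHECK_LAYER_INCLUDELIST = "meta meta-poky"
    # ...and never for recipes from this one.
    CVE_CHECK_LAYER_EXCLUDELIST = "meta-selftest"
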
@@ -110,7 +110,7 @@ IMAGE_CMD_squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAM

IMAGE_CMD_TAR ?= "tar"
# ignore return code 1 "file changed as we read it" as other tasks(e.g. do_image_wic) may be hardlinking rootfs
IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --sort=name --format=gnu --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"
IMAGE_CMD_tar = "${IMAGE_CMD_TAR} --sort=name --format=posix --numeric-owner -cf ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.tar -C ${IMAGE_ROOTFS} . || [ $? -eq 1 ]"

do_image_cpio[cleandirs] += "${WORKDIR}/cpio_append"
IMAGE_CMD_cpio () {

@@ -210,7 +210,8 @@ def license_deployed_manifest(d):
os.unlink(lic_manifest_symlink_dir)

# create the image dir symlink
os.symlink(lic_manifest_dir, lic_manifest_symlink_dir)
if lic_manifest_dir != lic_manifest_symlink_dir:
os.symlink(lic_manifest_dir, lic_manifest_symlink_dir)

def get_deployed_dependencies(d):
"""

@@ -20,7 +20,7 @@
inherit python3native

DEPENDS_prepend = "nodejs-native "
RDEPENDS_${PN}_prepend = "nodejs "
RDEPENDS_${PN}_append_class-target = " nodejs"

NPM_INSTALL_DEV ?= "0"


@@ -10,7 +10,9 @@ GCCPIE ?= "--enable-default-pie"

# _FORTIFY_SOURCE requires -O1 or higher, so disable in debug builds as they use
# -O0 which then results in a compiler warning.
lcl_maybe_fortify ?= "${@oe.utils.conditional('DEBUG_BUILD','1','','-D_FORTIFY_SOURCE=2',d)}"
OPTLEVEL = "${@bb.utils.filter('SELECTED_OPTIMIZATION', '-O0 -O1 -O2 -O3 -Ofast -Og -Os -Oz -O', d)}"

lcl_maybe_fortify ?= "${@oe.utils.conditional('OPTLEVEL','-O0','','${OPTLEVEL} -D_FORTIFY_SOURCE=2',d)}"

# Error on use of format strings that represent possible security problems
SECURITY_STRINGFORMAT ?= "-Wformat -Wformat-security -Werror=format-security"

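The reworked lcl_maybe_fortify keys off OPTLEVEL, which bb.utils.filter() reduces to whichever -O words appear in SELECTED_OPTIMIZATION, so _FORTIFY_SOURCE=2 is now dropped exactly when the active optimisation level is -O0. A minimal standalone sketch of that filtering step; the real bb.utils.filter() takes a variable name and a datastore rather than raw strings:

    def filter_words(value, checkvalues):
        # Keep only the words of 'value' that occur in 'checkvalues'.
        allowed = set(checkvalues.split())
        return " ".join(w for w in value.split() if w in allowed)

    # -O2 survives the filter, so fortification stays enabled; with
    # -O0 the conditional would drop -D_FORTIFY_SOURCE=2 instead.
    assert filter_words("-pipe -g -O2",
                        "-O0 -O1 -O2 -O3 -Ofast -Og -Os -Oz -O") == "-O2"
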
@@ -6,9 +6,9 @@
# to the distro running on the build machine.
#

UNINATIVE_MAXGLIBCVERSION = "2.32"
UNINATIVE_MAXGLIBCVERSION = "2.33"

UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/2.9/"
UNINATIVE_CHECKSUM[aarch64] ?= "9f25a667aee225b1dd65c4aea73e01983e825b1cb9b56937932a1ee328b45f81"
UNINATIVE_CHECKSUM[i686] ?= "cae5d73245d95b07cf133b780ba3f6c8d0adca3ffc4e7e7fab999961d5e24d36"
UNINATIVE_CHECKSUM[x86_64] ?= "d07916b95c419c81541a19c8ef0ed8cbd78ae18437ff28a4c8a60ef40518e423"
UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/2.11/"
UNINATIVE_CHECKSUM[aarch64] ?= "fa703e25c26eaebb1afd895337b92a24cc5077818e093af74912e53846a117fe"
UNINATIVE_CHECKSUM[i686] ?= "638901c990ffbe716a34400134a2ad49a1c3104e3b48cdafd6fcd28e9b133294"
UNINATIVE_CHECKSUM[x86_64] ?= "047ddd78d6b5cabd2a102120e27755a9eaa1d5724c6a8f4007daa3f10ecb6871"

@@ -123,6 +123,8 @@ CONFLICT_MACHINE_FEATURES[doc] = "When a recipe inherits the features_check clas
CORE_IMAGE_EXTRA_INSTALL[doc] = "Specifies the list of packages to be added to the image. You should only set this variable in the conf/local.conf file in the Build Directory."
COREBASE[doc] = "Specifies the parent directory of the OpenEmbedded Core Metadata layer (i.e. meta)."
CONF_VERSION[doc] = "Tracks the version of local.conf. Increased each time build/conf/ changes incompatibly."
CVE_CHECK_LAYER_EXCLUDELIST[doc] = "Defines which layers to exclude from cve-check scanning"
CVE_CHECK_LAYER_INCLUDELIST[doc] = "Defines which layers to include during cve-check scanning"

#D


@@ -3,7 +3,7 @@
# See sanity.bbclass
#
# Expert users can confirm their sanity with "touch conf/sanity.conf"
BB_MIN_VERSION = "1.47.0"
BB_MIN_VERSION = "1.49.1"

SANITY_ABIFILE = "${TMPDIR}/abi_version"


@@ -185,7 +185,7 @@ class Custom(Terminal):
Terminal.__init__(self, sh_cmd, title, env, d)
logger.warning('Custom terminal was started.')
else:
logger.debug(1, 'No custom terminal (OE_TERMINAL_CUSTOMCMD) set')
logger.debug('No custom terminal (OE_TERMINAL_CUSTOMCMD) set')
raise UnsupportedTerminal('OE_TERMINAL_CUSTOMCMD not set')


@@ -216,7 +216,7 @@ def spawn_preferred(sh_cmd, title=None, env=None, d=None):

def spawn(name, sh_cmd, title=None, env=None, d=None):
"""Spawn the specified terminal, by name"""
logger.debug(1, 'Attempting to spawn terminal "%s"', name)
logger.debug('Attempting to spawn terminal "%s"', name)
try:
terminal = Registry.registry[name]
except KeyError:

@@ -65,7 +65,6 @@ exclude_packages = [
'quilt-ptest',
'rsync',
'ruby',
'spirv-tools-dev',
'swig',
'syslinux-misc',
'systemd-bootchart',

@@ -318,6 +318,7 @@ class Wic(WicTestCase):
"--image-name=core-image-minimal "
"-D -o %s" % self.resultdir)
self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*")))

def test_debug_long(self):
"""Test --debug option"""
@@ -325,6 +326,7 @@ class Wic(WicTestCase):
"--image-name=core-image-minimal "
"--debug -o %s" % self.resultdir)
self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct")))
self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*")))

def test_skip_build_check_short(self):
"""Test -s option"""

@@ -43,28 +43,12 @@ def make_logger_bitbake_compatible(logger):
import logging

"""
Bitbake logger redifines debug() in order to
set a level within debug, this breaks compatibility
with vainilla logging, so we neeed to redifine debug()
method again also add info() method with INFO + 1 level.
We need to raise the log level of the info output so unittest
messages are visible on the console.
"""
def _bitbake_log_debug(*args, **kwargs):
lvl = logging.DEBUG

if isinstance(args[0], int):
lvl = args[0]
msg = args[1]
args = args[2:]
else:
msg = args[0]
args = args[1:]

logger.log(lvl, msg, *args, **kwargs)

def _bitbake_log_info(msg, *args, **kwargs):
logger.log(logging.INFO + 1, msg, *args, **kwargs)

logger.debug = _bitbake_log_debug
logger.info = _bitbake_log_info

return logger

@@ -117,7 +117,7 @@ def extract_packages(d, needed_packages):
extract = package.get('extract', True)

if extract:
#logger.debug(1, 'Extracting %s' % pkg)
#logger.debug('Extracting %s' % pkg)
dst_dir = os.path.join(extracted_path, pkg)
# Same package used for more than one test,
# don't need to extract again.
@@ -130,7 +130,7 @@ def extract_packages(d, needed_packages):
shutil.rmtree(pkg_dir)

else:
#logger.debug(1, 'Copying %s' % pkg)
#logger.debug('Copying %s' % pkg)
_copy_package(d, pkg)

def _extract_in_tmpdir(d, pkg):

@@ -3,10 +3,9 @@ Update autotools infrastructure (including gettext) to modern versions.
|
||||
Upstream-Status: Pending
|
||||
Signed-off-by: Phil Blundell <pb@pbcl.net>
|
||||
|
||||
Index: lrzsz-0.12.20/configure.in
|
||||
===================================================================
|
||||
--- lrzsz-0.12.20.orig/configure.in
|
||||
+++ lrzsz-0.12.20/configure.in
|
||||
diff -uprN clean/lrzsz-0.12.20/configure.in lrzsz-0.12.20/configure.in
|
||||
--- clean/lrzsz-0.12.20/configure.in 1998-12-30 07:50:07.000000000 +0000
|
||||
+++ lrzsz-0.12.20/configure.in 2019-11-25 16:22:37.000000000 +0000
|
||||
@@ -92,7 +92,6 @@ AC_PROG_RANLIB
|
||||
AC_ISC_POSIX
|
||||
AC_AIX
|
||||
@@ -15,7 +14,7 @@ Index: lrzsz-0.12.20/configure.in
|
||||
AC_C_CONST
|
||||
AC_C_INLINE
|
||||
|
||||
@@ -253,18 +252,14 @@ ihave$lookup_facility
|
||||
@@ -253,18 +252,13 @@ ihave$lookup_facility
|
||||
fi
|
||||
|
||||
|
||||
@@ -25,7 +24,6 @@ Index: lrzsz-0.12.20/configure.in
|
||||
|
||||
-AM_GNU_GETTEXT
|
||||
+AM_GNU_GETTEXT([external])
|
||||
+AM_GNU_GETTEXT_VERSION([0.21])
|
||||
|
||||
-AC_DEFINE_UNQUOTED(LOCALEDIR,"$prefix/$DATADIRNAME")
|
||||
-AC_LINK_FILES($nls_cv_header_libgt, $nls_cv_header_intl)
|
||||
@@ -38,10 +36,9 @@ Index: lrzsz-0.12.20/configure.in
|
||||
+[
|
||||
chmod +x debian/rules;
|
||||
test -z "$CONFIG_HEADERS" || echo timestamp > stamp-h])
|
||||
Index: lrzsz-0.12.20/intl/bindtextdom.c
|
||||
===================================================================
|
||||
--- lrzsz-0.12.20.orig/intl/bindtextdom.c
|
||||
+++ /dev/null
|
||||
diff -uprN clean/lrzsz-0.12.20/intl/bindtextdom.c lrzsz-0.12.20/intl/bindtextdom.c
|
||||
--- clean/lrzsz-0.12.20/intl/bindtextdom.c 1998-04-26 14:22:36.000000000 +0100
|
||||
+++ lrzsz-0.12.20/intl/bindtextdom.c 1970-01-01 01:00:00.000000000 +0100
|
||||
@@ -1,199 +0,0 @@
|
||||
-/* Implementation of the bindtextdomain(3) function
|
||||
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
|
||||
@@ -242,10 +239,9 @@ Index: lrzsz-0.12.20/intl/bindtextdom.c
|
||||
-/* Alias for function name in GNU C Library. */
|
||||
-weak_alias (__bindtextdomain, bindtextdomain);
|
||||
-#endif
|
||||
Index: lrzsz-0.12.20/intl/cat-compat.c
===================================================================
--- lrzsz-0.12.20.orig/intl/cat-compat.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/cat-compat.c lrzsz-0.12.20/intl/cat-compat.c
--- clean/lrzsz-0.12.20/intl/cat-compat.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/cat-compat.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,262 +0,0 @@
-/* Compatibility code for gettext-using-catgets interface.
- Copyright (C) 1995, 1997 Free Software Foundation, Inc.
@@ -509,10 +505,9 @@ Index: lrzsz-0.12.20/intl/cat-compat.c
- return dest - 1;
-}
-#endif
Index: lrzsz-0.12.20/intl/ChangeLog
===================================================================
--- lrzsz-0.12.20.orig/intl/ChangeLog
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/ChangeLog lrzsz-0.12.20/intl/ChangeLog
--- clean/lrzsz-0.12.20/intl/ChangeLog 1998-04-26 14:22:35.000000000 +0100
+++ lrzsz-0.12.20/intl/ChangeLog 1970-01-01 01:00:00.000000000 +0100
@@ -1,1022 +0,0 @@
-1997-09-06 02:10 Ulrich Drepper <drepper@cygnus.com>
-
@@ -1536,10 +1531,9 @@ Index: lrzsz-0.12.20/intl/ChangeLog
- which allow to use the X/Open catgets function with an interface
- like the Uniforum gettext function. For system which does not
- have neither of those a complete implementation is provided.
Index: lrzsz-0.12.20/intl/dcgettext.c
===================================================================
--- lrzsz-0.12.20.orig/intl/dcgettext.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/dcgettext.c lrzsz-0.12.20/intl/dcgettext.c
--- clean/lrzsz-0.12.20/intl/dcgettext.c 1998-04-26 14:22:36.000000000 +0100
+++ lrzsz-0.12.20/intl/dcgettext.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,593 +0,0 @@
-/* Implementation of the dcgettext(3) function
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -2134,10 +2128,9 @@ Index: lrzsz-0.12.20/intl/dcgettext.c
- return dest - 1;
-}
-#endif
Index: lrzsz-0.12.20/intl/dgettext.c
===================================================================
--- lrzsz-0.12.20.orig/intl/dgettext.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/dgettext.c lrzsz-0.12.20/intl/dgettext.c
--- clean/lrzsz-0.12.20/intl/dgettext.c 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/dgettext.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,59 +0,0 @@
-/* dgettext.c -- implementation of the dgettext(3) function
- Copyright (C) 1995 Software Foundation, Inc.
@@ -2198,10 +2191,9 @@ Index: lrzsz-0.12.20/intl/dgettext.c
-/* Alias for function name in GNU C Library. */
-weak_alias (__dgettext, dgettext);
-#endif
Index: lrzsz-0.12.20/intl/explodename.c
===================================================================
--- lrzsz-0.12.20.orig/intl/explodename.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/explodename.c lrzsz-0.12.20/intl/explodename.c
--- clean/lrzsz-0.12.20/intl/explodename.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/explodename.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,181 +0,0 @@
-/* Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
- Contributed by Ulrich Drepper <drepper@gnu.ai.mit.edu>, 1995.
@@ -2384,10 +2376,9 @@ Index: lrzsz-0.12.20/intl/explodename.c
-
- return mask;
-}
Index: lrzsz-0.12.20/intl/finddomain.c
===================================================================
--- lrzsz-0.12.20.orig/intl/finddomain.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/finddomain.c lrzsz-0.12.20/intl/finddomain.c
--- clean/lrzsz-0.12.20/intl/finddomain.c 1998-04-26 14:22:36.000000000 +0100
+++ lrzsz-0.12.20/intl/finddomain.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,189 +0,0 @@
-/* Handle list of needed message catalogs
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -2578,10 +2569,9 @@ Index: lrzsz-0.12.20/intl/finddomain.c
-
- return retval;
-}
Index: lrzsz-0.12.20/intl/gettext.c
===================================================================
--- lrzsz-0.12.20.orig/intl/gettext.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/gettext.c lrzsz-0.12.20/intl/gettext.c
--- clean/lrzsz-0.12.20/intl/gettext.c 1998-04-26 14:22:36.000000000 +0100
+++ lrzsz-0.12.20/intl/gettext.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,70 +0,0 @@
-/* Implementation of gettext(3) function
- Copyright (C) 1995, 1997 Free Software Foundation, Inc.
@@ -2653,10 +2643,9 @@ Index: lrzsz-0.12.20/intl/gettext.c
-/* Alias for function name in GNU C Library. */
-weak_alias (__gettext, gettext);
-#endif
Index: lrzsz-0.12.20/intl/gettext.h
===================================================================
--- lrzsz-0.12.20.orig/intl/gettext.h
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/gettext.h lrzsz-0.12.20/intl/gettext.h
--- clean/lrzsz-0.12.20/intl/gettext.h 1998-04-26 14:22:35.000000000 +0100
+++ lrzsz-0.12.20/intl/gettext.h 1970-01-01 01:00:00.000000000 +0100
@@ -1,105 +0,0 @@
-/* Internal header for GNU gettext internationalization functions
- Copyright (C) 1995, 1997 Free Software Foundation, Inc.
@@ -2763,10 +2752,9 @@ Index: lrzsz-0.12.20/intl/gettext.h
-/* @@ begin of epilog @@ */
-
-#endif /* gettext.h */
Index: lrzsz-0.12.20/intl/gettextP.h
===================================================================
--- lrzsz-0.12.20.orig/intl/gettextP.h
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/gettextP.h lrzsz-0.12.20/intl/gettextP.h
--- clean/lrzsz-0.12.20/intl/gettextP.h 1998-04-26 14:22:35.000000000 +0100
+++ lrzsz-0.12.20/intl/gettextP.h 1970-01-01 01:00:00.000000000 +0100
@@ -1,73 +0,0 @@
-/* Header describing internals of gettext library
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -2841,10 +2829,9 @@ Index: lrzsz-0.12.20/intl/gettextP.h
-/* @@ begin of epilog @@ */
-
-#endif /* gettextP.h */
Index: lrzsz-0.12.20/intl/hash-string.h
===================================================================
--- lrzsz-0.12.20.orig/intl/hash-string.h
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/hash-string.h lrzsz-0.12.20/intl/hash-string.h
--- clean/lrzsz-0.12.20/intl/hash-string.h 1998-04-26 14:22:36.000000000 +0100
+++ lrzsz-0.12.20/intl/hash-string.h 1970-01-01 01:00:00.000000000 +0100
@@ -1,63 +0,0 @@
-/* Implements a string hashing function.
- Copyright (C) 1995, 1997 Free Software Foundation, Inc.
@@ -2909,10 +2896,9 @@ Index: lrzsz-0.12.20/intl/hash-string.h
- }
- return hval;
-}
Index: lrzsz-0.12.20/intl/intl-compat.c
===================================================================
--- lrzsz-0.12.20.orig/intl/intl-compat.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/intl-compat.c lrzsz-0.12.20/intl/intl-compat.c
--- clean/lrzsz-0.12.20/intl/intl-compat.c 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/intl-compat.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,76 +0,0 @@
-/* intl-compat.c - Stub functions to call gettext functions from GNU gettext
- Library.
@@ -2990,10 +2976,9 @@ Index: lrzsz-0.12.20/intl/intl-compat.c
-{
- return textdomain__ (domainname);
-}
Index: lrzsz-0.12.20/intl/l10nflist.c
===================================================================
--- lrzsz-0.12.20.orig/intl/l10nflist.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/l10nflist.c lrzsz-0.12.20/intl/l10nflist.c
--- clean/lrzsz-0.12.20/intl/l10nflist.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/l10nflist.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,409 +0,0 @@
-/* Handle list of needed message catalogs
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -3404,10 +3389,9 @@ Index: lrzsz-0.12.20/intl/l10nflist.c
- return dest - 1;
-}
-#endif
Index: lrzsz-0.12.20/intl/libgettext.h
===================================================================
--- lrzsz-0.12.20.orig/intl/libgettext.h
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/libgettext.h lrzsz-0.12.20/intl/libgettext.h
--- clean/lrzsz-0.12.20/intl/libgettext.h 1998-04-26 14:22:36.000000000 +0100
+++ lrzsz-0.12.20/intl/libgettext.h 1970-01-01 01:00:00.000000000 +0100
@@ -1,182 +0,0 @@
-/* Message catalogs for internationalization.
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -3591,10 +3575,9 @@ Index: lrzsz-0.12.20/intl/libgettext.h
-#endif
-
-#endif
Index: lrzsz-0.12.20/intl/linux-msg.sed
===================================================================
--- lrzsz-0.12.20.orig/intl/linux-msg.sed
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/linux-msg.sed lrzsz-0.12.20/intl/linux-msg.sed
--- clean/lrzsz-0.12.20/intl/linux-msg.sed 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/linux-msg.sed 1970-01-01 01:00:00.000000000 +0100
@@ -1,100 +0,0 @@
-# po2msg.sed - Convert Uniforum style .po file to Linux style .msg file
-# Copyright (C) 1995 Free Software Foundation, Inc.
@@ -3696,10 +3679,9 @@ Index: lrzsz-0.12.20/intl/linux-msg.sed
- tb
-}
-d
Index: lrzsz-0.12.20/intl/loadinfo.h
===================================================================
--- lrzsz-0.12.20.orig/intl/loadinfo.h
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/loadinfo.h lrzsz-0.12.20/intl/loadinfo.h
--- clean/lrzsz-0.12.20/intl/loadinfo.h 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/loadinfo.h 1970-01-01 01:00:00.000000000 +0100
@@ -1,58 +0,0 @@
-#ifndef PARAMS
-# if __STDC__
@@ -3759,10 +3741,9 @@ Index: lrzsz-0.12.20/intl/loadinfo.h
- const char **special,
- const char **sponsor,
- const char **revision));
Index: lrzsz-0.12.20/intl/loadmsgcat.c
===================================================================
--- lrzsz-0.12.20.orig/intl/loadmsgcat.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/loadmsgcat.c lrzsz-0.12.20/intl/loadmsgcat.c
--- clean/lrzsz-0.12.20/intl/loadmsgcat.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/loadmsgcat.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,199 +0,0 @@
-/* Load needed message catalogs
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -3963,10 +3944,9 @@ Index: lrzsz-0.12.20/intl/loadmsgcat.c
- translations invalid. */
- ++_nl_msg_cat_cntr;
-}
Index: lrzsz-0.12.20/intl/localealias.c
===================================================================
--- lrzsz-0.12.20.orig/intl/localealias.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/localealias.c lrzsz-0.12.20/intl/localealias.c
--- clean/lrzsz-0.12.20/intl/localealias.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/localealias.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,378 +0,0 @@
-/* Handle aliases for locale names
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -4346,10 +4326,9 @@ Index: lrzsz-0.12.20/intl/localealias.c
- return c1 - c2;
-#endif
-}
Index: lrzsz-0.12.20/intl/Makefile.in
===================================================================
--- lrzsz-0.12.20.orig/intl/Makefile.in
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/Makefile.in lrzsz-0.12.20/intl/Makefile.in
--- clean/lrzsz-0.12.20/intl/Makefile.in 1998-04-26 14:22:35.000000000 +0100
+++ lrzsz-0.12.20/intl/Makefile.in 1970-01-01 01:00:00.000000000 +0100
@@ -1,214 +0,0 @@
-# Makefile for directory with message catalog handling in GNU NLS Utilities.
-# Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -4565,10 +4544,9 @@ Index: lrzsz-0.12.20/intl/Makefile.in
-# Tell versions [3.59,3.63) of GNU make not to export all variables.
-# Otherwise a system limit (for SysV at least) may be exceeded.
-.NOEXPORT:
Index: lrzsz-0.12.20/intl/po2tbl.sed.in
===================================================================
--- lrzsz-0.12.20.orig/intl/po2tbl.sed.in
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/po2tbl.sed.in lrzsz-0.12.20/intl/po2tbl.sed.in
--- clean/lrzsz-0.12.20/intl/po2tbl.sed.in 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/po2tbl.sed.in 1970-01-01 01:00:00.000000000 +0100
@@ -1,102 +0,0 @@
-# po2tbl.sed - Convert Uniforum style .po file to lookup table for catgets
-# Copyright (C) 1995 Free Software Foundation, Inc.
@@ -4672,10 +4650,9 @@ Index: lrzsz-0.12.20/intl/po2tbl.sed.in
- s/0*\(.*\)/int _msg_tbl_length = \1;/p
-}
-d
Index: lrzsz-0.12.20/intl/textdomain.c
===================================================================
--- lrzsz-0.12.20.orig/intl/textdomain.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/textdomain.c lrzsz-0.12.20/intl/textdomain.c
--- clean/lrzsz-0.12.20/intl/textdomain.c 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/textdomain.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,106 +0,0 @@
-/* Implementation of the textdomain(3) function
- Copyright (C) 1995, 1996, 1997 Free Software Foundation, Inc.
@@ -4783,16 +4760,14 @@ Index: lrzsz-0.12.20/intl/textdomain.c
-/* Alias for function name in GNU C Library. */
-weak_alias (__textdomain, textdomain);
-#endif
Index: lrzsz-0.12.20/intl/VERSION
===================================================================
--- lrzsz-0.12.20.orig/intl/VERSION
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/VERSION lrzsz-0.12.20/intl/VERSION
--- clean/lrzsz-0.12.20/intl/VERSION 1998-04-26 14:22:37.000000000 +0100
+++ lrzsz-0.12.20/intl/VERSION 1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-GNU gettext library from gettext-0.10.32
Index: lrzsz-0.12.20/intl/xopen-msg.sed
===================================================================
--- lrzsz-0.12.20.orig/intl/xopen-msg.sed
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/intl/xopen-msg.sed lrzsz-0.12.20/intl/xopen-msg.sed
--- clean/lrzsz-0.12.20/intl/xopen-msg.sed 1998-04-26 14:20:52.000000000 +0100
+++ lrzsz-0.12.20/intl/xopen-msg.sed 1970-01-01 01:00:00.000000000 +0100
@@ -1,104 +0,0 @@
-# po2msg.sed - Convert Uniforum style .po file to X/Open style .msg file
-# Copyright (C) 1995 Free Software Foundation, Inc.
@@ -4898,10 +4873,9 @@ Index: lrzsz-0.12.20/intl/xopen-msg.sed
- tb
-}
-d
Index: lrzsz-0.12.20/lib/Makefile.am
===================================================================
--- lrzsz-0.12.20.orig/lib/Makefile.am
+++ lrzsz-0.12.20/lib/Makefile.am
diff -uprN clean/lrzsz-0.12.20/lib/Makefile.am lrzsz-0.12.20/lib/Makefile.am
--- clean/lrzsz-0.12.20/lib/Makefile.am 1998-12-27 16:25:26.000000000 +0000
+++ lrzsz-0.12.20/lib/Makefile.am 2019-11-25 16:22:34.000000000 +0000
@@ -1,6 +1,4 @@
noinst_LIBRARIES=libzmodem.a
-CFLAGS=@CFLAGS@
@@ -4909,10 +4883,9 @@ Index: lrzsz-0.12.20/lib/Makefile.am

EXTRA_DIST = alloca.c ansi2knr.1 ansi2knr.c \
getopt.c getopt1.c mkdir.c mktime.c \
Index: lrzsz-0.12.20/Makefile.am
===================================================================
--- lrzsz-0.12.20.orig/Makefile.am
+++ lrzsz-0.12.20/Makefile.am
diff -uprN clean/lrzsz-0.12.20/Makefile.am lrzsz-0.12.20/Makefile.am
--- clean/lrzsz-0.12.20/Makefile.am 1998-12-30 11:19:40.000000000 +0000
+++ lrzsz-0.12.20/Makefile.am 2019-11-26 11:47:29.000000000 +0000
@@ -1,5 +1,5 @@
-SUBDIRS = lib intl src po man testsuite
-EXTRA_DIST = check.lrzsz COMPATABILITY README.cvs README.isdn4linux \
@@ -4935,10 +4908,9 @@ Index: lrzsz-0.12.20/Makefile.am

+
+ACLOCAL_AMFLAGS = -I m4
Index: lrzsz-0.12.20/po/cat-id-tbl.c
===================================================================
--- lrzsz-0.12.20.orig/po/cat-id-tbl.c
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/po/cat-id-tbl.c lrzsz-0.12.20/po/cat-id-tbl.c
--- clean/lrzsz-0.12.20/po/cat-id-tbl.c 1998-12-29 09:24:24.000000000 +0000
+++ lrzsz-0.12.20/po/cat-id-tbl.c 1970-01-01 01:00:00.000000000 +0100
@@ -1,234 +0,0 @@
-/* Automatically generated by po2tbl.sed from lrzsz.pot. */
-
@@ -5174,10 +5146,10 @@ Index: lrzsz-0.12.20/po/cat-id-tbl.c
-};
-
-int _msg_tbl_length = 136;
Index: lrzsz-0.12.20/po/de.po
===================================================================
--- lrzsz-0.12.20.orig/po/de.po
+++ lrzsz-0.12.20/po/de.po
Binary files clean/lrzsz-0.12.20/po/de.gmo and lrzsz-0.12.20/po/de.gmo differ
diff -uprN clean/lrzsz-0.12.20/po/de.po lrzsz-0.12.20/po/de.po
--- clean/lrzsz-0.12.20/po/de.po 1998-12-30 16:31:46.000000000 +0000
+++ lrzsz-0.12.20/po/de.po 2019-11-26 11:42:07.000000000 +0000
@@ -6,10 +6,12 @@
msgid ""
msgstr ""
@@ -5412,10 +5384,9 @@ Index: lrzsz-0.12.20/po/de.po
#: src/lrz.c:2079
msgid "rzfile: reached stop time"
msgstr "rzfile: Abbruchzeit erreicht"
Index: lrzsz-0.12.20/po/lrzsz.pot
===================================================================
--- lrzsz-0.12.20.orig/po/lrzsz.pot
+++ lrzsz-0.12.20/po/lrzsz.pot
diff -uprN clean/lrzsz-0.12.20/po/lrzsz.pot lrzsz-0.12.20/po/lrzsz.pot
--- clean/lrzsz-0.12.20/po/lrzsz.pot 1998-12-30 07:50:00.000000000 +0000
+++ lrzsz-0.12.20/po/lrzsz.pot 2019-11-26 11:39:12.000000000 +0000
@@ -1,24 +1,27 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR Free Software Foundation, Inc.
@@ -5647,10 +5618,9 @@ Index: lrzsz-0.12.20/po/lrzsz.pot
#: src/lrz.c:2079
msgid "rzfile: reached stop time"
msgstr ""
Index: lrzsz-0.12.20/po/Makevars
===================================================================
--- /dev/null
+++ lrzsz-0.12.20/po/Makevars
diff -uprN clean/lrzsz-0.12.20/po/Makevars lrzsz-0.12.20/po/Makevars
--- clean/lrzsz-0.12.20/po/Makevars 1970-01-01 01:00:00.000000000 +0100
+++ lrzsz-0.12.20/po/Makevars 2019-11-25 18:09:52.000000000 +0000
@@ -0,0 +1,78 @@
+# Makefile variables for PO directory in any package using GNU gettext.
+
@@ -5730,22 +5700,19 @@ Index: lrzsz-0.12.20/po/Makevars
+# "no". Set this to no if the POT file and PO files are maintained
+# externally.
+DIST_DEPENDS_ON_UPDATE_PO = yes
Index: lrzsz-0.12.20/po/stamp-cat-id
===================================================================
--- lrzsz-0.12.20.orig/po/stamp-cat-id
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/po/stamp-cat-id lrzsz-0.12.20/po/stamp-cat-id
--- clean/lrzsz-0.12.20/po/stamp-cat-id 1998-12-30 07:50:01.000000000 +0000
+++ lrzsz-0.12.20/po/stamp-cat-id 1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-timestamp
Index: lrzsz-0.12.20/po/stamp-po
===================================================================
--- /dev/null
+++ lrzsz-0.12.20/po/stamp-po
diff -uprN clean/lrzsz-0.12.20/po/stamp-po lrzsz-0.12.20/po/stamp-po
--- clean/lrzsz-0.12.20/po/stamp-po 1970-01-01 01:00:00.000000000 +0100
+++ lrzsz-0.12.20/po/stamp-po 2019-11-26 11:42:09.000000000 +0000
@@ -0,0 +1 @@
+timestamp
Index: lrzsz-0.12.20/src/Makefile.am
===================================================================
--- lrzsz-0.12.20.orig/src/Makefile.am
+++ lrzsz-0.12.20/src/Makefile.am
diff -uprN clean/lrzsz-0.12.20/src/Makefile.am lrzsz-0.12.20/src/Makefile.am
--- clean/lrzsz-0.12.20/src/Makefile.am 1998-12-28 08:38:47.000000000 +0000
+++ lrzsz-0.12.20/src/Makefile.am 2019-11-25 16:22:49.000000000 +0000
@@ -2,13 +2,11 @@ bin_PROGRAMS=lrz lsz
lrz_SOURCES=lrz.c timing.c zperr.c zreadline.c crctab.c rbsb.c zm.c protname.c tcp.c lsyslog.c canit.c
lsz_SOURCES=lsz.c timing.c zperr.c zreadline.c crctab.c rbsb.c zm.c protname.c tcp.c lsyslog.c canit.c
@@ -5762,10 +5729,9 @@ Index: lrzsz-0.12.20/src/Makefile.am
EXTRA_DIST = ansi2knr.1 ansi2knr.c lrzszbug.in
INCLUDES = -I.. -I$(srcdir) -I$(top_srcdir)/src -I../intl -I$(top_srcdir)/lib
#DEFS = -DLOCALEDIR=\"$(localedir)\" -DOS=\"@host_os@\" -DCPU=\"@host_cpu@\"
Index: lrzsz-0.12.20/src/zglobal.h
===================================================================
--- lrzsz-0.12.20.orig/src/zglobal.h
+++ lrzsz-0.12.20/src/zglobal.h
diff -uprN clean/lrzsz-0.12.20/src/zglobal.h lrzsz-0.12.20/src/zglobal.h
--- clean/lrzsz-0.12.20/src/zglobal.h 1998-12-29 12:34:59.000000000 +0000
+++ lrzsz-0.12.20/src/zglobal.h 2019-11-25 16:32:42.000000000 +0000
@@ -180,9 +180,6 @@ struct termios;
#if HAVE_LOCALE_H
# include <locale.h>
@@ -5776,9 +5742,8 @@ Index: lrzsz-0.12.20/src/zglobal.h

#if ENABLE_NLS
# include <libintl.h>
Index: lrzsz-0.12.20/stamp-h.in
===================================================================
--- lrzsz-0.12.20.orig/stamp-h.in
+++ /dev/null
diff -uprN clean/lrzsz-0.12.20/stamp-h.in lrzsz-0.12.20/stamp-h.in
--- clean/lrzsz-0.12.20/stamp-h.in 1998-12-30 07:51:07.000000000 +0000
+++ lrzsz-0.12.20/stamp-h.in 1970-01-01 01:00:00.000000000 +0100
@@ -1 +0,0 @@
-timestamp

@@ -12,7 +12,7 @@ PE = "1"

# We use the revision in order to avoid having to fetch it from the
# repo during parse
SRCREV = "050acee119b3757fee3bd128f55d720fdd9bb890"
SRCREV = "c4fddedc48f336eabc4ce3f74940e6aa372de18c"

SRC_URI = "git://git.denx.de/u-boot.git \
"

@@ -1,5 +0,0 @@
require u-boot-common.inc
require u-boot.inc

DEPENDS += "bc-native dtc-native"

meta/recipes-bsp/u-boot/u-boot_2021.01.bb (new file, 4 lines)
@@ -0,0 +1,4 @@
require u-boot-common.inc
require u-boot.inc

DEPENDS += "bc-native dtc-native python3-setuptools-native"
@@ -3,7 +3,7 @@ HOMEPAGE = "https://www.isc.org/bind/"
SECTION = "console/network"

LICENSE = "MPL-2.0"
LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=4673dc07337cace3b93c65e9ffe57b60"
LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=ef10b4de6371115dcecdc38ca2af4561"

DEPENDS = "openssl libcap zlib libuv"

@@ -19,7 +19,7 @@ SRC_URI = "https://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.xz \
file://0001-avoid-start-failure-with-bind-user.patch \
"

SRC_URI[sha256sum] = "bc47fc019c6205e6a6bfb839c544a1472321df0537ba905b846a4cbffe3362b3"
SRC_URI[sha256sum] = "0111f64dd7d8f515cfa129e181cce96ff82070d1b27f11a21f6856110d0699c1"

UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/"
# stay at 9.16 follow the ESV versions divisible by 4
@@ -9,8 +9,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \

SRC_URI_append_libc-musl = " file://0002-resolve-musl-does-not-implement-res_ninit.patch"

SRC_URI[md5sum] = "1ed8745354c7254bdfd4def54833ee94"
SRC_URI[sha256sum] = "cb30aca97c2f79ccaed8802aa2909ac5100a3969de74c0af8a9d73b85fc4932b"
SRC_URI[sha256sum] = "9f62a7169b7491c670a1ff2e335b0d966308fb2f62e285c781105eb90f181af3"

RRECOMMENDS_${PN} = "connman-conf"
RCONFLICTS_${PN} = "networkmanager"
@@ -1,31 +0,0 @@
Upstream-Status: Pending

Subject: rcp: fix to work with large files

When we copy file by rcp command, if the file > 2GB, it will fail.
The cause is that it used incorrect data type on file size in sink() of rcp.

Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
---
src/rcp.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/rcp.c b/src/rcp.c
index 21f55b6..bafa35f 100644
--- a/src/rcp.c
+++ b/src/rcp.c
@@ -876,9 +876,9 @@ sink (int argc, char *argv[])
enum
{ YES, NO, DISPLAYED } wrerr;
BUF *bp;
- off_t i, j;
+ off_t i, j, size;
int amt, count, exists, first, mask, mode, ofd, omode;
- int setimes, size, targisdir, wrerrno;
+ int setimes, targisdir, wrerrno;
char ch, *cp, *np, *targ, *vect[1], buf[BUFSIZ];
const char *why;

--
1.9.1

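The type change in that (now removed) patch is easy to demonstrate in isolation. A minimal sketch, not part of any patch above, showing why a plain int cannot carry a >2 GiB file size while off_t can on an LP64 Linux system built with large-file support:

#include <stdio.h>
#include <sys/types.h>

int main(void) {
    long long reported = 3LL * 1024 * 1024 * 1024; /* 3 GiB, as a sender might report */
    int   as_int = (int) reported;   /* old sink(): truncated by the 32-bit int */
    off_t as_off = (off_t) reported; /* fixed sink(): preserved when off_t is 64-bit */
    printf("int:   %d\n", as_int);
    printf("off_t: %lld\n", (long long) as_off);
    return 0;
}
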
@@ -1,17 +1,22 @@
Upstream: http://www.mail-archive.com/bug-inetutils@gnu.org/msg02103.html
From c7c27ba763c613f83c1561e56448b49315c271c5 Mon Sep 17 00:00:00 2001
From: Jackie Huang <jackie.huang@windriver.com>
Date: Wed, 6 Mar 2019 09:36:11 -0500
Subject: [PATCH] Upstream:
http://www.mail-archive.com/bug-inetutils@gnu.org/msg02103.html

Upstream-Status: Pending

Signed-off-by: Jackie Huang <jackie.huang@windriver.com>

---
ping/ping_common.h | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)

diff --git a/ping/ping_common.h b/ping/ping_common.h
index 1dfd1b5..3bfbd12 100644
index 65e3e60..3e84db0 100644
--- a/ping/ping_common.h
+++ b/ping/ping_common.h
@@ -17,10 +17,14 @@
@@ -18,10 +18,14 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see `http://www.gnu.org/licenses/'. */

@@ -26,7 +31,7 @@ index 1dfd1b5..3bfbd12 100644
#include <icmp.h>
#include <error.h>
#include <progname.h>
@@ -62,7 +66,12 @@ struct ping_stat
@@ -63,7 +67,12 @@ struct ping_stat
want to follow the traditional behaviour of ping. */
#define DEFAULT_PING_COUNT 0

@@ -39,9 +44,9 @@ index 1dfd1b5..3bfbd12 100644
#define PING_TIMING(s) ((s) >= sizeof (struct timeval))
#define PING_DATALEN (64 - PING_HEADER_LEN) /* default data length */

@@ -74,13 +83,20 @@ struct ping_stat
(t).tv_usec = ((i)%PING_PRECISION)*(1000000/PING_PRECISION) ;\
} while (0)
@@ -78,13 +87,20 @@ struct ping_stat

#define PING_MIN_USER_INTERVAL (200000/PING_PRECISION)

+#ifdef HAVE_IPV6
/* FIXME: Adjust IPv6 case for options and their consumption. */
@@ -60,7 +65,7 @@ index 1dfd1b5..3bfbd12 100644

typedef int (*ping_efp) (int code,
void *closure,
@@ -89,13 +105,17 @@ typedef int (*ping_efp) (int code,
@@ -93,13 +109,17 @@ typedef int (*ping_efp) (int code,
struct ip * ip, icmphdr_t * icmp, int datalen);

union event {
@@ -78,6 +83,3 @@ index 1dfd1b5..3bfbd12 100644
};

typedef struct ping_data PING;
--
2.8.3

@@ -1,20 +1,21 @@
From 552a7d64ad4a7188a9b7cd89933ae7caf7ebfe90 Mon Sep 17 00:00:00 2001
From f7f785c21306010b2367572250b2822df5bc7728 Mon Sep 17 00:00:00 2001
From: Mike Frysinger <vapier at gentoo.org>
Date: Thu, 18 Nov 2010 16:59:14 -0500
Subject: [PATCH gnulib] printf-parse: pull in features.h for __GLIBC__
Subject: [PATCH] printf-parse: pull in features.h for __GLIBC__

Upstream-Status: Pending

Signed-off-by: Mike Frysinger <vapier at gentoo.org>

---
lib/printf-parse.h | 3 +++
1 files changed, 3 insertions(+), 0 deletions(-)
lib/printf-parse.h | 3 +++
1 file changed, 3 insertions(+)

diff --git a/lib/printf-parse.h b/lib/printf-parse.h
index 67a4a2a..3bd6152 100644
index e7d0f82..d7b4534 100644
--- a/lib/printf-parse.h
+++ b/lib/printf-parse.h
@@ -25,6 +25,9 @@
@@ -28,6 +28,9 @@

#include "printf-args.h"

@@ -24,6 +25,3 @@ index 67a4a2a..3bd6152 100644

/* Flags */
#define FLAG_GROUP 1 /* ' flag */
--
1.7.3.2

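The three added lines themselves are elided in the hunks above. As a hedged sketch only (the usual gnulib idiom, not a quote of this patch), the fix amounts to including <features.h> before anything tests __GLIBC__, since that macro is only defined once a glibc header has been seen; HAVE_FEATURES_H here is assumed to come from the package's existing configure checks:

/* Hypothetical reconstruction of the guard the patch subject describes. */
#if HAVE_FEATURES_H
# include <features.h>   /* defines __GLIBC__ on glibc systems */
#endif

#ifdef __GLIBC__
/* glibc-specific paths can now be selected reliably */
#endif
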
@@ -1,8 +1,19 @@
From 9089c6eafbf5903174dce87b68476e35db80beb9 Mon Sep 17 00:00:00 2001
From: Martin Jansa <martin.jansa@gmail.com>
Date: Wed, 6 Mar 2019 09:36:11 -0500
Subject: [PATCH] inetutils: Import version 1.9.4

Upstream-Status: Pending

--- inetutils-1.8/lib/wchar.in.h
+++ inetutils-1.8/lib/wchar.in.h
@@ -70,6 +70,9 @@
---
lib/wchar.in.h | 3 +++
1 file changed, 3 insertions(+)

diff --git a/lib/wchar.in.h b/lib/wchar.in.h
index cdda680..043866a 100644
--- a/lib/wchar.in.h
+++ b/lib/wchar.in.h
@@ -77,6 +77,9 @@
/* The include_next requires a split double-inclusion guard. */
#if @HAVE_WCHAR_H@
# @INCLUDE_NEXT@ @NEXT_WCHAR_H@

@@ -1,4 +1,10 @@
inetutils: define PATH_PROCNET_DEV if not already defined
From 101130f422dd5c01a1459645d7b2a5b8d19720ab Mon Sep 17 00:00:00 2001
From: Martin Jansa <martin.jansa@gmail.com>
Date: Wed, 6 Mar 2019 09:36:11 -0500
Subject: [PATCH] inetutils: define PATH_PROCNET_DEV if not already defined
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

this prevents the following compilation error :
system/linux.c:401:15: error: 'PATH_PROCNET_DEV' undeclared (first use in this function)
@@ -9,11 +15,16 @@ this patch comes from :
Upstream-Status: Inappropriate [not author]

Signed-off-by: Eric Bénard <eric@eukrea.com>

---
diff -Naur inetutils-1.9.orig/ifconfig/system/linux.c inetutils-1.9/ifconfig/system/linux.c
--- inetutils-1.9.orig/ifconfig/system/linux.c 2012-01-04 16:31:36.000000000 -0500
+++ inetutils-1.9/ifconfig/system/linux.c 2012-01-04 16:40:53.000000000 -0500
@@ -49,6 +49,10 @@
ifconfig/system/linux.c | 4 ++++
1 file changed, 4 insertions(+)

diff --git a/ifconfig/system/linux.c b/ifconfig/system/linux.c
index e453b46..4268ca9 100644
--- a/ifconfig/system/linux.c
+++ b/ifconfig/system/linux.c
@@ -53,6 +53,10 @@
#include "../ifconfig.h"

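The four added lines are not shown in the hunk above. A guard of the shape below is what such a fix typically looks like; this is a sketch using the conventional net-tools value, not a quote of the patch:

/* Sketch only: supply the /proc interface path when libc headers omit it. */
#ifndef PATH_PROCNET_DEV
# define PATH_PROCNET_DEV "/proc/net/dev"
#endif
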
@@ -1,15 +1,24 @@
From 684e45b34a33186bb17bcee0b01814c549a60bf6 Mon Sep 17 00:00:00 2001
From: Kai Kang <kai.kang@windriver.com>
Date: Wed, 6 Mar 2019 09:36:11 -0500
Subject: [PATCH] inetutils: Import version 1.9.4

Only check security/pam_appl.h which is provided by package libpam when pam is
enabled.

Upstream-Status: Pending

Signed-off-by: Kai Kang <kai.kang@windriver.com>

---
configure.ac | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/configure.ac b/configure.ac
index b35e672..e78a751 100644
index 86136fb..b220319 100644
--- a/configure.ac
+++ b/configure.ac
@@ -195,6 +195,19 @@ fi
@@ -183,6 +183,19 @@ AC_SUBST(LIBUTIL)

# See if we have libpam.a. Investigate PAM versus Linux-PAM.
if test "$with_pam" = yes ; then
@@ -29,8 +38,8 @@ index b35e672..e78a751 100644
AC_CHECK_LIB(dl, dlopen, LIBDL=-ldl)
AC_CHECK_LIB(pam, pam_authenticate, LIBPAM=-lpam)
if test "$ac_cv_lib_pam_pam_authenticate" = yes ; then
@@ -587,7 +600,7 @@ AC_HEADER_DIRENT
AC_CHECK_HEADERS([arpa/nameser.h errno.h fcntl.h features.h \
@@ -620,7 +633,7 @@ AC_HEADER_DIRENT
AC_CHECK_HEADERS([arpa/nameser.h arpa/tftp.h errno.h fcntl.h features.h \
glob.h memory.h netinet/ether.h netinet/in_systm.h \
netinet/ip.h netinet/ip_icmp.h netinet/ip_var.h \
- security/pam_appl.h shadow.h \

@@ -1,17 +0,0 @@
Upstream-Status: Pending

remove m4_esyscmd function

Signed-off-by: Chunrong Guo <b40290@freescale.com>
--- inetutils-1.9.1/configure.ac 2012-01-06 22:05:05.000000000 +0800
+++ inetutils-1.9.1/configure.ac 2012-11-12 14:01:11.732957019 +0800
@@ -20,8 +20,7 @@

AC_PREREQ(2.59)

-AC_INIT([GNU inetutils],
- m4_esyscmd([build-aux/git-version-gen .tarball-version 's/inetutils-/v/;s/_/./g']),
+AC_INIT([GNU inetutils],[1.9.4],
[bug-inetutils@gnu.org])

AC_CONFIG_SRCDIR([src/inetd.c])
@@ -10,8 +10,7 @@ LICENSE = "GPLv3"

LIC_FILES_CHKSUM = "file://COPYING;md5=0c7051aef9219dc7237f206c5c4179a7"

SRC_URI = "${GNU_MIRROR}/inetutils/inetutils-${PV}.tar.gz \
file://version.patch \
SRC_URI = "${GNU_MIRROR}/inetutils/inetutils-${PV}.tar.xz \
file://inetutils-1.8-0001-printf-parse-pull-in-features.h-for-__GLIBC__.patch \
file://inetutils-1.8-0003-wchar.patch \
file://rexec.xinetd.inetutils \
@@ -21,13 +20,9 @@ SRC_URI = "${GNU_MIRROR}/inetutils/inetutils-${PV}.tar.gz \
file://tftpd.xinetd.inetutils \
file://inetutils-1.9-PATH_PROCNET_DEV.patch \
file://inetutils-only-check-pam_appl.h-when-pam-enabled.patch \
file://0001-rcp-fix-to-work-with-large-files.patch \
file://fix-buffer-fortify-tfpt.patch \
file://0001-ftpd-telnetd-Fix-multiple-definitions-of-errcatch-an.patch \
"

SRC_URI[md5sum] = "04852c26c47cc8c6b825f2b74f191f52"
SRC_URI[sha256sum] = "be8f75eff936b8e41b112462db51adf689715658a1b09e0d6b05d11ec92cc616"
SRC_URI[md5sum] = "5e1018502cd131ed8e42339f6b5c98aa"

inherit autotools gettext update-alternatives texinfo

@@ -65,7 +65,8 @@ CFLAGS_append_class-nativesdk = " -DOPENSSLDIR=/not/builtin -DENGINESDIR=/not/bu
# rc2 (mailx)
# psk (qt5)
# srp (libest)
DEPRECATED_CRYPTO_FLAGS = "no-ssl no-idea no-rc5 no-md2 no-camellia no-mdc2 no-scrypt no-seed no-siphash no-sm2 no-sm3 no-sm4 no-whirlpool"
# whirlpool (qca)
DEPRECATED_CRYPTO_FLAGS = "no-ssl no-idea no-rc5 no-md2 no-camellia no-mdc2 no-scrypt no-seed no-siphash no-sm2 no-sm3 no-sm4"

do_configure () {
os=${HOST_OS}

@@ -16,7 +16,7 @@ inherit autotools pkgconfig
SRC_URI = "https://mirrors.edge.kernel.org/pub/linux/libs/${BPN}/${BPN}-${PV}.tar.xz \
file://0001-pem.c-do-not-use-rawmemchr.patch \
"
SRC_URI[sha256sum] = "2f99e743a235b1c834b19112e4e0283d02da93b863899381466cde47bf159cf6"
SRC_URI[sha256sum] = "30027a2043bbe2faca7849946bb2ed7d5e48c1b9d2638bfa8f5fdef3093c4784"

do_configure_prepend () {
mkdir -p ${S}/build-aux
meta/recipes-core/glibc/glibc/faccessat2-perm.patch (new file, 31 lines)
@@ -0,0 +1,31 @@
Older seccomp-based filters used in container frameworks will block faccessat2
calls as it's a relatively new syscall. This isn't a big problem with
glibc <2.33 but 2.33 will call faccessat2 itself, get EPERM, and then be confused
about what to do as EPERM isn't an expected error code.

This manifests itself as mysterious errors, for example a kernel failing to link.

The root cause of bad seccomp filters is mostly fixed (systemd 247, Docker 20.10.0)
but we can't expect everyone to upgrade, so add a workaround (originally from
Red Hat) to handle EPERM like ENOSYS and fall back to faccessat().

Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>

diff --git a/sysdeps/unix/sysv/linux/faccessat.c b/sysdeps/unix/sysv/linux/faccessat.c
index 56cb6dcc8b4d58d3..5de75032bbc93a2c 100644
--- a/sysdeps/unix/sysv/linux/faccessat.c
+++ b/sysdeps/unix/sysv/linux/faccessat.c
@@ -34,7 +34,11 @@ faccessat (int fd, const char *file, int mode, int flag)
#if __ASSUME_FACCESSAT2
return ret;
#else
- if (ret == 0 || errno != ENOSYS)
+ /* Fedora-specific workaround:
+    As a workround for a broken systemd-nspawn that returns
+    EPERM when a syscall is not allowed instead of ENOSYS
+    we must check for EPERM here and fall back to faccessat. */
+ if (ret == 0 || !(errno == ENOSYS || errno == EPERM))
return ret;

if (flag & ~(AT_SYMLINK_NOFOLLOW | AT_EACCESS))

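Outside of glibc itself, the same "treat EPERM like ENOSYS" fallback can be sketched as a plain wrapper. This is an illustration, not the glibc code; it assumes Linux >= 5.8 kernel headers where SYS_faccessat2 is defined:

#include <errno.h>
#include <fcntl.h>
#include <sys/syscall.h>
#include <unistd.h>

static int faccessat_with_fallback(int fd, const char *path, int mode, int flags) {
#ifdef SYS_faccessat2
    long ret = syscall(SYS_faccessat2, fd, path, mode, flags);
    /* Mirror the patch: fall through only when the kernel, or a seccomp
     * filter answering on its behalf, says ENOSYS -- or, wrongly, EPERM. */
    if (ret == 0 || !(errno == ENOSYS || errno == EPERM))
        return (int) ret;
#endif
    if (flags != 0) {      /* the old syscall cannot express flags */
        errno = EINVAL;
        return -1;
    }
    return faccessat(fd, path, mode, 0);
}
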
@@ -12,6 +12,7 @@ NATIVESDKFIXES_class-nativesdk = "\
file://0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \
file://0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch \
file://0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch \
file://faccessat2-perm.patch \
"

SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \

@@ -279,6 +279,11 @@ fi

umount /tgt_root

# copy any extra files needed for ESP
if [ -d /run/media/$1/esp ]; then
cp -r /run/media/$1/esp/* /boot
fi

# Copy kernel artifacts. To add more artifacts just add to types
# For now just support kernel types already being used by something in OE-core
for types in bzImage zImage vmlinux vmlinuz fitImage; do

@@ -70,8 +70,6 @@ Signed-off-by: Luca Boccassi <luca.boccassi@microsoft.com>
src/udev/udev-rules.c | 1 +
50 files changed, 61 insertions(+)

diff --git a/meson.build b/meson.build
index 6aa47fc755..0d0fa4963c 100644
--- a/meson.build
+++ b/meson.build
@@ -535,6 +535,7 @@ foreach ident : [
@@ -82,8 +80,6 @@ index 6aa47fc755..0d0fa4963c 100644
]

have = cc.has_function(ident[0], prefix : ident[1], args : '-D_GNU_SOURCE')
diff --git a/src/backlight/backlight.c b/src/backlight/backlight.c
index d1b6a81e33..38b7008371 100644
--- a/src/backlight/backlight.c
+++ b/src/backlight/backlight.c
@@ -19,6 +19,7 @@
@@ -94,8 +90,6 @@ index d1b6a81e33..38b7008371 100644

static int help(void) {
_cleanup_free_ char *link = NULL;
diff --git a/src/basic/cgroup-util.c b/src/basic/cgroup-util.c
index f28bf1866a..bb960f183c 100644
--- a/src/basic/cgroup-util.c
+++ b/src/basic/cgroup-util.c
@@ -38,6 +38,7 @@
@@ -106,8 +100,6 @@ index f28bf1866a..bb960f183c 100644

static int cg_enumerate_items(const char *controller, const char *path, FILE **_f, const char *item) {
_cleanup_free_ char *fs = NULL;
diff --git a/src/basic/env-util.c b/src/basic/env-util.c
index a84863ff22..d4f5d57231 100644
--- a/src/basic/env-util.c
+++ b/src/basic/env-util.c
@@ -15,6 +15,7 @@
@@ -118,8 +110,6 @@ index a84863ff22..d4f5d57231 100644

/* We follow bash for the character set. Different shells have different rules. */
#define VALID_BASH_ENV_NAME_CHARS \
diff --git a/src/basic/log.c b/src/basic/log.c
index d4054cf46a..b608863e45 100644
--- a/src/basic/log.c
+++ b/src/basic/log.c
@@ -36,6 +36,7 @@
@@ -130,8 +120,6 @@ index d4054cf46a..b608863e45 100644

#define SNDBUF_SIZE (8*1024*1024)

diff --git a/src/basic/missing_stdlib.h b/src/basic/missing_stdlib.h
index 8c76f93eb2..9068bfb4f0 100644
--- a/src/basic/missing_stdlib.h
+++ b/src/basic/missing_stdlib.h
@@ -11,3 +11,15 @@
@@ -150,8 +138,6 @@ index 8c76f93eb2..9068bfb4f0 100644
+ (char *)memcpy(__new, __old, __len); \
+ })
+#endif
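Only the tail of the new macro is visible in the missing_stdlib.h hunk above. For context, a strndupa-style stand-in for musl (which lacks this GNU extension) plausibly looks like the sketch below; the real definition lives in src/basic/missing_stdlib.h and may differ in detail. It relies on GCC/clang statement expressions, as systemd's codebase already does:

#include <alloca.h>
#include <string.h>

/* Sketch of a strndupa() replacement: copy at most n bytes of s into
 * stack storage and NUL-terminate, mirroring the GNU extension. */
#if !defined(strndupa)
#define strndupa(s, n)                                  \
  ({                                                    \
    const char *__old = (s);                            \
    size_t __len = strnlen(__old, (n));                 \
    char *__new = (char *) alloca(__len + 1);           \
    __new[__len] = '\0';                                \
    (char *) memcpy(__new, __old, __len);               \
  })
#endif
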
diff --git a/src/basic/mkdir.c b/src/basic/mkdir.c
index f91f8f7a08..fb31596216 100644
--- a/src/basic/mkdir.c
+++ b/src/basic/mkdir.c
@@ -14,6 +14,7 @@
@@ -162,8 +148,6 @@ index f91f8f7a08..fb31596216 100644

int mkdir_safe_internal(
const char *path,
diff --git a/src/basic/parse-util.c b/src/basic/parse-util.c
index 5d4dafe3a5..70749750d4 100644
--- a/src/basic/parse-util.c
+++ b/src/basic/parse-util.c
@@ -22,6 +22,7 @@
@@ -174,8 +158,6 @@ index 5d4dafe3a5..70749750d4 100644

int parse_boolean(const char *v) {
if (!v)
diff --git a/src/basic/path-lookup.c b/src/basic/path-lookup.c
index 96b82170d0..71342b46af 100644
--- a/src/basic/path-lookup.c
+++ b/src/basic/path-lookup.c
@@ -15,6 +15,7 @@
@@ -186,8 +168,6 @@ index 96b82170d0..71342b46af 100644

int xdg_user_runtime_dir(char **ret, const char *suffix) {
const char *e;
diff --git a/src/basic/proc-cmdline.c b/src/basic/proc-cmdline.c
index 0b6fb137bd..e8e8c7b270 100644
--- a/src/basic/proc-cmdline.c
+++ b/src/basic/proc-cmdline.c
@@ -15,6 +15,7 @@
@@ -198,8 +178,6 @@ index 0b6fb137bd..e8e8c7b270 100644

int proc_cmdline(char **ret) {
const char *e;
diff --git a/src/basic/procfs-util.c b/src/basic/procfs-util.c
index ccab71f7d2..8f9eee8d36 100644
--- a/src/basic/procfs-util.c
+++ b/src/basic/procfs-util.c
@@ -11,6 +11,7 @@
@@ -210,8 +188,6 @@ index ccab71f7d2..8f9eee8d36 100644

int procfs_tasks_get_limit(uint64_t *ret) {
_cleanup_free_ char *value = NULL;
diff --git a/src/basic/selinux-util.c b/src/basic/selinux-util.c
index 4989f4f37c..ca00bf99d8 100644
--- a/src/basic/selinux-util.c
+++ b/src/basic/selinux-util.c
@@ -27,6 +27,7 @@
@@ -222,8 +198,6 @@ index 4989f4f37c..ca00bf99d8 100644

#if HAVE_SELINUX
DEFINE_TRIVIAL_CLEANUP_FUNC(context_t, context_free);
diff --git a/src/basic/time-util.c b/src/basic/time-util.c
index 5318d6378d..23c2f77675 100644
--- a/src/basic/time-util.c
+++ b/src/basic/time-util.c
@@ -27,6 +27,7 @@
@@ -234,8 +208,6 @@ index 5318d6378d..23c2f77675 100644

static clockid_t map_clock_id(clockid_t c) {

diff --git a/src/boot/bless-boot.c b/src/boot/bless-boot.c
index cd34f88bb9..3a77b6f2ca 100644
--- a/src/boot/bless-boot.c
+++ b/src/boot/bless-boot.c
@@ -18,6 +18,7 @@
@@ -246,8 +218,6 @@ index cd34f88bb9..3a77b6f2ca 100644

static char **arg_path = NULL;

diff --git a/src/core/dbus-cgroup.c b/src/core/dbus-cgroup.c
index 37c581fb22..e02789d689 100644
--- a/src/core/dbus-cgroup.c
+++ b/src/core/dbus-cgroup.c
@@ -16,6 +16,7 @@
@@ -258,8 +228,6 @@ index 37c581fb22..e02789d689 100644

BUS_DEFINE_PROPERTY_GET(bus_property_get_tasks_max, "t", TasksMax, tasks_max_resolve);

diff --git a/src/core/dbus-execute.c b/src/core/dbus-execute.c
index abe009c395..0451e58d1c 100644
--- a/src/core/dbus-execute.c
+++ b/src/core/dbus-execute.c
@@ -41,6 +41,7 @@
@@ -270,8 +238,6 @@ index abe009c395..0451e58d1c 100644

BUS_DEFINE_PROPERTY_GET_ENUM(bus_property_get_exec_output, exec_output, ExecOutput);
static BUS_DEFINE_PROPERTY_GET_ENUM(property_get_exec_input, exec_input, ExecInput);
diff --git a/src/core/dbus-util.c b/src/core/dbus-util.c
index d6223db305..3654c344ee 100644
--- a/src/core/dbus-util.c
+++ b/src/core/dbus-util.c
@@ -7,6 +7,7 @@
@@ -282,8 +248,6 @@ index d6223db305..3654c344ee 100644

int bus_property_get_triggered_unit(
sd_bus *bus,
diff --git a/src/core/execute.c b/src/core/execute.c
index c992b8d5d3..89632e0582 100644
--- a/src/core/execute.c
+++ b/src/core/execute.c
@@ -96,6 +96,7 @@
@@ -294,8 +258,6 @@ index c992b8d5d3..89632e0582 100644

#define IDLE_TIMEOUT_USEC (5*USEC_PER_SEC)
#define IDLE_TIMEOUT2_USEC (1*USEC_PER_SEC)
diff --git a/src/core/kmod-setup.c b/src/core/kmod-setup.c
index a56f12f47f..6b8729ef67 100644
--- a/src/core/kmod-setup.c
+++ b/src/core/kmod-setup.c
@@ -11,6 +11,7 @@
@@ -306,8 +268,6 @@ index a56f12f47f..6b8729ef67 100644

#if HAVE_KMOD
#include "module-util.h"
diff --git a/src/core/service.c b/src/core/service.c
index d7bdeb7cca..bfd483b2c0 100644
--- a/src/core/service.c
+++ b/src/core/service.c
@@ -41,6 +41,7 @@
@@ -318,8 +278,6 @@ index d7bdeb7cca..bfd483b2c0 100644

static const UnitActiveState state_translation_table[_SERVICE_STATE_MAX] = {
[SERVICE_DEAD] = UNIT_INACTIVE,
diff --git a/src/coredump/coredump-vacuum.c b/src/coredump/coredump-vacuum.c
index 30c67ffe7c..595bc30726 100644
--- a/src/coredump/coredump-vacuum.c
+++ b/src/coredump/coredump-vacuum.c
@@ -16,6 +16,7 @@
@@ -330,8 +288,6 @@ index 30c67ffe7c..595bc30726 100644

#define DEFAULT_MAX_USE_LOWER (uint64_t) (1ULL*1024ULL*1024ULL) /* 1 MiB */
#define DEFAULT_MAX_USE_UPPER (uint64_t) (4ULL*1024ULL*1024ULL*1024ULL) /* 4 GiB */
diff --git a/src/journal-remote/journal-remote-main.c b/src/journal-remote/journal-remote-main.c
index d2aa1815c2..a851aa203f 100644
--- a/src/journal-remote/journal-remote-main.c
+++ b/src/journal-remote/journal-remote-main.c
@@ -22,6 +22,7 @@
@@ -342,8 +298,6 @@ index d2aa1815c2..a851aa203f 100644

#define PRIV_KEY_FILE CERTIFICATE_ROOT "/private/journal-remote.pem"
#define CERT_FILE CERTIFICATE_ROOT "/certs/journal-remote.pem"
diff --git a/src/journal/journalctl.c b/src/journal/journalctl.c
index bcf2e01d5c..5394d1dc01 100644
--- a/src/journal/journalctl.c
+++ b/src/journal/journalctl.c
@@ -73,6 +73,7 @@
@@ -354,8 +308,6 @@ index bcf2e01d5c..5394d1dc01 100644

#define DEFAULT_FSS_INTERVAL_USEC (15*USEC_PER_MINUTE)
#define PROCESS_INOTIFY_INTERVAL 1024 /* Every 1,024 messages processed */
diff --git a/src/journal/sd-journal.c b/src/journal/sd-journal.c
index cb1ab88ca5..7f35759540 100644
--- a/src/journal/sd-journal.c
+++ b/src/journal/sd-journal.c
@@ -40,6 +40,7 @@
@@ -366,8 +318,6 @@ index cb1ab88ca5..7f35759540 100644

#define JOURNAL_FILES_MAX 7168

diff --git a/src/libsystemd/sd-bus/bus-message.c b/src/libsystemd/sd-bus/bus-message.c
index 86ff5bdfa2..3fd053a358 100644
--- a/src/libsystemd/sd-bus/bus-message.c
+++ b/src/libsystemd/sd-bus/bus-message.c
@@ -21,6 +21,7 @@
@@ -378,8 +328,6 @@ index 86ff5bdfa2..3fd053a358 100644

static int message_append_basic(sd_bus_message *m, char type, const void *p, const void **stored);

diff --git a/src/libsystemd/sd-bus/bus-objects.c b/src/libsystemd/sd-bus/bus-objects.c
index 275c4318a1..5ffee59d17 100644
--- a/src/libsystemd/sd-bus/bus-objects.c
+++ b/src/libsystemd/sd-bus/bus-objects.c
@@ -13,6 +13,7 @@
@@ -390,8 +338,6 @@ index 275c4318a1..5ffee59d17 100644

static int node_vtable_get_userdata(
sd_bus *bus,
diff --git a/src/libsystemd/sd-bus/bus-socket.c b/src/libsystemd/sd-bus/bus-socket.c
index 4881fd0d32..c865f1133b 100644
--- a/src/libsystemd/sd-bus/bus-socket.c
+++ b/src/libsystemd/sd-bus/bus-socket.c
@@ -28,6 +28,7 @@
@@ -402,8 +348,6 @@ index 4881fd0d32..c865f1133b 100644

#define SNDBUF_SIZE (8*1024*1024)

diff --git a/src/libsystemd/sd-bus/sd-bus.c b/src/libsystemd/sd-bus/sd-bus.c
index b8d4dc8d95..a8bac2665d 100644
--- a/src/libsystemd/sd-bus/sd-bus.c
+++ b/src/libsystemd/sd-bus/sd-bus.c
@@ -41,6 +41,7 @@
@@ -414,8 +358,6 @@ index b8d4dc8d95..a8bac2665d 100644

#define log_debug_bus_message(m) \
do { \
diff --git a/src/libsystemd/sd-bus/test-bus-benchmark.c b/src/libsystemd/sd-bus/test-bus-benchmark.c
index 8c6711797a..fac178823a 100644
--- a/src/libsystemd/sd-bus/test-bus-benchmark.c
+++ b/src/libsystemd/sd-bus/test-bus-benchmark.c
@@ -14,6 +14,7 @@
@@ -426,8 +368,6 @@ index 8c6711797a..fac178823a 100644

#define MAX_SIZE (2*1024*1024)

diff --git a/src/locale/keymap-util.c b/src/locale/keymap-util.c
index cb8153f4fe..d52a56019d 100644
--- a/src/locale/keymap-util.c
+++ b/src/locale/keymap-util.c
@@ -21,6 +21,7 @@
@@ -438,8 +378,6 @@ index cb8153f4fe..d52a56019d 100644

static bool startswith_comma(const char *s, const char *prefix) {
s = startswith(s, prefix);
diff --git a/src/login/pam_systemd.c b/src/login/pam_systemd.c
index 8e7a94db55..b5c368c6d1 100644
--- a/src/login/pam_systemd.c
+++ b/src/login/pam_systemd.c
@@ -31,6 +31,7 @@
@@ -450,8 +388,6 @@ index 8e7a94db55..b5c368c6d1 100644
#include "pam-util.h"
#include "parse-util.h"
#include "path-util.h"
diff --git a/src/network/generator/network-generator.c b/src/network/generator/network-generator.c
index 2fa21a067a..2e9995aa62 100644
--- a/src/network/generator/network-generator.c
+++ b/src/network/generator/network-generator.c
@@ -13,6 +13,7 @@
@@ -462,8 +398,6 @@ index 2fa21a067a..2e9995aa62 100644

/*
# .network
diff --git a/src/nspawn/nspawn-settings.c b/src/nspawn/nspawn-settings.c
index 92bb5120ab..eeca905c75 100644
--- a/src/nspawn/nspawn-settings.c
+++ b/src/nspawn/nspawn-settings.c
@@ -16,6 +16,7 @@
@@ -474,8 +408,6 @@ index 92bb5120ab..eeca905c75 100644

Settings *settings_new(void) {
Settings *s;
diff --git a/src/nss-mymachines/nss-mymachines.c b/src/nss-mymachines/nss-mymachines.c
index 53f0492116..c71941fdd7 100644
--- a/src/nss-mymachines/nss-mymachines.c
+++ b/src/nss-mymachines/nss-mymachines.c
@@ -19,6 +19,7 @@
@@ -486,8 +418,6 @@ index 53f0492116..c71941fdd7 100644

NSS_GETHOSTBYNAME_PROTOTYPES(mymachines);
NSS_GETPW_PROTOTYPES(mymachines);
diff --git a/src/portable/portable.c b/src/portable/portable.c
index ed7eac0291..78986977f8 100644
--- a/src/portable/portable.c
+++ b/src/portable/portable.c
@@ -31,6 +31,7 @@
@@ -498,8 +428,6 @@ index ed7eac0291..78986977f8 100644

static const char profile_dirs[] = CONF_PATHS_NULSTR("systemd/portable/profile");

diff --git a/src/resolve/resolvectl.c b/src/resolve/resolvectl.c
index b479335769..212d0bed20 100644
--- a/src/resolve/resolvectl.c
+++ b/src/resolve/resolvectl.c
@@ -37,6 +37,7 @@
@@ -510,8 +438,6 @@ index b479335769..212d0bed20 100644

static int arg_family = AF_UNSPEC;
static int arg_ifindex = 0;
diff --git a/src/shared/bus-get-properties.c b/src/shared/bus-get-properties.c
index 32f68d5e6a..bda1e1ef4f 100644
--- a/src/shared/bus-get-properties.c
+++ b/src/shared/bus-get-properties.c
@@ -4,6 +4,7 @@
@@ -522,8 +448,6 @@ index 32f68d5e6a..bda1e1ef4f 100644

int bus_property_get_bool(
sd_bus *bus,
diff --git a/src/shared/bus-unit-procs.c b/src/shared/bus-unit-procs.c
index 3e97be9671..2b8ca838f7 100644
--- a/src/shared/bus-unit-procs.c
+++ b/src/shared/bus-unit-procs.c
@@ -10,6 +10,7 @@
@@ -534,8 +458,6 @@ index 3e97be9671..2b8ca838f7 100644

struct CGroupInfo {
char *cgroup_path;
diff --git a/src/shared/bus-unit-util.c b/src/shared/bus-unit-util.c
index 2bab2299fb..62afdc7973 100644
--- a/src/shared/bus-unit-util.c
+++ b/src/shared/bus-unit-util.c
@@ -44,6 +44,7 @@
@@ -546,8 +468,6 @@ index 2bab2299fb..62afdc7973 100644

int bus_parse_unit_info(sd_bus_message *message, UnitInfo *u) {
assert(message);
diff --git a/src/shared/bus-util.c b/src/shared/bus-util.c
index fbda218b3b..aae0be75c6 100644
--- a/src/shared/bus-util.c
+++ b/src/shared/bus-util.c
@@ -21,6 +21,7 @@
@@ -558,8 +478,6 @@ index fbda218b3b..aae0be75c6 100644

static int name_owner_change_callback(sd_bus_message *m, void *userdata, sd_bus_error *ret_error) {
sd_event *e = userdata;
diff --git a/src/shared/dns-domain.c b/src/shared/dns-domain.c
index 8bd1e3a6ac..f8d6eab9bd 100644
--- a/src/shared/dns-domain.c
+++ b/src/shared/dns-domain.c
@@ -17,6 +17,7 @@
@@ -570,8 +488,6 @@ index 8bd1e3a6ac..f8d6eab9bd 100644

int dns_label_unescape(const char **name, char *dest, size_t sz, DNSLabelFlags flags) {
const char *n;
diff --git a/src/shared/journal-importer.c b/src/shared/journal-importer.c
index e95b638f4d..a1bf15baa8 100644
--- a/src/shared/journal-importer.c
+++ b/src/shared/journal-importer.c
@@ -14,6 +14,7 @@
@@ -582,8 +498,6 @@ index e95b638f4d..a1bf15baa8 100644

enum {
IMPORTER_STATE_LINE = 0, /* waiting to read, or reading line */
diff --git a/src/shared/logs-show.c b/src/shared/logs-show.c
index bf574d32a5..a09c79d2fb 100644
--- a/src/shared/logs-show.c
+++ b/src/shared/logs-show.c
@@ -41,6 +41,7 @@
@@ -594,8 +508,6 @@ index bf574d32a5..a09c79d2fb 100644

/* up to three lines (each up to 100 characters) or 300 characters, whichever is less */
#define PRINT_LINE_THRESHOLD 3
diff --git a/src/shared/pager.c b/src/shared/pager.c
index f689d9f28f..aae3957c2f 100644
--- a/src/shared/pager.c
+++ b/src/shared/pager.c
@@ -26,6 +26,7 @@
@@ -606,8 +518,6 @@ index f689d9f28f..aae3957c2f 100644

static pid_t pager_pid = 0;

diff --git a/src/shared/uid-range.c b/src/shared/uid-range.c
index 5d5bf7f21d..f1002ffa6c 100644
--- a/src/shared/uid-range.c
+++ b/src/shared/uid-range.c
@@ -9,6 +9,7 @@
@@ -618,8 +528,6 @@ index 5d5bf7f21d..f1002ffa6c 100644

static bool uid_range_intersect(UidRange *range, uid_t start, uid_t nr) {
assert(range);
diff --git a/src/socket-proxy/socket-proxyd.c b/src/socket-proxy/socket-proxyd.c
index 4391d9f1f5..9b73cd0aeb 100644
--- a/src/socket-proxy/socket-proxyd.c
+++ b/src/socket-proxy/socket-proxyd.c
@@ -26,6 +26,7 @@
@@ -630,8 +538,6 @@ index 4391d9f1f5..9b73cd0aeb 100644

#define BUFFER_SIZE (256 * 1024)

diff --git a/src/test/test-hexdecoct.c b/src/test/test-hexdecoct.c
index f0f9679769..128fc164da 100644
--- a/src/test/test-hexdecoct.c
+++ b/src/test/test-hexdecoct.c
@@ -6,6 +6,7 @@
@@ -642,8 +548,6 @@ index f0f9679769..128fc164da 100644

static void test_hexchar(void) {
assert_se(hexchar(0xa) == 'a');
diff --git a/src/udev/udev-builtin-path_id.c b/src/udev/udev-builtin-path_id.c
index 0da59e2c75..66da3741ee 100644
--- a/src/udev/udev-builtin-path_id.c
+++ b/src/udev/udev-builtin-path_id.c
@@ -22,6 +22,7 @@
@@ -654,8 +558,6 @@ index 0da59e2c75..66da3741ee 100644

_printf_(2,3)
static void path_prepend(char **path, const char *fmt, ...) {
diff --git a/src/udev/udev-event.c b/src/udev/udev-event.c
index 5159d19a38..0ed5b65844 100644
--- a/src/udev/udev-event.c
+++ b/src/udev/udev-event.c
@@ -34,6 +34,7 @@
@@ -666,8 +568,6 @@ index 5159d19a38..0ed5b65844 100644

typedef struct Spawn {
sd_device *device;
diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c
index ef6a0c112c..422fc19127 100644
--- a/src/udev/udev-rules.c
+++ b/src/udev/udev-rules.c
@@ -30,6 +30,7 @@
@@ -678,6 +578,13 @@ index ef6a0c112c..422fc19127 100644

#define RULES_DIRS (const char* const*) CONF_PATHS_STRV("udev/rules.d")

--
2.27.0

--- a/src/basic/mountpoint-util.c
|
||||
+++ b/src/basic/mountpoint-util.c
|
||||
@@ -10,6 +10,7 @@
|
||||
#include "fs-util.h"
|
||||
#include "missing_stat.h"
|
||||
#include "missing_syscall.h"
|
||||
+#include "missing_stdlib.h"
|
||||
#include "mountpoint-util.h"
|
||||
#include "parse-util.h"
|
||||
#include "path-util.h"
|
||||
|
@@ -373,15 +373,15 @@ USERADD_PACKAGES = "${PN} ${PN}-extra-utils \
${@bb.utils.contains('PACKAGECONFIG', 'journal-upload', '${PN}-journal-upload', '', d)} \
"
GROUPADD_PARAM_${PN} = "-r systemd-journal"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'coredump', '--system -d / -M --shell /bin/nologin systemd-coredump;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'networkd', '--system -d / -M --shell /bin/nologin systemd-network;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'coredump', '--system -d / -M --shell /sbin/nologin systemd-coredump;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'networkd', '--system -d / -M --shell /sbin/nologin systemd-network;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'polkit', '--system --no-create-home --user-group --home-dir ${sysconfdir}/polkit-1 polkitd;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'resolved', '--system -d / -M --shell /bin/nologin systemd-resolve;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'timesyncd', '--system -d / -M --shell /bin/nologin systemd-timesync;', '', d)}"
USERADD_PARAM_${PN}-extra-utils = "--system -d / -M --shell /bin/nologin systemd-bus-proxy"
USERADD_PARAM_${PN}-journal-gateway = "--system -d / -M --shell /bin/nologin systemd-journal-gateway"
USERADD_PARAM_${PN}-journal-remote = "--system -d / -M --shell /bin/nologin systemd-journal-remote"
USERADD_PARAM_${PN}-journal-upload = "--system -d / -M --shell /bin/nologin systemd-journal-upload"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'resolved', '--system -d / -M --shell /sbin/nologin systemd-resolve;', '', d)}"
USERADD_PARAM_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'timesyncd', '--system -d / -M --shell /sbin/nologin systemd-timesync;', '', d)}"
USERADD_PARAM_${PN}-extra-utils = "--system -d / -M --shell /sbin/nologin systemd-bus-proxy"
USERADD_PARAM_${PN}-journal-gateway = "--system -d / -M --shell /sbin/nologin systemd-journal-gateway"
USERADD_PARAM_${PN}-journal-remote = "--system -d / -M --shell /sbin/nologin systemd-journal-remote"
USERADD_PARAM_${PN}-journal-upload = "--system -d / -M --shell /sbin/nologin systemd-journal-upload"

FILES_${PN}-analyze = "${bindir}/systemd-analyze"


@@ -1,101 +0,0 @@
From b859ab1b211d348b46eca9158b7742f050c8115e Mon Sep 17 00:00:00 2001
From: Eric Blake <eblake@redhat.com>
Date: Wed, 14 Sep 2016 08:17:06 -0500
Subject: [PATCH] AC_HEADER_MAJOR: port to glibc 2.25

glibc 2.25 is deprecating the namespace pollution of <sys/types.h>
injecting major(), minor(), and makedev() into the compilation
environment, with a warning that insists that users include
<sys/sysmacros.h> instead. However, because the expansion of
AC_HEADER_MAJOR didn't bother checking sys/sysmacros.h until
after probing whether sys/types.h pollutes the namespace, it was
not defining MAJOR_IN_SYSMACROS, with the result that code
compiled with -Werror chokes on the deprecation warnings because
it was not including sysmacros.h.

In addition to fixing autoconf (which only benefits projects
that rebuild configure after this fix is released), we can also
give a hint to distros on how they can populate config.site with
a cache variable to force pre-existing configure scripts without
the updated macro to behave sanely in the presence of glibc 2.25
(the documentation is especially useful since that cache variable
is no longer present in autoconf after this patch).

Note that mingw lacks major/minor/makedev in any of its standard
headers; for that platform, the behavior of this macro is unchanged
(code using the recommended include formula will get a compile error
when trying to use major(), whether before or after this patch); but
for now, it is assumed that programs actually concerned with
creating devices are not worried about portability to mingw. If
desired, a later patch could tighten AC_HEADER_MAJOR to fail at
configure time if the macros are unavailable in any of the three
system headers, but that semantic change is not worth mixing into
this patch.

* lib/autoconf/headers.m4 (AC_HEADER_MAJOR): Drop check for
major within sys/types.h; it interferes with the need to check
sysmacros.h first.

Signed-off-by: Eric Blake <eblake@redhat.com>

Remove the documentation change from the patch
Upstream-Status: Backport

Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
---
lib/autoconf/headers.m4 | 30 ++++++++++++++----------------
1 file changed, 14 insertions(+), 16 deletions(-)

diff --git a/lib/autoconf/headers.m4 b/lib/autoconf/headers.m4
index 81a7fa2..a57d0d3 100644
--- a/lib/autoconf/headers.m4
+++ b/lib/autoconf/headers.m4
@@ -502,31 +502,29 @@ fi

# AC_HEADER_MAJOR
# ---------------
+# Thanks to glibc 2.25 deprecating macros in sys/types.h, coupled with
+# back-compat to autoconf 2.69, we need the following logic:
+# Check whether <sys/types.h> compiles.
+# If <sys/mkdev.h> compiles, assume it provides major/minor/makedev.
+# Otherwise, if <sys/sysmacros.h> compiles, assume it provides the macros.
+# Otherwise, either the macros were provided by <sys/types.h>, or do
+# not exist on the platform. Code trying to use these three macros is
+# assumed to not care about platforms that lack the macros.
AN_FUNCTION([major], [AC_HEADER_MAJOR])
AN_FUNCTION([makedev], [AC_HEADER_MAJOR])
AN_FUNCTION([minor], [AC_HEADER_MAJOR])
AN_HEADER([sys/mkdev.h], [AC_HEADER_MAJOR])
AC_DEFUN([AC_HEADER_MAJOR],
-[AC_CACHE_CHECK(whether sys/types.h defines makedev,
- ac_cv_header_sys_types_h_makedev,
-[AC_LINK_IFELSE([AC_LANG_PROGRAM([[@%:@include <sys/types.h>]],
- [[return makedev(0, 0);]])],
- [ac_cv_header_sys_types_h_makedev=yes],
- [ac_cv_header_sys_types_h_makedev=no])
-])
-
-if test $ac_cv_header_sys_types_h_makedev = no; then
+[AC_CHECK_HEADERS_ONCE([sys/types.h])
AC_CHECK_HEADER(sys/mkdev.h,
[AC_DEFINE(MAJOR_IN_MKDEV, 1,
[Define to 1 if `major', `minor', and `makedev' are
declared in <mkdev.h>.])])
-
- if test $ac_cv_header_sys_mkdev_h = no; then
- AC_CHECK_HEADER(sys/sysmacros.h,
- [AC_DEFINE(MAJOR_IN_SYSMACROS, 1,
- [Define to 1 if `major', `minor', and `makedev'
- are declared in <sysmacros.h>.])])
- fi
+if test $ac_cv_header_sys_mkdev_h = no; then
+ AC_CHECK_HEADER(sys/sysmacros.h,
+ [AC_DEFINE(MAJOR_IN_SYSMACROS, 1,
+ [Define to 1 if `major', `minor', and `makedev'
+ are declared in <sysmacros.h>.])])
fi
])# AC_HEADER_MAJOR

--
2.7.4

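The commit message above mentions a config.site workaround for configure scripts generated by an unpatched autoconf. A minimal sketch of that hint (the file location depends on the build setup; the cache variable name is the one from the patch above):

    # config.site -- force older generated configure scripts to look
    # past <sys/types.h> and define MAJOR_IN_SYSMACROS on glibc >= 2.25
    ac_cv_header_sys_types_h_makedev=no

Code using the macros then selects the right header from the MAJOR_IN_MKDEV / MAJOR_IN_SYSMACROS defines that AC_HEADER_MAJOR sets.
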
@@ -1,26 +0,0 @@
backport http://git.savannah.gnu.org/cgit/config.git/commit/config.sub?id=062587eaa891396c936555ae51f7e77eeb71a5fe

Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Backport
Index: autoconf-2.69/build-aux/config.sub
===================================================================
--- autoconf-2.69.orig/build-aux/config.sub
+++ autoconf-2.69/build-aux/config.sub
@@ -123,7 +123,7 @@ esac
maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
case $maybe_os in
nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \
- linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
+ linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \
knetbsd*-gnu* | netbsd*-gnu* | \
kopensolaris*-gnu* | \
storm-chaos* | os2-emx* | rtmk-nova*)
@@ -1360,7 +1360,7 @@ case $os in
| -chorusos* | -chorusrdb* | -cegcc* \
| -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
| -mingw32* | -linux-gnu* | -linux-android* \
- | -linux-newlib* | -linux-uclibc* \
+ | -linux-newlib* | -linux-musl* | -linux-uclibc* \
| -uxpv* | -beos* | -mpeix* | -udk* \
| -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
| -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
@@ -1,120 +0,0 @@
From 236552ff5b9f1ebf666d8d0e9850007dcce03d26 Mon Sep 17 00:00:00 2001
From: Serhii Popovych <spopovyc@cisco.com>
Date: Wed, 10 Feb 2016 16:32:44 +0000
Subject: [PATCH] perl: Replace -w option in shebangs with modern "use
warnings"

In some builds we might provide ac_cv_path_PERL as /usr/bin/env perl
to use newer version of the perl from users PATH rather than
older from standard system path.

However using /usr/bin/env perl -w from shebang line isn't
possible because it translates to something like
/usr/bin/env -w perl and env complains about illegal option.

To address this we can remove -w option from perl shebang
line and add "use warnings" statement.

Upstream-Status: Pending
Signed-off-by: Serhii Popovych <spopovyc@cisco.com>
---
bin/autom4te.in | 3 ++-
bin/autoreconf.in | 3 ++-
bin/autoscan.in | 3 ++-
bin/autoupdate.in | 3 ++-
bin/ifnames.in | 3 ++-
5 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/bin/autom4te.in b/bin/autom4te.in
index 11773c9..a8f5e41 100644
--- a/bin/autom4te.in
+++ b/bin/autom4te.in
@@ -1,4 +1,4 @@
-#! @PERL@ -w
+#! @PERL@
# -*- perl -*-
# @configure_input@

@@ -42,6 +42,7 @@ use Autom4te::General;
use Autom4te::XFile;
use File::Basename;
use strict;
+use warnings;

# Data directory.
my $pkgdatadir = $ENV{'AC_MACRODIR'} || '@pkgdatadir@';
diff --git a/bin/autoreconf.in b/bin/autoreconf.in
index e245db4..1a318cb 100644
--- a/bin/autoreconf.in
+++ b/bin/autoreconf.in
@@ -1,4 +1,4 @@
-#! @PERL@ -w
+#! @PERL@
# -*- perl -*-
# @configure_input@

@@ -45,6 +45,7 @@ use Autom4te::XFile;
# Do not use Cwd::chdir, since it might hang.
use Cwd 'cwd';
use strict;
+use warnings;

## ----------- ##
## Variables. ##
diff --git a/bin/autoscan.in b/bin/autoscan.in
index a67c48d..b931249 100644
--- a/bin/autoscan.in
+++ b/bin/autoscan.in
@@ -1,4 +1,4 @@
-#! @PERL@ -w
+#! @PERL@
# -*- perl -*-
# @configure_input@

@@ -43,6 +43,7 @@ use Autom4te::XFile;
use File::Basename;
use File::Find;
use strict;
+use warnings;

use vars qw(@cfiles @makefiles @shfiles @subdirs %printed);

diff --git a/bin/autoupdate.in b/bin/autoupdate.in
index 9737d49..92cb147 100644
--- a/bin/autoupdate.in
+++ b/bin/autoupdate.in
@@ -1,4 +1,4 @@
-#! @PERL@ -w
+#! @PERL@
# -*- perl -*-
# @configure_input@

@@ -44,6 +44,7 @@ use Autom4te::General;
use Autom4te::XFile;
use File::Basename;
use strict;
+use warnings;

# Lib files.
my $autom4te = $ENV{'AUTOM4TE'} || '@bindir@/@autom4te-name@';
diff --git a/bin/ifnames.in b/bin/ifnames.in
index ba2cd05..74b0278 100644
--- a/bin/ifnames.in
+++ b/bin/ifnames.in
@@ -1,4 +1,4 @@
-#! @PERL@ -w
+#! @PERL@
# -*- perl -*-
# @configure_input@

@@ -44,6 +44,7 @@ BEGIN
use Autom4te::General;
use Autom4te::XFile;
use Autom4te::FileUtils;
+use warnings;

# $HELP
# -----
--
2.3.0

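A quick way to reproduce the failure mode the commit message describes (a sketch, assuming GNU/Linux shebang handling, where everything after the interpreter path is passed as a single argument; the file name is made up):

    cat > demo.pl <<'EOF'
    #! /usr/bin/env perl -w
    print "never reached\n";
    EOF
    chmod +x demo.pl
    ./demo.pl
    # env is asked to handle "perl -w" itself and fails to start perl;
    # the patched scripts drop -w and get the same checks from a
    # "use warnings;" statement in the script body.
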
@@ -1,32 +1,36 @@
Upstream-Status: Pending
From 0071d28e304745a16871561f23117fdb00dd2559 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:23 +0000
Subject: [PATCH 4/7] autoreconf-exclude.patch

Index: autoconf-2.63/bin/autoreconf.in
===================================================================
--- autoconf-2.63.orig/bin/autoreconf.in 2008-12-31 17:39:01.000000000 +0000
+++ autoconf-2.63/bin/autoreconf.in 2008-12-31 17:43:38.000000000 +0000
@@ -76,6 +76,7 @@
-i, --install copy missing auxiliary files
---
bin/autoreconf.in | 26 ++++++++++++++++++++++++++
1 file changed, 26 insertions(+)

diff --git a/bin/autoreconf.in b/bin/autoreconf.in
index bb9f316d..7da3005b 100644
--- a/bin/autoreconf.in
+++ b/bin/autoreconf.in
@@ -82,6 +82,7 @@ Operation modes:
-i, --install copy missing standard auxiliary files
--no-recursive don't rebuild sub-packages
-s, --symlink with -i, install symbolic links instead of copies
+ -x, --exclude=STEPS steps we should not run
-m, --make when applicable, re-run ./configure && make
-W, --warnings=CATEGORY report the warnings falling in CATEGORY [syntax]

@@ -136,6 +137,13 @@
@@ -141,6 +142,10 @@ my $run_make = 0;
# Recurse into subpackages
my $recursive = 1;

+# Steps to exclude
+my @exclude;
+my @ex;
+
+my $uses_gettext;
+my $configure_ac;
+
## ---------- ##
## Routines. ##
## ---------- ##
@@ -153,6 +161,7 @@
@@ -161,6 +166,7 @@ sub parse_args ()
'B|prepend-include=s' => \@prepend_include,
'i|install' => \$install,
's|symlink' => \$symlink,
@@ -34,106 +38,85 @@ Index: autoconf-2.63/bin/autoreconf.in
'm|make' => \$run_make,
'recursive!' => \$recursive);

@@ -162,6 +171,8 @@
@@ -170,6 +176,8 @@ sub parse_args ()
parse_WARNINGS;
parse_warnings '--warnings', @warning;
parse_warnings @warning;

+ @exclude = map { split /,/ } @exclude;
+
# Even if the user specified a configure.ac, trim to get the
# directory, and look for configure.ac again. Because (i) the code
# is simpler, and (ii) we are still able to diagnose simultaneous
@@ -255,6 +266,11 @@
{
my ($aclocal, $flags) = @_;

+ @ex = grep (/^aclocal$/, @exclude);
+ if ($#ex != -1) {
+ return;
+ }
+
# aclocal 1.8+ does all this for free. It can be recognized by its
# --force support.
if ($aclocal_supports_force)
@@ -368,7 +384,10 @@
@@ -493,8 +501,11 @@ sub autoreconf_current_directory ($)
}
else
{
- xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
+ @ex = grep (/^autopoint$/, @exclude);
+ if ($#ex == -1) {
+ xsystem_hint ("autopoint is needed because this package uses Gettext", "$autopoint");
xsystem_hint ("autopoint is needed because this package uses Gettext",
$autopoint);
+ }
}


@@ -532,16 +551,17 @@
@@ -687,9 +698,12 @@ sub autoreconf_current_directory ($)
{
$libtoolize .= " --ltdl";
}
- xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
- $rerun_aclocal = 1;
+ @ex = grep (/^libtoolize$/, @exclude);
+ if ($#ex == -1) {
+ xsystem_hint ("libtoolize is needed because this package uses Libtool", $libtoolize);
+ $rerun_aclocal = 1;
xsystem_hint ("libtoolize is needed because this package uses Libtool",
$libtoolize);
$rerun_aclocal = 1;
+ }
}
else
{
verb "$configure_ac: not running libtoolize: --install not given";
@@ -726,8 +740,11 @@ sub autoreconf_current_directory ($)
}

-
-
# ------------------- #
# Rerunning aclocal. #
# ------------------- #
@@ -572,7 +592,10 @@
elsif ($install)
{
+ @ex = grep (/^gtkdocize$/, @exclude);
+ if ($#ex == -1) {
xsystem_hint ("gtkdocize is needed because this package uses Gtkdoc",
$gtkdocize);
+ }
}
else
{
@@ -765,7 +782,10 @@ sub autoreconf_current_directory ($)
# latter runs the former, and (ii) autoconf is stricter than
# autoheader. So all in all, autoconf should give better error
# messages.
- xsystem ($autoconf);
+ @ex = grep (/^autoconf$/, @exclude);
+ if ($#ex == -1) {
+ xsystem ("$autoconf");
xsystem ($autoconf);
+ }


# -------------------- #
@@ -593,7 +616,10 @@
@@ -786,7 +806,10 @@ sub autoreconf_current_directory ($)
}
else
{
- xsystem ($autoheader);
+ @ex = grep (/^autoheader$/, @exclude);
+ if ($#ex == -1) {
+ xsystem ("$autoheader");
xsystem ($autoheader);
+ }
}


@@ -610,7 +636,10 @@
@@ -803,7 +826,10 @@ sub autoreconf_current_directory ($)
# We should always run automake, and let it decide whether it shall
# update the file or not. In fact, the effect of `$force' is already
# included in `$automake' via `--no-force'.
- xsystem ($automake);
# update the file or not. In fact, the effect of '$force' is already
# included in '$automake' via '--no-force'.
+ @ex = grep (/^automake$/, @exclude);
+ if ($#ex == -1) {
+ xsystem ("$automake");
xsystem ($automake);
+ }
}


@@ -634,7 +663,10 @@
}
else
{
- xsystem ("$make");
+ @ex = grep (/^make$/, @exclude);
+ if ($#ex == -1) {
+ xsystem ("$make");
+ }
}
}
}
# ---------------------------------------------------- #
--
2.25.1


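As a usage sketch (the invocation is hypothetical, not taken from the patch): the new -x/--exclude option takes a comma-separated list of step names matching the greps above, so a build that handles gettext and libtool itself could run:

    # skip autopoint and libtoolize, run the remaining steps as usual
    autoreconf --verbose --install --force --exclude=autopoint,libtoolize
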
@@ -1,38 +0,0 @@
Upstream-Status: Pending

#
# Patch managed by http://www.mn-logistik.de/unsupported/pxa250/patcher
#

Index: autoconf-2.63/bin/autoreconf.in
===================================================================
--- autoconf-2.63.orig/bin/autoreconf.in 2008-12-31 17:43:55.000000000 +0000
+++ autoconf-2.63/bin/autoreconf.in 2008-12-31 17:46:16.000000000 +0000
@@ -58,7 +58,7 @@
$help = "Usage: $0 [OPTION]... [DIRECTORY]...

Run `autoconf' (and `autoheader', `aclocal', `automake', `autopoint'
-(formerly `gettextize'), and `libtoolize' where appropriate)
+(formerly `gettextize'), `libtoolize', and `gnu-configize' where appropriate)
repeatedly to remake the GNU Build System files in specified
DIRECTORIES and their subdirectories (defaulting to `.').

@@ -115,6 +115,7 @@
my $libtoolize = $ENV{'LIBTOOLIZE'} || 'libtoolize';
my $autopoint = $ENV{'AUTOPOINT'} || 'autopoint';
my $make = $ENV{'MAKE'} || 'make';
+my $gnuconfigize = $ENV{'GNUCONFIGIZE'} || 'gnu-configize';

# --install -- as --add-missing in other tools.
my $install = 0;
@@ -644,6 +645,10 @@
}
}

+ @ex = grep (/^gnu-configize$/, @exclude);
+ if ($#ex == -1) {
+ xsystem ("$gnuconfigize");
+ }

# -------------- #
# Running make. #
@@ -1,22 +1,14 @@
From a7e722f974e2529d3e564d8d94c86cc8bdbc40e7 Mon Sep 17 00:00:00 2001
From: Radu Patriu <radu.patriu@enea.com>
Date: Mon, 24 Mar 2014 16:33:19 +0200
Subject: [PATCH] autotest: new testsuite option to enable automake test
result format
From 8c0f24404bebffdaf3132d81e2b9560d34ff1677 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:45 +0000
Subject: [PATCH 6/7] autotest-automake-result-format.patch

* lib/autotest/general.m4: added "--am-fmt | -A" command line
parameter for testsuite script to enable "RESULT: testname" output;
will be used by yocto ptest packages.

Upstream-Status: Pending

Signed-off-by: Radu Patriu <radu.patriu@enea.com>
---
lib/autotest/general.m4 | 39 +++++++++++++++++++++++++++++----------
lib/autotest/general.m4 | 39 +++++++++++++++++++++++++++++----------
1 file changed, 29 insertions(+), 10 deletions(-)

diff --git a/lib/autotest/general.m4 b/lib/autotest/general.m4
index 60c0352..c1f5a9b 100644
index 0c0e3c5b..17590e96 100644
--- a/lib/autotest/general.m4
+++ b/lib/autotest/general.m4
@@ -412,6 +412,9 @@ at_recheck=
@@ -29,7 +21,7 @@ index 60c0352..c1f5a9b 100644
# The directory we run the suite in. Default to . if no -C option.
at_dir=`pwd`
# An absolute reference to this testsuite script.
@@ -530,6 +533,10 @@ do
@@ -525,6 +528,10 @@ do
at_check_filter_trace=at_fn_filter_trace
;;

@@ -40,7 +32,7 @@ index 60c0352..c1f5a9b 100644
[[0-9] | [0-9][0-9] | [0-9][0-9][0-9] | [0-9][0-9][0-9][0-9]])
at_fn_validate_ranges at_option
AS_VAR_APPEND([at_groups], ["$at_option$as_nl"])
@@ -718,10 +725,10 @@ m4_divert_push([HELP_MODES])dnl
@@ -713,10 +720,10 @@ m4_divert_push([HELP_MODES])dnl
cat <<_ATEOF || at_write_fail=1

Operation modes:
@@ -55,7 +47,7 @@ index 60c0352..c1f5a9b 100644
_ATEOF
m4_divert_pop([HELP_MODES])dnl
m4_wrap([m4_divert_push([HELP_TUNING_BEGIN])dnl
@@ -747,6 +754,7 @@ Execution tuning:
@@ -742,6 +749,7 @@ Execution tuning:
-d, --debug inhibit clean up and top-level logging
[ default for debugging scripts]
-x, --trace enable tests shell tracing
@@ -63,7 +55,7 @@ index 60c0352..c1f5a9b 100644
_ATEOF
m4_divert_pop([HELP_TUNING_BEGIN])])dnl
m4_divert_push([HELP_END])dnl
@@ -1162,7 +1170,9 @@ at_fn_group_banner ()
@@ -1129,7 +1137,9 @@ at_fn_group_banner ()
[*]) at_desc_line="$[1]: " ;;
esac
AS_VAR_APPEND([at_desc_line], ["$[3]$[4]"])
@@ -74,7 +66,7 @@ index 60c0352..c1f5a9b 100644
echo "# -*- compilation -*-" >> "$at_group_log"
}

@@ -1188,42 +1198,51 @@ _ATEOF
@@ -1155,42 +1165,51 @@ _ATEOF
case $at_xfail:$at_status in
yes:0)
at_msg="UNEXPECTED PASS"
@@ -132,5 +124,5 @@ index 60c0352..c1f5a9b 100644
at_log_msg="$at_group. $at_desc ($at_setup_line): $at_msg"
case $at_status in
--
1.7.9.5
2.25.1


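A sketch of what the option enables (the script path and test names are examples): a testsuite generated with the patched general.m4 accepts the new flag and prints one automake-style "RESULT: testname" line per test, which a ptest wrapper can parse directly:

    ./testsuite --am-fmt     # short form: -A
    # output resembles:
    #   PASS: first-test
    #   FAIL: second-test
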
@@ -1,34 +0,0 @@
Use --warning=cross only if supported by automake

Upstream-Status: Inappropriate [configuration]

Signed-off-by: Constantin Musca <constantinx.musca@intel.com>

--- a/bin/autoreconf.in
+++ b/bin/autoreconf.in
@@ -127,6 +127,8 @@ my $aclocal_supports_warnings = 0;
my $automake_supports_force_missing = 0;
# Does automake support -Wfoo?
my $automake_supports_warnings = 0;
+# Does automake support --warning=cross
+my $automake_supports_cross_warning = 0;

my @prepend_include;
my @include;
@@ -191,6 +193,7 @@ sub parse_args ()
$aclocal_supports_warnings = $aclocal_help =~ /--warnings/;
$automake_supports_force_missing = $automake_help =~ /--force-missing/;
$automake_supports_warnings = $automake_help =~ /--warnings/;
+ $automake_supports_cross_warning = $automake_help =~ /cross/;

# Dispatch autoreconf's option to the tools.
# --include;
@@ -244,6 +247,8 @@ sub parse_args ()
$libtoolize .= ' --debug';
}
# --warnings;
+ @warning = grep { $_ ne "cross" } @warning
+ if ! $automake_supports_cross_warning;
if (@warning)
{
my $warn = ' --warnings=' . join (',', @warning);
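The probe keys off automake's --help text; the same check can be reproduced by hand (assuming a POSIX shell and grep):

    automake --help | grep -q cross \
        && echo "this automake understands --warnings=cross"
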
@@ -1,40 +0,0 @@
Upstream-Status: Pending

Poky provides a list of site files in CONFIG_SITE whereas autoconf
only expects one file. This patch changes autoconf to accept a list of
them.

RP 1/2/10

Updated for 2.68 version: the CONFIG_SITE var was not getting used at all
fixed the 2.68 code
Nitin A Kamble <nitin.a.kamble@intel.com> 2011/05/27

Index: autoconf-2.68/lib/autoconf/general.m4
===================================================================
--- autoconf-2.68.orig/lib/autoconf/general.m4
+++ autoconf-2.68/lib/autoconf/general.m4
@@ -1878,7 +1878,6 @@ AU_DEFUN([AC_VALIDATE_CACHED_SYSTEM_TUPL
m4_define([AC_SITE_LOAD],
[# Prefer an explicitly selected file to automatically selected ones.
ac_site_file1=NONE
-ac_site_file2=NONE
if test -n "$CONFIG_SITE"; then
# We do not want a PATH search for config.site.
case $CONFIG_SITE in @%:@((
@@ -1886,14 +1885,8 @@ if test -n "$CONFIG_SITE"; then
*/*) ac_site_file1=$CONFIG_SITE;;
*) ac_site_file1=./$CONFIG_SITE;;
esac
-elif test "x$prefix" != xNONE; then
- ac_site_file1=$prefix/share/config.site
- ac_site_file2=$prefix/etc/config.site
-else
- ac_site_file1=$ac_default_prefix/share/config.site
- ac_site_file2=$ac_default_prefix/etc/config.site
fi
-for ac_site_file in $ac_site_file1
+for ac_site_file in $ac_site_file1
do
test "x$ac_site_file" = xNONE && continue
if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
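Because the rewritten loop expands $ac_site_file1 unquoted, CONFIG_SITE can now carry a whitespace-separated list of site files, which is how Poky sets it. A sketch with example paths only:

    CONFIG_SITE="/path/to/site-config-common /path/to/site-config-target" \
        ./configure --host=x86_64-poky-linux
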
@@ -1,120 +0,0 @@
Upstream-Status: Pending

We don't build xmkmf so any values returned from it are going to be wrong.
Using any paths in /usr/ for x headers/libs is a bad idea when cross compiling.
This patch removes them to stop any confusion.

RP - 20071115

Index: autoconf-2.68/lib/autoconf/libs.m4
===================================================================
--- autoconf-2.68.orig/lib/autoconf/libs.m4
+++ autoconf-2.68/lib/autoconf/libs.m4
@@ -159,53 +159,6 @@ m4_popdef([AC_Lib_Name])dnl
# --------------------- #


-# _AC_PATH_X_XMKMF
-# ----------------
-# Internal subroutine of _AC_PATH_X.
-# Set ac_x_includes and/or ac_x_libraries.
-m4_define([_AC_PATH_X_XMKMF],
-[AC_ARG_VAR(XMKMF, [Path to xmkmf, Makefile generator for X Window System])dnl
-rm -f -r conftest.dir
-if mkdir conftest.dir; then
- cd conftest.dir
- cat >Imakefile <<'_ACEOF'
-incroot:
- @echo incroot='${INCROOT}'
-usrlibdir:
- @echo usrlibdir='${USRLIBDIR}'
-libdir:
- @echo libdir='${LIBDIR}'
-_ACEOF
- if (export CC; ${XMKMF-xmkmf}) >/dev/null 2>/dev/null && test -f Makefile; then
- # GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
- for ac_var in incroot usrlibdir libdir; do
- eval "ac_im_$ac_var=\`\${MAKE-make} $ac_var 2>/dev/null | sed -n 's/^$ac_var=//p'\`"
- done
- # Open Windows xmkmf reportedly sets LIBDIR instead of USRLIBDIR.
- for ac_extension in a so sl dylib la dll; do
- if test ! -f "$ac_im_usrlibdir/libX11.$ac_extension" &&
- test -f "$ac_im_libdir/libX11.$ac_extension"; then
- ac_im_usrlibdir=$ac_im_libdir; break
- fi
- done
- # Screen out bogus values from the imake configuration. They are
- # bogus both because they are the default anyway, and because
- # using them would break gcc on systems where it needs fixed includes.
- case $ac_im_incroot in
- /usr/include) ac_x_includes= ;;
- *) test -f "$ac_im_incroot/X11/Xos.h" && ac_x_includes=$ac_im_incroot;;
- esac
- case $ac_im_usrlibdir in
- /usr/lib | /usr/lib64 | /lib | /lib64) ;;
- *) test -d "$ac_im_usrlibdir" && ac_x_libraries=$ac_im_usrlibdir ;;
- esac
- fi
- cd ..
- rm -f -r conftest.dir
-fi
-])# _AC_PATH_X_XMKMF
-
-
# _AC_PATH_X_DIRECT
# -----------------
# Internal subroutine of _AC_PATH_X.
@@ -213,44 +166,7 @@ fi
m4_define([_AC_PATH_X_DIRECT],
[# Standard set of common directories for X headers.
# Check X11 before X11Rn because it is often a symlink to the current release.
-ac_x_header_dirs='
-/usr/X11/include
-/usr/X11R7/include
-/usr/X11R6/include
-/usr/X11R5/include
-/usr/X11R4/include
-
-/usr/include/X11
-/usr/include/X11R7
-/usr/include/X11R6
-/usr/include/X11R5
-/usr/include/X11R4
-
-/usr/local/X11/include
-/usr/local/X11R7/include
-/usr/local/X11R6/include
-/usr/local/X11R5/include
-/usr/local/X11R4/include
-
-/usr/local/include/X11
-/usr/local/include/X11R7
-/usr/local/include/X11R6
-/usr/local/include/X11R5
-/usr/local/include/X11R4
-
-/usr/X386/include
-/usr/x386/include
-/usr/XFree86/include/X11
-
-/usr/include
-/usr/local/include
-/usr/unsupported/include
-/usr/athena/include
-/usr/local/x11r5/include
-/usr/lpp/Xamples/include
-
-/usr/openwin/include
-/usr/openwin/share/include'
+ac_x_header_dirs=''

if test "$ac_x_includes" = no; then
# Guess where to find include files, by looking for Xlib.h.
@@ -299,7 +215,6 @@ AC_DEFUN([_AC_PATH_X],
[AC_CACHE_VAL(ac_cv_have_x,
[# One or both of the vars are not set, and there is no cached value.
ac_x_includes=no ac_x_libraries=no
-_AC_PATH_X_XMKMF
_AC_PATH_X_DIRECT
case $ac_x_includes,$ac_x_libraries in #(
no,* | *,no | *\'*)
24 meta/recipes-devtools/autoconf/autoconf/man-host-perl.patch Normal file
@@ -0,0 +1,24 @@
Don't use the target perl when regenerating the man pages.

Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>

diff --git a/man/local.mk b/man/local.mk
index e69858b1..78c68ab5 100644
--- a/man/local.mk
+++ b/man/local.mk
@@ -67,13 +67,12 @@ SUFFIXES += .w .1
@echo "Updating man page $@"
$(MKDIR_P) $(@D)
PATH="$(top_srcdir)/man$(PATH_SEPARATOR)$$PATH"; \
- PERL="$(PERL)"; \
PACKAGE_NAME="$(PACKAGE_NAME)"; \
VERSION="$(VERSION)"; \
RELEASE_YEAR="$(RELEASE_YEAR)"; \
top_srcdir="$(top_srcdir)"; \
channeldefs_pm="$(channeldefs_pm)"; \
- export PATH PERL PACKAGE_NAME VERSION RELEASE_YEAR; \
+ export PATH PACKAGE_NAME VERSION RELEASE_YEAR; \
export top_srcdir channeldefs_pm; \
$(HELP2MAN) \
--include=$(srcdir)/$*.x \
21 meta/recipes-devtools/autoconf/autoconf/no-man.patch Normal file
@@ -0,0 +1,21 @@
For native builds we don't care about the documentation, and this would
otherwise pull in a dependency on help2man.

Upstream-Status: Inappropriate
Signed-off-by: Ross Burton <ross.burton@arm.com>

diff --git a/Makefile.in b/Makefile.in
index 146e8e3..a1827c1 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -763,10 +762,0 @@ dist_buildaux_SCRIPTS = \
-dist_man_MANS = \
- man/autoconf.1 \
- man/autoheader.1 \
- man/autom4te.1 \
- man/autoreconf.1 \
- man/autoscan.1 \
- man/autoupdate.1 \
- man/ifnames.1
-
-
@@ -1,60 +0,0 @@
The check for solaris 'print' causes significant problems on a linux machine
with dash as /bin/sh since it triggers the execution of "print" which on some
linux systems is a perl script which is part of mailcap. Worse, this perl
script calls "which file" and if successful ignores the path file was found
in and just runs "file" without a path. Each execution causes PATH to be searched.

Simply assuming the shell's printf function works cuts out all the fork overhead
and when parallel tasks are running, this overhead appears to be significant.

RP
2015/11/28
Upstream-Status: Inappropriate

Index: autoconf-2.69/lib/m4sugar/m4sh.m4
===================================================================
--- autoconf-2.69.orig/lib/m4sugar/m4sh.m4
+++ autoconf-2.69/lib/m4sugar/m4sh.m4
@@ -1045,40 +1045,8 @@ m4_defun([_AS_ECHO_PREPARE],
[[as_nl='
'
export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
- && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='print -r --'
- as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
- as_echo='printf %s\n'
- as_echo_n='printf %s'
-else
- if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
- as_echo_body='eval /usr/ucb/echo -n "$][1$as_nl"'
- as_echo_n='/usr/ucb/echo -n'
- else
- as_echo_body='eval expr "X$][1" : "X\\(.*\\)"'
- as_echo_n_body='eval
- arg=$][1;
- case $arg in @%:@(
- *"$as_nl"*)
- expr "X$arg" : "X\\(.*\\)$as_nl";
- arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
- esac;
- expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
- '
- export as_echo_n_body
- as_echo_n='sh -c $as_echo_n_body as_echo'
- fi
- export as_echo_body
- as_echo='sh -c $as_echo_body as_echo'
-fi
+as_echo='printf %s\n'
+as_echo_n='printf %s'
]])# _AS_ECHO_PREPARE


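After this change every generated configure script takes the printf path unconditionally; the net effect is just (a sketch of the resulting shell state):

    as_echo='printf %s\n'
    as_echo_n='printf %s'
    $as_echo "checking for something"   # printf is a shell builtin: no fork
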
@@ -1,25 +1,36 @@
This value is used to determine CONFIG_SHELL and SHELL which may get exported into
scripts shared via sstate onto other systems.
From 0aac3047cd7681d610b22d79501c297fa3433148 Mon Sep 17 00:00:00 2001
From: Ross Burton <ross.burton@intel.com>
Date: Thu, 12 Mar 2020 17:25:41 +0000
Subject: [PATCH 2/7] m4sh: prefer bash over sh

Some systems have /bin/sh -> dash and others /bin/sh -> bash. Bash is preferred
but we can sometimes end up exporting /bin/sh yet use bashisms.
_AS_DETECT_BETTER_SHELL looks for a good shell to use, and tries to look for
'sh' before 'bash'. Whilst for many systems sh is a symlink to bash,
there are many where sh is a symlink to a more minimal sh implementation.

This patch puts bash first in the search results which avoids the bash/dash confusion.
For example, Debian by default has /bin/sh -> /bin/dash: dash is a faster
shell to start (which makes a notable difference to boot speed) but is not
as fast as bash at executing long scripts (and configure scripts are not
known for their conciseness).

RP 2012/9/23
Change the search order to bash then sh, so that a known-good shell (bash)
is used if available over something which is merely POSIX compliant.
---
lib/m4sugar/m4sh.m4 | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

Upstream-Status: Inappropriate [OE specific configuration]

Index: autoconf-2.69/lib/m4sugar/m4sh.m4
===================================================================
--- autoconf-2.69.orig/lib/m4sugar/m4sh.m4 2012-03-07 17:35:26.000000000 +0000
+++ autoconf-2.69/lib/m4sugar/m4sh.m4 2013-09-23 16:12:38.853597515 +0000
@@ -229,7 +229,7 @@
diff --git a/lib/m4sugar/m4sh.m4 b/lib/m4sugar/m4sh.m4
index 9d543952..84ef84a9 100644
--- a/lib/m4sugar/m4sh.m4
+++ b/lib/m4sugar/m4sh.m4
@@ -230,7 +230,7 @@ dnl Remove any tests from suggested that are also required
[_AS_PATH_WALK([/bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH],
[case $as_dir in @%:@(
/*)
- for as_base in sh bash ksh sh5; do
+ for as_base in bash sh ksh sh5; do
# Try only shells that exist, to save several forks.
as_shell=$as_dir/$as_base
as_shell=$as_dir$as_base
AS_IF([{ test -f "$as_shell" || test -f "$as_shell.exe"; } &&
--
2.25.1

Some files were not shown because too many files have changed in this diff.