The hashserv module implements a flexible RPC mechanism based on sending JSON-formatted messages over unix or tcp sockets, and uses Python's asyncio features to build an efficient message loop on both the client and server side. Much of this implementation is not specific to the hash equivalency service and can be extracted into a new module for easy re-use elsewhere in bitbake.

(Bitbake rev: 4105ffd967fa86154ad67366aaf0f898abf78d14)

Signed-off-by: Paul Barker <pbarker@konsulko.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
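As a rough sketch of the style of RPC described above (not the actual hashserv or asyncrpc API), the snippet below exchanges one newline-delimited JSON message over a unix domain socket using asyncio. The socket path and function names are invented for illustration only.

# Illustrative only: a minimal newline-delimited JSON request/response over a
# unix domain socket using asyncio. The socket path and function names below
# are invented for this sketch and are not part of the hashserv API.
import asyncio
import json
import os

SOCKET_PATH = "./example-rpc.sock"  # hypothetical path

async def handle_client(reader, writer):
    # Server side: read one JSON message per line and echo it back.
    line = await reader.readline()
    request = json.loads(line.decode("utf-8"))
    writer.write((json.dumps({"echo": request}) + "\n").encode("utf-8"))
    await writer.drain()
    writer.close()
    await writer.wait_closed()

async def main():
    if os.path.exists(SOCKET_PATH):
        os.unlink(SOCKET_PATH)  # remove a stale socket from a previous run
    server = await asyncio.start_unix_server(handle_client, path=SOCKET_PATH)
    async with server:
        # Client side: send one request and read the newline-terminated reply.
        reader, writer = await asyncio.open_unix_connection(path=SOCKET_PATH)
        writer.write((json.dumps({"ping": None}) + "\n").encode("utf-8"))
        await writer.drain()
        print(json.loads((await reader.readline()).decode("utf-8")))
        writer.close()
        await writer.wait_closed()

asyncio.run(main())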
#
# SPDX-License-Identifier: GPL-2.0-only
#

import itertools
import json

# The Python async server defaults to a 64K receive buffer, so we hardcode our
# maximum chunk size. It would be better if the client and server reported to
# each other what the maximum chunk sizes were, but that will slow down the
# connection setup with a round trip delay so I'd rather not do that unless it
# is necessary
DEFAULT_MAX_CHUNK = 32 * 1024


def chunkify(msg, max_chunk):
    """Frame a serialised message as newline-terminated chunks.

    A message shorter than max_chunk - 1 characters is yielded as a single
    line. Longer messages are announced with a {"chunk-stream": null} marker
    line, split into pieces of max_chunk - 1 characters, and terminated by an
    empty line.
    """
    if len(msg) < max_chunk - 1:
        yield ''.join((msg, "\n"))
    else:
        yield ''.join((json.dumps({
            'chunk-stream': None
        }), "\n"))

        args = [iter(msg)] * (max_chunk - 1)
        for m in map(''.join, itertools.zip_longest(*args, fillvalue='')):
            yield ''.join(itertools.chain(m, "\n"))
        yield "\n"


from .client import AsyncClient, Client
from .serv import AsyncServer, AsyncServerConnection
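A quick way to see the framing that chunkify() produces is to run it over a small and a large payload. The snippet below is illustrative only: it assumes bitbake's lib/ directory is on sys.path so this module imports as 'hashserv', and the payload contents are made up.

# Illustrative only: shows the two framing modes of chunkify(). Assumes this
# module is importable as 'hashserv' (e.g. bitbake's lib/ directory is on
# sys.path); the payload contents are invented.
import json
from hashserv import chunkify, DEFAULT_MAX_CHUNK

small = json.dumps({"ping": None})
print(list(chunkify(small, DEFAULT_MAX_CHUNK)))
# Small messages fit in a single newline-terminated line:
#   ['{"ping": null}\n']

large = json.dumps({"data": "x" * (2 * DEFAULT_MAX_CHUNK)})
pieces = list(chunkify(large, DEFAULT_MAX_CHUNK))
print(pieces[0])   # '{"chunk-stream": null}\n' marker announcing a chunk stream
print(pieces[-1])  # a bare '\n' terminates the stream
# The lines in between each carry up to DEFAULT_MAX_CHUNK - 1 characters of the
# original message.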