change chunking to work with actual byte-sizes

Jakob Ketterl 2023-02-22 17:23:11 +01:00
parent 685b9970d2
commit cb5b2e64af


@@ -105,18 +105,25 @@ class Uploader(object):
         # filter out any erroneous encodes
         encoded = [e for e in encoded if e is not None]
 
-        def chunks(l, n):
-            """Yield successive n-sized chunks from l."""
-            for i in range(0, len(l), n):
-                yield l[i : i + n]
+        def chunks(block, max_size):
+            size = 0
+            current = []
+            for r in block:
+                if size + len(r) > max_size:
+                    yield current
+                    current = []
+                    size = 0
+                size += len(r)
+                current.append(r)
+            yield current
 
         rHeader = self.getReceiverInformationHeader()
         rInfo = self.getReceiverInformation()
         sHeader = self.getSenderInformationHeader()
 
         packets = []
-        # 50 seems to be a safe bet
-        for chunk in chunks(encoded, 50):
+        # 1200 bytes of sender data should keep the packet size below MTU for most cases
+        for chunk in chunks(encoded, 1200):
             sInfo = self.getSenderInformation(chunk)
             length = 16 + len(rHeader) + len(sHeader) + len(rInfo) + len(sInfo)
             header = self.getHeader(length)
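For context, a self-contained sketch (not part of the commit) of the new byte-size-based chunking introduced above: instead of yielding a fixed number of records per chunk, chunks() accumulates records until adding the next one would push the running byte total past max_size, then starts a new chunk.

# Standalone illustration of the generator from the diff, applied to a
# list of encoded byte strings.
def chunks(block, max_size):
    size = 0
    current = []
    for r in block:
        # flush the current chunk before the running byte total would exceed max_size
        if size + len(r) > max_size:
            yield current
            current = []
            size = 0
        size += len(r)
        current.append(r)
    yield current


# Example (hypothetical sizes): records of 500, 700 and 900 bytes with
# max_size=1200 are grouped by byte total rather than by record count.
records = [b"a" * 500, b"b" * 700, b"c" * 900]
print([sum(len(r) for r in chunk) for chunk in chunks(records, 1200)])
# -> [1200, 900]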