Skip to content
Snippets Groups Projects
Commit 031daad8 authored by Bruce Flynn's avatar Bruce Flynn
Browse files

massage merge stream iteration bug.

* Ensure streams is a known iterable type (deque)
* Make sure to reuse a stream in the case of a bad packet group. This was
  done with an exception so it was handled the same way as the other 2
  major cases.
parent fd59e81c
No related branches found
No related tags found
No related merge requests found
...@@ -15,9 +15,17 @@ from edosl0util.stream import ( ...@@ -15,9 +15,17 @@ from edosl0util.stream import (
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
class InvalidPacketGroup(Exception):
    """
    Raised when a packet group is malformed: its packets do not all share
    the same APID, or the group does not end with a last packet.
    """
def _group_packets(stream): def _group_packets(stream):
""" """
Returns a generator that yields all packets between timestamps. Returns a generator that yields all packets until a timestamps, i.e., a
packet group.
""" """
packet = stream.next() packet = stream.next()
while not packet.stamp: while not packet.stamp:
...@@ -27,65 +35,57 @@ def _group_packets(stream): ...@@ -27,65 +35,57 @@ def _group_packets(stream):
stream.push_back(packet) stream.push_back(packet)
def _is_valid_group(packets):
return packets[0].is_first() \
and packets[-1].is_last() \
and packets[0].apid == packets[-1].apid
def merge(streams, output=sys.stdout):
    """
    Merge packets from multiple streams, writing them to *output* in time
    order.

    :param streams: iterable of packet streams. Each stream must provide
        ``next()``, ``push_back``, ``seek_to`` and ``seek_to_next_stamp``.
        Streams are dropped as they are exhausted (``StopIteration``) or
        found corrupt (``PacketTooShort``); streams hitting a recoverable
        error (``NonConsecutiveSeqId``, ``InvalidPacketGroup``) are
        re-queued and retried later.
    :param output: file-like object receiving the merged packet bytes.
    """
    last_packet = None
    # Ensure streams is a known iterable type; a deque gives O(1) pops from
    # the front and lets us cheaply re-append still-usable streams.
    streams = deque(streams)
    while streams:
        stream = streams.popleft()
        try:
            if last_packet is not None:
                # Resume this stream just past the point already written.
                LOG.debug("seeking to %s, %s", last_packet.stamp, last_packet.apid)
                stream.seek_to(last_packet.stamp, last_packet.apid)
                stream.seek_to_next_stamp()

            # Do until `next` causes StopIteration
            while True:
                packets_to_write = deque()
                packet = stream.next()
                if packet.is_standalone():
                    packets_to_write.append(packet)

                elif packet.is_first():  # packet group
                    group = deque([packet])
                    group.extend(_group_packets(stream))
                    # FIX: `not` must negate the entire validity check, not
                    # just the first condition; otherwise valid groups raise
                    # and most invalid ones pass through.
                    if not (group[0].is_first()
                            and group[-1].is_last()
                            and group[0].apid == group[-1].apid):
                        raise InvalidPacketGroup()
                    packets_to_write.extend(group)

                else:
                    LOG.debug("skipping hanging packet: %s", packet)

                # First packet always has a stamp because it's either
                # standalone or part of a valid group. A hanging packet
                # leaves nothing to write, so guard against IndexError.
                if packets_to_write:
                    last_packet = packets_to_write[0]
                while packets_to_write:
                    pkt = packets_to_write.popleft()
                    output.write(pkt.bytes())

        except NonConsecutiveSeqId:
            streams.append(stream)  # stream still usable
            LOG.debug('missing sequence id, next stream')

        except InvalidPacketGroup:
            streams.append(stream)  # stream still usable
            LOG.debug("invalid group, switching streams:%s", group)

        except PacketTooShort as err:
            LOG.error("corrupt stream, removing: %s", err)

        except StopIteration:
            LOG.debug("end-of-stream %s", stream)
def merge_files(filepaths, destpath): def merge_files(filepaths, destpath):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment