Posted to commits@qpid.apache.org by tr...@apache.org on 2018/11/27 14:27:24 UTC

[1/2] qpid-dispatch git commit: DISPATCH-1191 - Import crolke's adverbl project This closes #421

Repository: qpid-dispatch
Updated Branches:
  refs/heads/master 42b4ab5d4 -> 5c3411a1a


http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/parser.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/parser.py b/bin/log_scraper/parser.py
new file mode 100755
index 0000000..44c3e91
--- /dev/null
+++ b/bin/log_scraper/parser.py
@@ -0,0 +1,930 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+from datetime import *
+import re
+import sys
+import traceback
+
+import splitter
+import test_data as td
+import common
+import text
+import router
+
+
+def colorize_bg(what):
+    # TODO: use the real colorize_bg
+    return what
+
+
+class LogLineData:
+
+    def direction_is_in(self):
+        return self.direction == text.direction_in()
+
+    def direction_is_out(self):
+        return self.direction == text.direction_out()
+
+    def __init__(self):
+        self.web_show_str = ""
+        self.name = ""
+        self.conn_num = ""  # source router's undecorated conn num
+        self.conn_id = ""  # decorated routerPrefixLetter'instanceNumber-conn_num
+        self.conn_peer = ""  # display name of peer in seen in Open 'A - routerId.Test'
+        self.channel = ""  # undecorated number - '0'
+        self.direction = ""  # '<-' IN, or '->' OUT, or '--'
+        self.described_type = DescribedType()  # DescribedType object
+        self.handle = ""  # undecorated number - '1'
+        self.delivery_id = ""  # "0"
+        self.delivery_tag = ""  # "00:00:00:00"
+        self.remote = ""  # undecorated number - '2'
+        self.channel_handle = ""  # decorated - '[0,0]'
+        self.channel_remote = ""  # decorated - '[1,2]'
+        self.flow_deliverycnt = ""  # undecorated number - '50'
+        self.flow_linkcredit = ""  # undecorated number - '100'
+        self.flow_cnt_credit = ""  # decorated - '(50,100)'
+        self.flow_drain = False
+        self.transfer_id = ""
+        self.role = ""
+        self.is_receiver = False
+        self.source = ""
+        self.target = ""
+        self.first = ""  # undecorated number - '10'
+        self.last = ""  # undecorated number - '20'
+        self.settled = ""  # Disposition or Transfer settled field
+        self.disposition_state = "?absent?"
+        self.snd_settle_mode = ""  # Attach
+        self.rcv_settle_mode = ""  # Attach
+        self.transfer_data = ""  # protonized transfer data value
+        self.transfer_bare = ""  # bare message from transfer_data
+        self.transfer_hdr_annos = ""  # header and annotation sections
+        self.transfer_size = ""  # size declared by number in parenthesis
+        self.transfer_short_name = ""
+        self.transfer_settled = False
+        self.transfer_more = False
+        self.transfer_resume = False
+        self.transfer_aborted = False
+        self.link_short_name = ""
+        self.link_short_name_popup = ""
+        self.is_policy_trace = False  # line is POLICY (trace)
+        self.is_server_info = False  # line is SERVER (info)
+        self.is_router_ls = False  # line is ROUTER_LS (info)
+        self.fid = ""  # Log line (frame) id as used in javascript code
+        self.amqp_error = False
+        self.link_class = "normal"  # attach sees: normal, router, router-data (, management?)
+        self.disposition_display = ""
+        self.final_disposition = None
+
+
+class DescribedType:
+    """
+    Given a line like:
+        @typename(00) [key1=val1, ...]
+    Extract the typename and create a map of the key-val pairs
+    May recursively find embedded described types
+    """
+
+    @staticmethod
+    def is_dtype_name(name):
+        """
+        Return true if the name is a pn_trace described type name
+        :param name:
+        :return:
+        """
+        return (name.startswith('@') and
+                '(' in name and
+                name.endswith(')'))
+
+    @staticmethod
+    def get_key_and_val(kvp):
+        eqi = kvp.find('=')
+        return kvp[:eqi], kvp[eqi + 1:]
+
+    @staticmethod
+    def dtype_name(name):
+        if not DescribedType.is_dtype_name(name):
+            raise ValueError("Name '%s' is not a described type name" % name)
+        return name[1: name.find('(')]
+
+    @staticmethod
+    def dtype_number(name):
+        if not DescribedType.is_dtype_name(name):
+            raise ValueError("Name '%s' is not a described type name" % name)
+        return int(name[name.find('(') + 1: -1])
+
+    def __init__(self):
+        self.dict = {}
+        self.dtype_name = "unparsed"
+        self.dtype_number = 0
+
+    def __repr__(self):
+        return self._representation()
+
+    def _representation(self):
+        return "DescribedType %s( %d ) : %s" % (self.dtype_name, self.dtype_number, self.dict)
+
+    def add_field_to_dict(self, f_text, expected_key=None):
+        if '=' not in f_text:
+            raise ValueError("Field does not contain equal sign '%s'" % self.line)
+        if expected_key is not None and not f_text.startswith(expected_key):
+            raise ValueError("Transfer field %s not in order from line: %s" % (expected_key, self.line))
+        key, val = DescribedType.get_key_and_val(f_text)
+        if val.endswith(','):
+            val = val[:-1]
+        self.dict[key] = val
+
+    def process_transfer_tail_key(self):
+        keys = ["batchable", "aborted", "resume", "state", "rcv-settle-mode", "more", "settled", "message-format"]
+        for key in keys:
+            idx = self.line.rfind(key)
+            if idx != -1:
+                field = self.line[idx:]
+                self.add_field_to_dict(field, key)
+                self.line = self.line[:idx].strip()
+                return True
+        return False
+
+    def parseTransfer(self):
+        """
+        Figure out the described type fields for the transfer.
+        Transfers are handled specially with the ill-formatted binary delivery-tag field
+        :return:
+        """
+        # strip leading '[' and trailing ']'
+        if not (self.line.startswith('[') and self.line.endswith(']')):
+            raise ValueError("Described type not delimited with square brackets: '%s'" % self.line)
+        self.line = self.line[1:]
+        self.line = self.line[:-1]
+
+        # process fields from head
+        fHandle = self.line.split()[0]
+        self.add_field_to_dict(fHandle)
+        self.line = self.line[(len(fHandle) + 1):]
+
+        fDelId = self.line.split()[0]
+        self.add_field_to_dict(fDelId)
+        self.line = self.line[(len(fDelId) + 1):]
+
+        # process fields from tail
+        while len(self.line) > 0 and self.process_transfer_tail_key():
+            pass
+
+        # the remainder, no matter how unlikely, must be the delivery-tag
+        self.add_field_to_dict(self.line, "delivery-tag")
+
+    def parse_dtype_line(self, _dtype, _line):
+        """
+        Figure out the fields for the described type.
+        The line format is:
+          @describedtypename(num) [key=val [, key=val ...]]
+
+        Transfers are handled specially with the ill-formatted binary delivery-tag field
+        Note other performatives with ill-formatted binary data might get rejected. We
+        only struggle figuring out the delivery-tag because it happens so often.
+        :param _dtype: @describedtypename(num)
+        :param _line: [key=val [, key=val]...]
+        :return:
+        """
+        self.dtype = _dtype
+        self.oline = str(_line)
+        self.line = self.oline
+        self.dtype_name = DescribedType.dtype_name(self.dtype)
+        self.dtype_number = DescribedType.dtype_number(self.dtype)
+
+        # Process transfers separately.
+        # Transfer performatives will not call parse recursively while others might
+        if self.dtype_name == "transfer":
+            self.parseTransfer()
+            return
+
+        # strip leading '[' and trailing ']'
+        if not (self.line.startswith('[') and self.line.endswith(']')):
+            raise ValueError("Described type not delimited with square brackets: '%s'" % _line)
+        self.line = self.line[1:]
+        self.line = self.line[:-1]
+
+        # process fields
+        fields = splitter.Splitter.split(self.line)
+        while len(fields) > 0 and len(fields[0]) > 0:
+            if '=' not in fields[0]:
+                raise ValueError("Field does not contain equal sign '%s'" % fields[0])
+            key, val = DescribedType.get_key_and_val(fields[0])
+            del fields[0]
+            if DescribedType.is_dtype_name(val):
+                # recursing to process subtype
+                # pull subtype's data out of fields. The fields list belongs to parent.
+                subfields = []
+                if fields[0] == "[]":
+                    # degenerate case of empty subtype closing parent type
+                    #  @disposition .. state=@accepted(36) []]
+                    subfields.append("[]")
+                    del fields[0]
+                else:
+                    while len(fields) > 0:
+                        if fields[0].endswith('],'):
+                            subfields.append(fields[0][:-2])
+                            subfields.append(']')
+                            del fields[0]
+                            break
+                        if fields[0].endswith(']'):
+                            subfields.append(fields[0][:-1])
+                            subfields.append(']')
+                            del fields[0]
+                            break
+                        subfields.append(fields[0])
+                        del fields[0]
+
+                subtype = DescribedType()
+                subtype.parse_dtype_line(val, ' '.join(subfields))
+                self.dict[key] = subtype
+            elif val.startswith('{'):
+                # handle some embedded map: properties={:product=\"qpid-dispatch-router\", :version=\"1.3.0-SNAPSHOT\"}
+                # pull subtype's data out of fields. The fields list belongs to parent.
+                submap = {}
+                fields.insert(0, val)
+                skey, sval = DescribedType.get_key_and_val(fields[0][1:])
+                submap[skey] = sval
+                del fields[0]
+                while len(fields) > 0:
+                    if fields[0].endswith('},'):
+                        skey, sval = DescribedType.get_key_and_val(fields[0][:-2])
+                        submap[skey] = sval
+                        del fields[0]
+                        break
+                    if fields[0].endswith('}'):
+                        skey, sval = DescribedType.get_key_and_val(fields[0][:-1])
+                        submap[skey] = sval
+                        del fields[0]
+                        break
+                    skey, sval = DescribedType.get_key_and_val(fields[0])
+                    submap[skey] = sval
+                    del fields[0]
+                self.dict[key] = submap
+
+            else:
+                self.dict[key] = val
+
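+# Illustrative usage (editor's sketch, not part of the original self-test):
+# parse the body of a simple flow performative. Note that field values are
+# kept as strings.
+#
+#   dt = DescribedType()
+#   dt.parse_dtype_line('@flow(19)', '[handle=0, delivery-count=0, link-credit=250, drain=false]')
+#   assert dt.dtype_name == 'flow' and dt.dtype_number == 19
+#   assert dt.dict['link-credit'] == '250' and dt.dict['drain'] == 'false'
+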
+
+class ParsedLogLine(object):
+    """
+    Grind through the log line and record some facts about it.
+    * Constructor raises ValueError if the log line is to be ignored
+    * Constructor args:
+    ** log_index          0 for 'A', 1 for 'B'
+    ** routerInstance     which instance in log file
+    ** lineno             line number
+    ** line               the log line
+    ** common             common block object
+    """
+    server_trace_key = "SERVER (trace) ["
+    server_info_key = "SERVER (info) ["
+    policy_trace_key = "POLICY (trace) ["
+    router_ls_key = "ROUTER_LS (info)"
+    transfer_key = "@transfer(20)"
+
+    def sender_settle_mode_of(self, value):
+        if value == "0":
+            return "unsettled(0)"
+        elif value == "1":
+            return "settled(1)"
+        elif value == "2":
+            return "mixed(2)"  # default
+        else:
+            return "unknown(%s) % value"
+
+    def receiver_settle_mode_of(self, value):
+        if value == "0":
+            return "first(0)"
+        elif value == "1":
+            return "second(1)"
+        else:
+            return "unknown(%s) % value"
+
+    def resdict_value(self, resdict, key, if_absent):
+        return resdict[key] if key in resdict else if_absent
+
+    def highlighted(self, name, value, color):
+        result = ""
+        if value:
+            result = "<span style=\"background-color:%s\">%s</span>" % (color, name)
+        return result
+
+    def extract_facts(self):
+        perf = self.data.described_type.dtype_number
+        res = self.data
+        resdict = self.data.described_type.dict
+
+        # the performatives
+        # Note: res.channel is already populated
+        if perf == 0x10:
+            # Performative: open [0] always channel 0
+            res.name = "open"
+            res.channel = "0"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+            if res.direction == text.direction_in():
+                res.conn_peer = self.resdict_value(resdict, "container-id", "unknown")
+                res.web_show_str += (" (peer: %s)" % res.conn_peer)
+
+        elif perf == 0x11:
+            # Performative: begin [channel,remoteChannel]
+            # TODO: This has a bug where the local and remote channel numbers are confused.
+            #       Usually they are the same. See if anyone notices!
+            # res.channel
+            res.name = "begin"
+            res.remote = self.resdict_value(resdict, "remote-channel", "None")
+            res.channel_remote = "[%s,%s]" % (res.channel, res.remote)
+            res.web_show_str = "<strong>%s</strong> %s" % (res.name, res.channel_remote)
+
+        elif perf == 0x12:
+            # Performative:  attach [channel,handle] role name (source: src, target: tgt)
+            res.name = "attach"
+            res.handle = resdict["handle"]
+            res.role = "receiver" if resdict["role"] == "true" else "sender"
+            res.is_receiver = res.role == "receiver"
+            name = self.resdict_value(resdict, "name", "None")
+            res.link_short_name_popup = self.shorteners.short_link_names.translate(name, True)
+            res.link_short_name = self.shorteners.short_link_names.translate(name, False)
+            tmpsrc = self.resdict_value(resdict, "source", None)
+            tmptgt = self.resdict_value(resdict, "target", None)
+            res.snd_settle_mode = self.sender_settle_mode_of(
+                resdict["snd-settle-mode"]) if "snd-settle-mode" in resdict else "mixed"
+            res.rcv_settle_mode = self.receiver_settle_mode_of(
+                resdict["rcv-settle-mode"]) if "rcv-settle-mode" in resdict else "first"
+            caps = ""
+            if tmpsrc is not None:
+                res.source = self.resdict_value(tmpsrc.dict, "address", "none")
+                caps = self.resdict_value(tmpsrc.dict, "capabilities", "")
+            else:
+                res.source = "none"
+            if tmptgt is not None:
+                res.target = self.resdict_value(tmptgt.dict, "address", "none")
+                if caps == "":
+                    caps = self.resdict_value(tmptgt.dict, "capabilities", "")
+            else:
+                res.target = "none"
+            res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
+
+            if 'qd.router-data' in caps:
+                res.link_class = 'router-data'
+            elif 'qd.router' in caps:
+                res.link_class = 'router'
+            """
+            TODO:
+            res.source = short_endp_names.translate(res.source)
+            res.target = short_endp_names.translate(res.target)
+            res.snd_settle_mode = extract_name(tmpssm)
+            res.rcv_settle_mode = extract_name(tmprsm)
+            """
+            res.web_show_str = ("<strong>%s</strong> %s %s %s (source: %s, target: %s, class: %s)" %
+                                (res.name, colorize_bg(res.channel_handle), res.role, res.link_short_name_popup,
+                                 res.source, res.target, res.link_class))
+
+        elif perf == 0x13:
+            # Performative: flow [channel,handle]
+            res.name = "flow"
+            res.handle = resdict["handle"]
+            res.flow_deliverycnt = self.resdict_value(resdict, "delivery-count", "0")
+            res.flow_linkcredit = self.resdict_value(resdict, "link-credit", "0")
+            res.flow_drain = resdict.get("drain", "") == "true"
+            res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
+            res.flow_cnt_credit = "(%s,%s)" % (res.flow_deliverycnt, res.flow_linkcredit)
+            res.web_show_str = "<strong>%s</strong> %s (%s,%s) %s" % (
+                res.name, colorize_bg(res.channel_handle), res.flow_deliverycnt, res.flow_linkcredit,
+                self.highlighted("drain", res.flow_drain, "yellow"))
+
+        elif perf == 0x14:
+            # Performative: transfer [channel,handle] (id)
+            res.name = "transfer"
+            res.handle = resdict["handle"]
+            res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
+            res.delivery_id = self.resdict_value(resdict, "delivery-id", "none")
+            res.delivery_tag = self.resdict_value(resdict, "delivery-tag", "none")
+            res.settled = self.resdict_value(resdict, "settled", "false")
+            res.transfer_settled = resdict.get("settled", "") == "true"
+            res.transfer_more = resdict.get("more", "") == "true"
+            res.transfer_resume = resdict.get("resume", "") == "true"
+            res.transfer_aborted = resdict.get("aborted", "") == "true"
+            self.transfer_short_name = self.shorteners.short_data_names.translate(res.transfer_bare)
+            showdat = "<a href=\"#%s\">%s</a>" % (self.transfer_short_name, self.transfer_short_name)
+            res.web_show_str = "<strong>%s</strong>  %s (%s) %s %s %s %s %s - %s bytes" % (
+                res.name, colorize_bg(res.channel_handle), res.delivery_id,
+                self.highlighted("settled", res.transfer_settled, "green"),
+                self.highlighted("more", res.transfer_more, "purple"),
+                self.highlighted("resume", res.transfer_resume, "orange"),
+                self.highlighted("aborted", res.transfer_aborted, "yellow"),
+                showdat, res.transfer_size)
+
+        elif perf == 0x15:
+            # Performative: disposition [channel] (role first-last)
+            res.name = "disposition"
+            res.role = "receiver" if resdict["role"] == "true" else "sender"
+            res.is_receiver = res.role == "receiver"
+            res.first = self.resdict_value(resdict, "first", "0")
+            res.last = self.resdict_value(resdict, "last", res.first)
+            res.settled = self.resdict_value(resdict, "settled", "false")
+            state = resdict.get("state")
+            if state is not None:
+                res.disposition_state = state.dtype_name
+            ###    colorize_dispositions_not_accepted(proto, res, global_vars, count_anomalies)
+            res.web_show_str = ("<strong>%s</strong>  [%s] (%s %s-%s)" %
+                                (res.name, res.channel, res.role, res.first, res.last))
+
+        elif perf == 0x16:
+            # Performative: detach [channel, handle]
+            res.name = "detach"
+            res.handle = resdict["handle"]
+            ### TODO: colorize_performative_error(proto, res, global_vars, count_anomalies)
+            res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
+            res.web_show_str = "<strong>%s</strong> %s" % (res.name, colorize_bg(res.channel_handle))
+
+        elif perf == 0x17:
+            # Performative: end [channel]
+            res.name = "end"
+            ### TODO: colorize_performative_error(proto, res, global_vars, count_anomalies)
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x18:
+            # Performative: close [0] always channel 0
+            res.channel = "0"
+            res.name = "close"
+            ### colorize_performative_error(proto, res, global_vars, count_anomalies)
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x1d:
+            # transport:definitions error
+            res.name = "error"
+            descr = self.resdict_value(resdict, "description", "none")
+            res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, descr)
+
+        elif perf == 0x23:
+            # messaging:delivery-state received
+            res.name = "received"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x24:
+            # messaging:delivery-state accepted
+            res.name = "accepted"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x25:
+            # messaging:delivery-state rejected
+            res.name = "rejected"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x26:
+            # messaging:delivery-state released
+            res.name = "released"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x27:
+            # messaging:delivery-state modified
+            res.name = "modified"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x28:
+            # messaging:addressing source
+            res.name = "source"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x29:
+            # messaging:addressing target
+            res.name = "target"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x2b:
+            # messaging:addressing delete-on-close
+            res.name = "delete-on-close"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x2c:
+            # messaging:addressing delete-on-no-links
+            res.name = "delete-on-no-links"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x2d:
+            # messaging:addressing delete-on-no-messages
+            res.name = "delete-on-no-messages"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x2e:
+            # messaging:addressing delete-on-no-links-or-messages
+            res.name = "delete-on-no-links-or-messages"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x30:
+            # transactions:coordination coordinator
+            res.name = "coordinator"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x31:
+            # transactions:coordination declare
+            res.name = "declare"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x32:
+            # transactions:coordination discharge
+            res.name = "discharge"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x33:
+            # transactions:coordination declared
+            res.name = "declared"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x34:
+            # transactions:coordination transactional-state
+            res.name = "transactional-state"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x40:
+            # security:sasl sasl-mechanisms
+            res.name = "sasl-mechanisms"
+            mechs = self.resdict_value(resdict, "sasl-server-mechanisms", "none")
+            res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, mechs)
+
+        elif perf == 0x41:
+            # security:sasl sasl-init
+            res.name = "sasl-init"
+            mech = self.resdict_value(resdict, "mechanism", "none")
+            res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, mech)
+
+        elif perf == 0x42:
+            # security:sasl sasl-challenge
+            res.name = "sasl-challenge"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x43:
+            # security:sasl sasl-response
+            res.name = "sasl-response"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x44:
+            # security:sasl sasl-outcome
+            res.name = "sasl-outcome"
+            code = self.resdict_value(resdict, "code", "none")
+            res.web_show_str = "<strong>%s</strong> [%s] code=%s" % (res.name, res.channel, code)
+
+        elif perf == 0x70:
+            # messaging:message-format header
+            res.name = "header"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x71:
+            # messaging:message-format delivery-annotations
+            res.name = "delivery-annotations"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x72:
+            # messaging:message-format message-annotations
+            res.name = "message-annotations"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x73:
+            # messaging:message-format properties
+            res.name = "properties"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x74:
+            # messaging:message-format application-properties
+            res.name = "application-properties"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x75:
+            # messaging:message-format data
+            res.name = "data"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x76:
+            # messaging:message-format amqp-sequence
+            res.name = "amqp-sequence"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x77:
+            # messaging:message-format amqp-value
+            res.name = "amqp-value"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        elif perf == 0x78:
+            # messaging:message-format footer
+            res.name = "footer"
+            res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
+
+        else:
+            res.web_show_str = "HELP I'M A ROCK - Unknown performative: %s" % perf
+
+        if "error" in resdict:
+            res.amqp_error = True
+            res.web_show_str += (" <span style=\"background-color:yellow\">error</span> "
+                                 "%s %s" % (resdict["error"].dict["condition"], resdict["error"].dict["description"]))
+
+    def adverbl_link_to(self):
+        """
+        :return: html link to the main adverbl data display for this line
+        """
+        return "<a href=\"#%s\">%s</a>" % (self.fid, "%s%d_%s" %
+                                           (common.log_letter_of(self.index), self.instance, str(self.lineno)))
+
+    def __init__(self, _log_index, _instance, _lineno, _line, _comn, _router):
+        """
+        Process a naked qpid-dispatch log line
+        A log line looks like this:
+          2018-07-20 10:58:40.179187 -0400 SERVER (trace) [2]:0 -> @begin(17) [next-outgoing-id=0, incoming-window=2147483647, outgoing-window=2147483647] (/home/chug/git/qpid-dispatch/src/server.c:106)
+        The process is:
+         1. If the line ends with a filename:fileline then strip that away
+         2. Peel off the leading time of day and put that into self.datetime.
+            Lines with no datetime are presumed start-of-epoch.
+         3. Find (SERVER) or (POLICY). If absent then raise to reject message.
+         4. If connection number in square brackets '[2]' is missing then raise.
+         5. Extract connection number; save in data.conn_num
+         6. Create decorated data.conn_id "A0_2"
+         7. Extract data.channel if present. Raise if malformed.
+         8. Create a web_show_str for lines that may not parse any further. Like policy lines.
+         9. Extract the direction arrows
+
+        The log line is now reduced to a described type:
+          @describedtypename(num) [key=val [, key=val ...]]
+            except for transfers that have the funky transfer data at end.
+
+        :param _log_index:   The router prefix index 0 for A, 1 for B, ...
+        :param _instance:   The router instance
+        :param _lineno:
+        :param _line:
+        :param _comn:
+        :param _router:
+        """
+        if not (ParsedLogLine.server_trace_key in _line or
+                (ParsedLogLine.policy_trace_key in _line and "lookup_user:" in _line) or  # open (not begin, attach)
+                ParsedLogLine.server_info_key in _line or
+                ParsedLogLine.router_ls_key in _line):
+            raise ValueError("Line is not a candidate for parsing")
+        self.oline = _line  # original line
+        self.index = _log_index  # router prefix 0 for A, 1 for B
+        self.instance = _instance  # router instance in log file
+        self.lineno = _lineno  # log line number
+        self.comn = _comn
+        self.router = _router
+        self.prefixi = common.log_letter_of(self.index) + str(self.instance)  # prefix+instance A0
+        self.fid = "f_" + self.prefixi + "_" + str(self.lineno)  # frame id A0_100
+        self.shorteners = _comn.shorteners  # name shorteners
+
+        self.line = _line  # working line chopped, trimmed
+
+        self.data = LogLineData()  # parsed line fact store
+
+        # strip optional trailing file:line field
+        self.line = self.line.rstrip()
+        hasFileLine = False
+        if self.line.endswith(')'):
+            idxOP = self.line.rfind('(')
+            idxColon = self.line.rfind(':')
+            if idxOP != -1 and idxColon != -1:
+                if idxColon > idxOP:
+                    lNumStr = self.line[(idxColon + 1): (-1)]
+                    try:
+                        lnum = int(lNumStr)
+                        hasFileLine = True
+                    except:
+                        pass
+        if hasFileLine:
+            self.line = self.line[:self.line.rfind('(')].rstrip()
+
+        # Handle optional timestamp
+        # This whole project is brain dead without a timestamp. Just sayin'.
+        self.datetime = None
+        try:
+            self.datetime = datetime.strptime(self.line[:26], '%Y-%m-%d %H:%M:%S.%f')
+        except:
+            self.datetime = datetime(1970, 1, 1)
+
+        # extract connection number
+        sti = self.line.find(self.server_trace_key)
+        if sti < 0:
+            sti = self.line.find(self.policy_trace_key)
+            if sti < 0:
+                sti = self.line.find(self.server_info_key)
+                if sti < 0:
+                    sti = self.line.find(self.router_ls_key)
+                    if sti < 0:
+                        raise ValueError("Log keyword/level not found in line %s" % (self.line))
+                    else:
+                        self.line = self.line[sti + len(self.router_ls_key):]
+                        self.data.is_router_ls = True
+                        # this has no relationship to AMQP log lines
+                        return
+                else:
+                    self.line = self.line[sti + len(self.server_info_key):]
+                    self.data.is_server_info = True
+            else:
+                self.line = self.line[sti + len(self.policy_trace_key):]
+                self.data.is_policy_trace = True
+        else:
+            self.line = self.line[sti + len(self.server_trace_key):]
+        ste = self.line.find(']')
+        if ste < 0:
+            print("Failed to parse line ", _lineno, " : ", _line)
+            raise ValueError("'%s' not found in line %s" % ("]", self.line))
+        self.data.conn_num = self.line[:ste]
+        self.line = self.line[ste + 1:]
+
+        # create decorated connection id
+        self.data.conn_id = self.prefixi + "_" + self.data.conn_num
+
+        # get the session (channel) number
+        if self.line.startswith(':'):
+            self.line = self.line[1:]
+        sti = self.line.find(' ')
+        if sti < 0:
+            raise ValueError("space not found after channel number at head of line %s" % (self.line))
+        if sti > 0:
+            self.data.channel = self.line[:sti]
+        self.line = self.line[sti + 1:]
+        self.line = self.line.lstrip()
+
+        # cover for traces that don't get any better
+        self.data.web_show_str = ("<strong>%s</strong>" % self.line)
+
+        # policy lines have no direction and described type fields
+        if self.data.is_policy_trace or self.data.is_server_info:
+            return
+
+        # direction
+        if self.line.startswith('<') or self.line.startswith('-'):
+            self.data.direction = self.line[:2]
+            self.line = self.line[3:]
+            self.data.web_show_str = ("<strong>%s</strong>" % self.line)
+
+        # The log line is now reduced to a described type:
+        #  @describedtypename(num) [key=val [, key=val ...]]
+        # extract descriptor name
+        dname = self.line.split()[0]
+        self.line = self.line[(len(dname) + 1):]
+
+        # Dispose of the transfer data
+        if dname == self.transfer_key:
+            # Look for the '] (NNN) "' that separates the described type fields
+            # from the '(size) "data"'. Stick the required '(size) data' into
+            # data.transfer_data and delete it from the line.
+            rz = re.compile(r'\] \(\d+\) \"').search(self.line)
+            # aborted transfers may or may not have size/data in the log line
+            if rz is not None and len(rz.regs) > 0:
+                splitSt, splitTo = rz.regs[0]
+                self.data.transfer_size = self.line[splitSt + 3: splitTo - 3]
+                self.data.transfer_data = self.line[splitTo - 1:]  # discard (NNN) size field
+                self.line = self.line[: splitSt + 1]
+                # try to isolate the bare message
+                sti = self.data.transfer_data.find(r"\x00Ss")
+                if sti > 0:
+                    self.data.transfer_hdr_annos = self.data.transfer_data[:sti]
+                    self.data.transfer_bare = self.data.transfer_data[sti:]
+                else:
+                    raise ValueError("Transfer with no properties. Not really an error but just checking...")
+            else:
+                self.data.transfer_size = "0"
+                self.data.transfer_data = "(none)"
+
+        if DescribedType.is_dtype_name(dname):
+            self.data.described_type.parse_dtype_line(dname, self.line)
+            # data from incoming line is now parsed out into facts in .data
+            # Now cook the data to get useful displays
+            self.extract_facts()
+
+
+def parse_log_file(fn, log_index, comn):
+    """
+    Given a file name, return an array of Routers that hold the parsed lines.
+    Lines that don't parse are identified on stderr and then discarded.
+    :param fn: file name
+    :param log_index: router id 0 for 'A', 1 for 'B', ...
+    :param comn: common data
+    :return: list of Routers
+    """
+    instance = 0
+    lineno = 0
+    search_for_in_progress = True
+    rtrs = []
+    rtr = None
+    key1 = "SERVER (trace) ["  # AMQP traffic
+    key2 = "SERVER (info) Container Name:"  # Normal 'router is starting' restart discovery line
+    key3 = "ROUTER_LS (info)"  # a log line placed in separate pool of lines
+    keys = [key1, key3]
+    key4 = "ROUTER (info) Version:"  # router version line
+    key5 = "ROUTER (info) Router started in " # router mode
+    with open(fn, 'r') as infile:
+        for line in infile:
+            if search_for_in_progress:
+                # What if the log file has no record of the router starting?
+                # This is an in_progress router and it is a pre-existing router instance
+                # and not one found by restart discovery.
+                # Any key or AMQP line indicates a router in-progress
+                if any(s in line for s in keys) or ("[" in line and "]" in line):
+                    assert rtr is None
+                    rtr = router.Router(fn, log_index, instance)
+                    rtrs.append(rtr)
+                    search_for_in_progress = False
+                    rtr.restart_rec = router.RestartRecord(rtr, line, lineno + 1)
+            lineno += 1
+            if key2 in line:
+                # This line closes the current router, if any, and opens a new one
+                if rtr is not None:
+                    instance += 1
+                rtr = router.Router(fn, log_index, instance)
+                rtrs.append(rtr)
+                rtr.restart_rec = router.RestartRecord(rtr, line, lineno)
+                search_for_in_progress = False
+                rtr.container_name = line[(line.find(key2) + len(key2)):].strip().split()[0]
+            elif key3 in line:
+                pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
+                if pl is not None:
+                    if pl.data.is_router_ls:
+                        rtr.router_ls.append(pl)
+            elif key4 in line:
+                rtr.version = line[(line.find(key4) + len(key4)):].strip().split()[0]
+            elif key5 in line:
+                rtr.mode = line[(line.find(key5) + len(key5)):].strip().split()[0].lower()
+            elif "[" in line and "]" in line:
+                try:
+                    do_this = comn.arg_index_data
+                    if not do_this:
+                        # not indexing message data: parse this line only if it
+                        # is not a transfer/disposition/flow/empty frame
+                        do_this = not any(s in line for s in [' @transfer', ' @disposition', ' @flow', 'EMPTY FRAME'])
+                    if do_this:
+                        pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
+                        if pl is not None:
+                            rtr.lines.append(pl)
+                    else:
+                        comn.data_skipped += 1
+                except ValueError as ve:
+                    pass
+                except Exception as e:
+                    # t, v, tb = sys.exc_info()
+                    if hasattr(e, 'message'):
+                        sys.stderr.write("Failed to parse file '%s', line %d : %s\n" % (fn, lineno, e.message))
+                    else:
+                        sys.stderr.write("Failed to parse file '%s', line %d : %s\n" % (fn, lineno, e))
+                    # raise t, v, tb
+            else:
+                # ignore this log line
+                pass
+    return rtrs
+
+
+if __name__ == "__main__":
+
+    data = td.TestData().data()
+    log_index = 0  # from file for router A
+    instance = 0  # all from router instance 0
+    comn = common.Common()
+    try:
+        for i in range(len(data)):
+            temp = ParsedLogLine(log_index, instance, i, data[i], comn, None)
+            print(temp.datetime, temp.data.conn_id, temp.data.direction, temp.data.web_show_str)
+        pass
+    except:
+        traceback.print_exc(file=sys.stdout)
+        pass
+
+    comn2 = common.Common()
+    routers = parse_log_file('test_data/A-two-instances.log', 0, comn2)
+    if len(routers) != 2:
+        print("ERROR: Expected two router instances in log file")
+
+    t_b4_0 = datetime.strptime('2018-10-15 10:57:32.151673', '%Y-%m-%d %H:%M:%S.%f')
+    t_in_0 = datetime.strptime('2018-10-15 10:57:32.338183', '%Y-%m-%d %H:%M:%S.%f')
+    t_in_1 = datetime.strptime('2018-10-15 10:59:07.584498', '%Y-%m-%d %H:%M:%S.%f')
+    t_af_1 = datetime.strptime('2019-10-15 10:59:07.584498', '%Y-%m-%d %H:%M:%S.%f')
+
+    rtr, idx = router.which_router_tod(routers, t_b4_0)
+    assert rtr is routers[0] and idx == 0
+    rtr, idx = router.which_router_tod(routers, t_in_0)
+    assert rtr is routers[0] and idx == 0
+    rtr, idx = router.which_router_tod(routers, t_in_1)
+    assert rtr is routers[1] and idx == 1
+    rtr, idx = router.which_router_tod(routers, t_af_1)
+    assert rtr is routers[1] and idx == 1
+
+    pass

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/router.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/router.py b/bin/log_scraper/router.py
new file mode 100755
index 0000000..7f27682
--- /dev/null
+++ b/bin/log_scraper/router.py
@@ -0,0 +1,226 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# A single router log file may contain data from multiple instances of
+# that router booting and running. Thus there may be several different
+# connections labeled [0] and these connections may be to different
+# routers on each run.
+#
+# The 'router' class defined here represents a single boot-and-run
+# instance from the log file.
+#
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import traceback
+import datetime
+
+import amqp_detail
+import common
+import text
+
+class RestartRecord():
+    def __init__(self, _router, _line, _lineno):
+        self.router = _router
+        self.line = _line
+        self.lineno = _lineno
+        try:
+            self.datetime = datetime.datetime.strptime(self.line[:26], '%Y-%m-%d %H:%M:%S.%f')
+        except:
+            self.datetime = datetime.datetime(1970, 1, 1)
+
+    def __repr__(self):
+        return "%d instance %d start %s #%d" % (self.router.log_index, self.router.instance,
+                                                self.datetime, self.lineno)
+
+class Router():
+    '''A single dispatch boot-and-run instance from a log file'''
+
+    def __init__(self, _fn, _log_index, _instance):
+        self.fn = _fn                 # log file name
+        self.log_index = _log_index   # 0=A, 1=B, ...
+        self.instance = _instance     # log file instance of router
+        self.iletter = common.log_letter_of(self.log_index)  # A
+        self.iname = self.iletter + str(self.instance)       # A0
+
+        # discovered Container Name
+        self.container_name = None
+
+        # discovered Version
+        self.version = None
+
+        # discovered mode
+        self.mode = None
+
+        # restart_rec - when this router was identified in log file
+        self.restart_rec = None
+
+        # lines - the log lines as ParsedLogLine objects
+        self.lines = []
+
+        # conn_list - List of connections discovered in log lines
+        # Sorted in ascending order and not necessarily in packed sequence.
+        self.conn_list = []
+
+        # conn_log_lines - count of log lines per connection
+        self.conn_log_lines = {}
+
+        # conn_xfer_bytes - count of bytes transferred over this connection
+        self.conn_xfer_bytes = {}
+
+        # connection_to_frame_map
+        self.conn_to_frame_map = {}
+
+        # conn_peer - peer container long name
+        #   key= connection id '1', '2'
+        #   val= original peer container name
+        self.conn_peer = {}
+
+        # conn_peer_display - peer container display name
+        #   key= connection id '1', '2'
+        #   val= display name
+        # Peer display name shortened with popup if necessary
+        self.conn_peer_display = {}
+
+        # conn_peer_connid - display value for peer's connection id
+        #   key= connection id '1', '2'
+        #   val= peer's connid 'A.0_3', 'D.3_18'
+        self.conn_peer_connid = {}
+
+        # conn_dir - arrow indicating connection origin direction
+        #   key= connection id '1', '2'
+        #   val= '<-' peer created conn, '->' router created conn
+        self.conn_dir = {}
+
+        # router_ls - link state 'ROUTER_LS (info)' lines
+        self.router_ls = []
+
+        # open and close times
+        self.conn_open_time = {}   # first log line with [N] seen
+        self.conn_close_time = {}  # last close log line seen
+
+        # details: for each connection, for each session, for each link, whaaa?
+        self.details = None
+
+
+    def discover_connection_facts(self, comn):
+        '''
+        Discover all the connections in this router-instance log
+        For each connection:
+         * determine connection direction
+         * discover name of peer container
+         * generate html to use to display the peer nickname
+         * count log lines
+         * count transfer bytes
+        :param comn:
+        :return:
+        '''
+        for item in self.lines:
+            conn_num = int(item.data.conn_num)
+            id = item.data.conn_id           # full name A0_3
+            if conn_num not in self.conn_list:
+                cdir = ""
+                if item.data.direction != "":
+                    cdir = item.data.direction
+                else:
+                    if "Connecting" in item.data.web_show_str:
+                        cdir = text.direction_out()
+                    elif "Accepting" in item.data.web_show_str:
+                        cdir = text.direction_in()
+                self.conn_list.append(conn_num)
+                self.conn_to_frame_map[id] = []
+                self.conn_dir[id] = cdir
+                self.conn_log_lines[id] = 0   # line counter
+                self.conn_xfer_bytes[id] = 0  # byte counter
+                self.conn_open_time[id] = item
+            self.conn_to_frame_map[id].append(item)
+            # inbound open handling
+            if item.data.name == "open" and item.data.direction == text.direction_in():
+                if item.data.conn_id in self.conn_peer:
+                    sys.exit('ERROR: file: %s connection %s has multiple connection peers' %
+                             (self.fn, id))
+                self.conn_peer[id] = item.data.conn_peer
+                self.conn_peer_display[id] = comn.shorteners.short_peer_names.translate(
+                    item.data.conn_peer, True)
+            # close monitor
+            if item.data.name == "close":
+                self.conn_close_time[id] = item
+            # connection log-line count
+            self.conn_log_lines[id] += 1
+            # transfer byte count
+            if item.data.name == "transfer":
+                self.conn_xfer_bytes[id] += int(item.data.transfer_size)
+        self.conn_list = sorted(self.conn_list)
+        self.details = amqp_detail.AllDetails(self, comn)
+
+    def conn_id(self, conn_num):
+        '''
+        Given this router's connection number return the global connection id
+        :param conn_num: connection number
+        :return: conn_id in the form A0_3
+        '''
+        return self.iname + "_" + str(conn_num)
+
+    def is_interior(self):
+        return self.mode == "interior"
+
+
+def which_router_tod(router_list, at_time):
+    '''
+    Find a router in a list based on time of day
+    :param router_list: a list of Router objects
+    :param at_time: the datetime record identifying the router
+    :return: tuple: (a router from the list or None, router index)
+    '''
+    if len(router_list) == 0:
+        return (None, 0)
+    if len(router_list) == 1:
+        return (router_list[0], 0)
+    for i in range(1, len(router_list)):
+        if at_time < router_list[i].restart_rec.datetime:
+            return (router_list[i-1], i-1)
+    return (router_list[-1], len(router_list)-1)
+
+def which_router_id_tod(routers, id, at_time):
+    '''
+    Find a router by container_name and time of day
+    :param routers: a list of router instance lists
+    :param id: the container name
+    :param at_time: datetime of interest
+    :return: tuple (router, index) for the router that had that container name at that time; (None, 0) if not found
+    '''
+    for routerlist in routers:
+        if routerlist[0].container_name == id:
+            return which_router_tod(routerlist, at_time)
+    return (None, 0)
+
+
+
+if __name__ == "__main__":
+    try:
+        pass
+    except:
+        traceback.print_exc(file=sys.stdout)
+        pass
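
A minimal usage sketch for which_router_tod() (editor's illustration; 'A.log' is a
placeholder file name, and the sibling modules from this commit must be importable,
e.g. by running from bin/log_scraper). The timestamps mirror parser.py's self-test:

    import datetime
    import router

    r0 = router.Router('A.log', 0, 0)
    r0.restart_rec = router.RestartRecord(r0, '2018-10-15 10:57:32.151673 start', 1)
    r1 = router.Router('A.log', 0, 1)
    r1.restart_rec = router.RestartRecord(r1, '2018-10-15 10:59:07.584498 restart', 50)

    t = datetime.datetime.strptime('2018-10-15 10:58:00.000000', '%Y-%m-%d %H:%M:%S.%f')
    rtr, idx = router.which_router_tod([r0, r1], t)
    assert rtr is r0 and idx == 0   # t falls before the second instance's restart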

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/splitter.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/splitter.py b/bin/log_scraper/splitter.py
new file mode 100755
index 0000000..a0353f7
--- /dev/null
+++ b/bin/log_scraper/splitter.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import traceback
+import test_data as td
+
+
+class Splitter():
+    @staticmethod
+    def split(line):
+        """
+        Split a log line into fields.
+         * allow commas and spaces in quoted strings.
+         * split on ', ' and on ' '.
+           strip trailing commas between fields.
+         * quoted fields must have both quotes
+        :param line:
+        :return:
+        """
+        result = []
+        indqs = False
+        pending_comma = False
+        res = ""
+        for i in range(len(line)):
+            c = line[i]
+            if c == '\"':
+                if pending_comma:
+                    res += ','
+                    pending_comma = False
+                indqs = not indqs
+                res += c
+            elif c == ',':
+                if pending_comma:
+                    res += c
+                pending_comma = True
+            elif c == ' ':
+                if indqs:
+                    if pending_comma:
+                        res += ','
+                        pending_comma = False
+                    res += c
+                else:
+                    if res != '':
+                        if pending_comma:
+                            pending_comma = False
+                        result.append(res)
+                        res = ''
+            else:
+                res += c
+        if res != '':
+            result.append(str(res))
+        if indqs:
+            raise ValueError("SPLIT ODD QUOTES: %s", line)
+        # print ("SPLIT: line: %s" % line)
+        # print ("SPLIT: flds: %s" % result)
+        return result
+
+
+if __name__ == "__main__":
+
+    try:
+        for line in td.TestData().data():
+            if "transfer" not in line:
+                print(Splitter.split(line))
+                print()
+            else:
+                pass  # splitter does not split transfers
+        pass
+    except:
+        traceback.print_exc(file=sys.stdout)
+        pass
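
For reference, an illustration (editor's example) of the quoting rules Splitter.split()
implements, runnable from the bin/log_scraper directory:

    from splitter import Splitter

    fields = Splitter.split('handle=0, name="a name, with comma", role=true')
    assert fields == ['handle=0', 'name="a name, with comma"', 'role=true']

Commas and spaces inside double quotes are preserved; the trailing commas that separate
fields are stripped.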

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/test_data.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/test_data.py b/bin/log_scraper/test_data.py
new file mode 100755
index 0000000..a711931
--- /dev/null
+++ b/bin/log_scraper/test_data.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import traceback
+
+class TestData():
+    '''
+    Extract list of test log lines from a data file.
+    The file holds literal log lines from some noteworthy test logs.
+    Embedding the lines as a python source code data statement involves escaping
+    double quotes and runs the risk of corrupting the data.
+    '''
+    def __init__(self, fn="test_data/test_data.txt"):
+        with open(fn, 'r') as f:
+            self.lines = [line.rstrip('\n') for line in f]
+
+    def data(self):
+        return self.lines
+
+
+if __name__ == "__main__":
+
+    try:
+        datasource = TestData()
+        for line in datasource.data():
+            print (line)
+        pass
+    except:
+        traceback.print_exc(file=sys.stdout)
+        pass
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/test_data/A-two-instances.log
----------------------------------------------------------------------
diff --git a/bin/log_scraper/test_data/A-two-instances.log b/bin/log_scraper/test_data/A-two-instances.log
new file mode 100644
index 0000000..05d43f3
--- /dev/null
+++ b/bin/log_scraper/test_data/A-two-instances.log
@@ -0,0 +1,113 @@
+2018-10-15 10:57:32.149418 -0400 AGENT (debug) Add entity: LogEntity(enable=trace+, identity=log/DEFAULT, includeSource=False, includeTimestamp=True, module=DEFAULT, name=log/DEFAULT, outputFile=taj-GRN.log, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.149739 -0400 AGENT (debug) Add entity: LogEntity(identity=log/HTTP, module=HTTP, name=log/HTTP, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.149990 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_LS, module=ROUTER_LS, name=log/ROUTER_LS, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.150202 -0400 AGENT (debug) Add entity: LogEntity(identity=log/PYTHON, module=PYTHON, name=log/PYTHON, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.150459 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_MA, module=ROUTER_MA, name=log/ROUTER_MA, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.150681 -0400 AGENT (debug) Add entity: LogEntity(identity=log/CONN_MGR, module=CONN_MGR, name=log/CONN_MGR, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.150949 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_HELLO, module=ROUTER_HELLO, name=log/ROUTER_HELLO, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.151172 -0400 AGENT (debug) Add entity: LogEntity(identity=log/SERVER, module=SERVER, name=log/SERVER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.151411 -0400 AGENT (debug) Add entity: LogEntity(identity=log/POLICY, module=POLICY, name=log/POLICY, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.151673 -0400 AGENT (debug) Add entity: LogEntity(identity=log/CONTAINER, module=CONTAINER, name=log/CONTAINER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.151943 -0400 AGENT (debug) Add entity: LogEntity(identity=log/AGENT, module=AGENT, name=log/AGENT, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.152198 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ERROR, module=ERROR, name=log/ERROR, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.152471 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_CORE, module=ROUTER_CORE, name=log/ROUTER_CORE, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.152716 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER, module=ROUTER, name=log/ROUTER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.152958 -0400 AGENT (debug) Add entity: LogEntity(identity=log/AUTHSERVICE, module=AUTHSERVICE, name=log/AUTHSERVICE, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:57:32.336561 -0400 AGENT (debug) Add entity: RouterEntity(allowResumableLinkRoute=True, allowUnsettledMulticast=False, area=0, defaultDistribution=balanced, helloIntervalSeconds=1, helloMaxAgeSeconds=3, hostName=taj.localdomain, id=central-qdr-green, mode=interior, raIntervalFluxSeconds=4, raIntervalSeconds=30, remoteLsMaxAgeSeconds=60, saslConfigName=qdrouterd, type=org.apache.qpid.dispatch.router, workerThreads=4)
+2018-10-15 10:57:32.337966 -0400 SERVER (warning) HTTP support is not available
+2018-10-15 10:57:32.338005 -0400 SERVER (info) Container Name: central-qdr-green
+2018-10-15 10:57:32.338082 -0400 CONTAINER (trace) Container Initialized
+2018-10-15 10:57:32.338183 -0400 CONTAINER (trace) Node Type Registered - router
+2018-10-15 10:57:32.338237 -0400 CONTAINER (trace) Node of type 'router' installed as default node
+2018-10-15 10:57:32.338297 -0400 ROUTER (info) Router started in Interior mode, area=0 id=central-qdr-green
+2018-10-15 10:57:32.338313 -0400 ROUTER (info) Version: 1.5.0-SNAPSHOT
+2018-10-15 10:57:32.338374 -0400 POLICY (trace) Policy Initialized
+2018-10-15 10:57:32.339516 -0400 ROUTER (info) Router Engine Instantiated: id=central-qdr-green instance=1539615452 max_routers=128
+2018-10-15 10:57:32.378087 -0400 SERVER (notice) Listening on 0.0.0.0:amqp
+2018-10-15 10:57:32.378189 -0400 SERVER (notice) Listening on 0.0.0.0:20001
+2018-10-15 10:57:33.342104 -0400 ROUTER_CORE (trace) Core action 'send_to'
+2018-10-15 10:57:33.342320 -0400 ROUTER_HELLO (trace) SENT: HELLO(id=central-qdr-green pv=1 area=0 inst=1539615452 seen=[])
+2018-10-15 10:57:42.865701 -0400 SERVER (trace) [1]: Accepting incoming connection to '0.0.0.0:amqp'
+2018-10-15 10:57:42.865785 -0400 POLICY (trace) ALLOW Connection '127.0.0.1' based on global connection count. nConnections= 1
+2018-10-15 10:57:42.865801 -0400 SERVER (info) [1]: Accepted connection to 0.0.0.0:amqp from 127.0.0.1:55854
+2018-10-15 10:57:42.865844 -0400 SERVER (trace) [1]:  <- AMQP
+2018-10-15 10:57:42.865872 -0400 SERVER (trace) [1]:0 <- @open(16) [container-id="d45d5f6a-ade4-437b-92eb-5978ddbffb4e", hostname="127.0.0.1", channel-max=32767]
+2018-10-15 10:57:42.865889 -0400 SERVER (trace) [1]:0 <- @begin(17) [next-outgoing-id=0, incoming-window=2147483647, outgoing-window=2147483647]
+2018-10-15 10:57:42.865926 -0400 SERVER (trace) [1]:0 <- @attach(18) [name="29b38f24-8c87-4964-8a19-0b4f3aa44660", handle=0, role=true, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [address="collectd/telemetry", durable=0, timeout=0, dynamic=false], target=@target(41) [durable=0, timeout=0, dynamic=false], initial-delivery-count=0, max-message-size=0]
+2018-10-15 10:57:42.865981 -0400 SERVER (trace) [1]:  -> AMQP
+2018-10-15 10:57:42.866110 -0400 ROUTER_CORE (trace) Core action 'connection_opened'
+2018-10-15 10:57:42.866198 -0400 SERVER (trace) [1]:0 -> @open(16) [container-id="central-qdr-green", max-frame-size=16384, channel-max=32767, idle-time-out=8000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.5.0-SNAPSHOT", :"qd.conn-id"=1}]
+2018-10-15 10:57:42.866217 -0400 ROUTER_CORE (trace) Core action 'link_first_attach'
+2018-10-15 10:57:42.866263 -0400 DEFAULT (trace) Parse tree search for 'collectd/telemetry'
+2018-10-15 10:57:42.866274 -0400 DEFAULT (trace) Parse tree match not found
+2018-10-15 10:57:42.866284 -0400 DEFAULT (trace) Parse tree search for 'collectd/telemetry'
+2018-10-15 10:57:42.866323 -0400 SERVER (trace) [1]:0 -> @begin(17) [remote-channel=0, next-outgoing-id=0, incoming-window=2147483647, outgoing-window=2147483647]
+2018-10-15 10:57:42.866441 -0400 SERVER (trace) [1]:0 -> @attach(18) [name="29b38f24-8c87-4964-8a19-0b4f3aa44660", handle=0, role=false, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [address="collectd/telemetry", durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false], target=@target(41) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false], initial-delivery-count=0, max-message-size=0]
+2018-10-15 10:57:42.866591 -0400 SERVER (trace) [1]:0 <- @flow(19) [next-incoming-id=0, incoming-window=2147483647, next-outgoing-id=0, outgoing-window=2147483647, handle=0, delivery-count=0, link-credit=10, drain=false]
+2018-10-15 10:59:07.454660 -0400 AGENT (debug) Add entity: LogEntity(enable=trace+, identity=log/DEFAULT, includeSource=False, includeTimestamp=True, module=DEFAULT, name=log/DEFAULT, outputFile=taj-GRN.log, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.454984 -0400 AGENT (debug) Add entity: LogEntity(identity=log/HTTP, module=HTTP, name=log/HTTP, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.455250 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_LS, module=ROUTER_LS, name=log/ROUTER_LS, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.455460 -0400 AGENT (debug) Add entity: LogEntity(identity=log/PYTHON, module=PYTHON, name=log/PYTHON, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.455720 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_MA, module=ROUTER_MA, name=log/ROUTER_MA, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.455943 -0400 AGENT (debug) Add entity: LogEntity(identity=log/CONN_MGR, module=CONN_MGR, name=log/CONN_MGR, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.456202 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_HELLO, module=ROUTER_HELLO, name=log/ROUTER_HELLO, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.456422 -0400 AGENT (debug) Add entity: LogEntity(identity=log/SERVER, module=SERVER, name=log/SERVER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.456658 -0400 AGENT (debug) Add entity: LogEntity(identity=log/POLICY, module=POLICY, name=log/POLICY, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.456926 -0400 AGENT (debug) Add entity: LogEntity(identity=log/CONTAINER, module=CONTAINER, name=log/CONTAINER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.457184 -0400 AGENT (debug) Add entity: LogEntity(identity=log/AGENT, module=AGENT, name=log/AGENT, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.457435 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ERROR, module=ERROR, name=log/ERROR, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.457704 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER_CORE, module=ROUTER_CORE, name=log/ROUTER_CORE, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.457951 -0400 AGENT (debug) Add entity: LogEntity(identity=log/ROUTER, module=ROUTER, name=log/ROUTER, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.458187 -0400 AGENT (debug) Add entity: LogEntity(identity=log/AUTHSERVICE, module=AUTHSERVICE, name=log/AUTHSERVICE, type=org.apache.qpid.dispatch.log)
+2018-10-15 10:59:07.583032 -0400 AGENT (debug) Add entity: RouterEntity(allowResumableLinkRoute=True, allowUnsettledMulticast=False, area=0, defaultDistribution=balanced, helloIntervalSeconds=1, helloMaxAgeSeconds=3, hostName=taj.localdomain, id=central-qdr-green, mode=interior, raIntervalFluxSeconds=4, raIntervalSeconds=30, remoteLsMaxAgeSeconds=60, saslConfigName=qdrouterd, type=org.apache.qpid.dispatch.router, workerThreads=4)
+2018-10-15 10:59:07.584383 -0400 SERVER (warning) HTTP support is not available
+2018-10-15 10:59:07.584421 -0400 SERVER (info) Container Name: central-qdr-green
+2018-10-15 10:59:07.584498 -0400 CONTAINER (trace) Container Initialized
+2018-10-15 10:59:07.584600 -0400 CONTAINER (trace) Node Type Registered - router
+2018-10-15 10:59:07.584656 -0400 CONTAINER (trace) Node of type 'router' installed as default node
+2018-10-15 10:59:07.584730 -0400 ROUTER (info) Router started in Interior mode, area=0 id=central-qdr-green
+2018-10-15 10:59:07.584749 -0400 ROUTER (info) Version: 1.5.0-SNAPSHOT
+2018-10-15 10:59:07.584807 -0400 POLICY (trace) Policy Initialized
+2018-10-15 10:59:07.586002 -0400 ROUTER (info) Router Engine Instantiated: id=central-qdr-green instance=1539615547 max_routers=128
+2018-10-15 10:59:07.586126 -0400 ROUTER_CORE (info) Initializing core module: edge_router
+2018-10-15 10:59:07.586184 -0400 ROUTER_CORE (info) Initializing core module: core_test_hooks
+2018-10-15 10:59:07.622921 -0400 SERVER (notice) Operational, 4 Threads Running (process ID 22085)
+2018-10-15 10:59:07.622943 -0400 SERVER (info) Running in DEBUG Mode
+2018-10-15 10:59:07.623098 -0400 SERVER (notice) Listening on 0.0.0.0:20001
+2018-10-15 10:59:07.623218 -0400 SERVER (notice) Listening on 0.0.0.0:amqp
+2018-10-15 10:59:08.587951 -0400 ROUTER_CORE (trace) Core action 'send_to'
+2018-10-15 10:59:08.588089 -0400 ROUTER_HELLO (trace) SENT: HELLO(id=central-qdr-green pv=1 area=0 inst=1539615547 seen=[])
+2018-10-15 10:59:08.588233 -0400 ROUTER_CORE (trace) Core action 'send_to'
+2018-10-15 10:59:08.588319 -0400 ROUTER_LS (trace) SENT: RA(id=central-qdr-green pv=1 area=0 inst=1539615547 ls_seq=0 mobile_seq=0)
+2018-10-15 10:59:08.588402 -0400 ROUTER_CORE (trace) Core action 'process_tick'
+2018-10-15 10:59:09.588673 -0400 ROUTER_CORE (trace) Core action 'send_to'
+2018-10-15 10:59:09.588779 -0400 ROUTER_HELLO (trace) SENT: HELLO(id=central-qdr-green pv=1 area=0 inst=1539615547 seen=[])
+2018-10-15 10:59:09.590682 -0400 ROUTER_CORE (trace) Core action 'process_tick'
+2018-10-15 10:59:09.621170 -0400 SERVER (trace) Accepting connection on 0.0.0.0:20001
+2018-10-15 10:59:09.621347 -0400 SERVER (trace) [1]: Accepting incoming connection to '0.0.0.0:20001'
+2018-10-15 10:59:09.621503 -0400 POLICY (trace) ALLOW Connection '192.168.1.7' based on global connection count. nConnections= 1
+2018-10-15 10:59:09.621530 -0400 SERVER (info) [1]: Accepted connection to 0.0.0.0:20001 from 192.168.1.7:36494
+2018-10-15 10:59:09.621569 -0400 SERVER (trace) [1]:  <- SASL
+2018-10-15 10:59:09.621595 -0400 SERVER (trace) [1]:  -> SASL
+2018-10-15 10:59:09.622163 -0400 SERVER (trace) Accepting connection on 0.0.0.0:20001
+2018-10-15 10:59:09.622280 -0400 SERVER (trace) [2]: Accepting incoming connection to '0.0.0.0:20001'
+2018-10-15 10:59:09.622387 -0400 POLICY (trace) ALLOW Connection '192.168.1.7' based on global connection count. nConnections= 2
+2018-10-15 10:59:09.622410 -0400 SERVER (info) [2]: Accepted connection to 0.0.0.0:20001 from 192.168.1.7:36496
+2018-10-15 10:59:09.624269 -0400 SERVER (trace) [2]:  <- SASL
+2018-10-15 10:59:09.624310 -0400 SERVER (trace) [2]:  -> SASL
+2018-10-15 10:59:09.624629 -0400 SERVER (trace) [1]:0 -> @sasl-mechanisms(64) [sasl-server-mechanisms=@PN_SYMBOL[:ANONYMOUS, :"DIGEST-MD5", :PLAIN]]
+2018-10-15 10:59:09.624660 -0400 SERVER (trace) [2]:0 -> @sasl-mechanisms(64) [sasl-server-mechanisms=@PN_SYMBOL[:ANONYMOUS, :"DIGEST-MD5", :PLAIN]]
+2018-10-15 10:59:09.626696 -0400 SERVER (trace) [1]:0 <- @sasl-init(65) [mechanism=:ANONYMOUS, initial-response=b"anonymous@ratchet.localdomain"]
+2018-10-15 10:59:09.626787 -0400 SERVER (trace) [2]:0 <- @sasl-init(65) [mechanism=:ANONYMOUS, initial-response=b"anonymous@ratchet.localdomain"]
+2018-10-15 10:59:09.626861 -0400 SERVER (trace) [2]:0 -> @sasl-outcome(68) [code=0]
+2018-10-15 10:59:09.626894 -0400 SERVER (trace) [1]:0 -> @sasl-outcome(68) [code=0]
+2018-10-15 10:59:09.630886 -0400 SERVER (trace) [1]:  <- AMQP
+2018-10-15 10:59:09.630971 -0400 SERVER (trace) [1]:0 <- @open(16) [container-id="collectd", hostname="taj", max-frame-size=16384, channel-max=32767, idle-time-out=8000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.4.0-SNAPSHOT", :"qd.conn-id"=5}]
+2018-10-15 10:59:09.631167 -0400 SERVER (trace) [1]:  -> AMQP
+2018-10-15 10:59:09.631197 -0400 ROUTER_CORE (trace) Core action 'connection_opened'
+2018-10-15 10:59:09.631246 -0400 SERVER (trace) [1]:0 -> @open(16) [container-id="central-qdr-green", max-frame-size=16384, channel-max=32767, idle-time-out=8000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.5.0-SNAPSHOT", :"qd.conn-id"=1}]
+2018-10-15 10:59:09.631949 -0400 SERVER (trace) [2]:  <- AMQP
+2018-10-15 10:59:09.632028 -0400 SERVER (trace) [2]:0 <- @open(16) [container-id="central-qdr-blue", hostname="taj", max-frame-size=16384, channel-max=32767, idle-time-out=8000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.4.0-SNAPSHOT", :"qd.conn-id"=5}]
+2018-10-15 10:59:09.632117 -0400 ROUTER_CORE (trace) Core action 'connection_opened'
+2018-10-15 10:59:09.632148 -0400 SERVER (trace) [2]:  -> AMQP
+2018-10-15 10:59:09.632221 -0400 SERVER (trace) [2]:0 -> @open(16) [container-id="central-qdr-green", max-frame-size=16384, channel-max=32767, idle-time-out=8000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.5.0-SNAPSHOT", :"qd.conn-id"=2}]

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/test_data/test_data.txt
----------------------------------------------------------------------
diff --git a/bin/log_scraper/test_data/test_data.txt b/bin/log_scraper/test_data/test_data.txt
new file mode 100644
index 0000000..6c91258
--- /dev/null
+++ b/bin/log_scraper/test_data/test_data.txt
@@ -0,0 +1,28 @@
+2018-07-20 10:58:40.176528 -0400 SERVER (trace) [2] Connecting to 127.0.0.1:23731 (/home/chug/git/qpid-dispatch/src/server.c:1052)
+2018-07-20 10:58:40.176628 -0400 SERVER (trace) [2]:  -> SASL (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.176841 -0400 SERVER (trace) [2]:  <- SASL (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.176869 -0400 SERVER (trace) [2]:0 <- @sasl-mechanisms(64) [sasl-server-mechanisms=@PN_SYMBOL[:ANONYMOUS]] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.178334 -0400 SERVER (trace) [2]:0 -> @sasl-init(65) [mechanism=:ANONYMOUS, initial-response=b"anonymous@ratchet.localdomain"] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.178470 -0400 SERVER (trace) [2]:0 <- @sasl-outcome(68) [code=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.178756 -0400 SERVER (trace) [2]:  <- AMQP (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.178799 -0400 SERVER (trace) [2]:0 <- @open(16) [container-id="A", max-frame-size=16384, channel-max=32767, idle-time-out=60000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.3.0-SNAPSHOT"}] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.179187 -0400 SERVER (trace) [2]:0 -> @begin(17) [next-outgoing-id=0, incoming-window=2147483647, outgoing-window=2147483647] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.180136 -0400 SERVER (trace) [2]:0 <- @attach(18) [name="qdlink.xgqQHT+EqTH14nA", handle=3, role=true, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router-data"], target=@target(41) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router-data"], initial-delivery-count=0, max-message-size=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:40.180171 -0400 SERVER (trace) [2]:0 <- @flow(19) [next-incoming-id=0, incoming-window=2147483647, next-outgoing-id=0, outgoing-window=2147483647, handle=1, delivery-count=0, link-credit=1000, drain=false] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:41.811112 -0400 SERVER (trace) [2]:0 <- @transfer(20) [handle=2, delivery-id=1, delivery-tag=b"\x0f\x00\x00\x00\x00\x00\x00\x00", message-format=0] (320) "\x00SpE\x00Sr\xd1\x00\x00\x00k\x00\x00\x00\x08\xa3\x0bx-opt-qd.to\xa1\x15_topo/0/C/$management\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x09\x00\x00\x00\x01\xa1\x030/A\xa3\x10x-opt-qd.ingress\xa1\x030/A\xa3\x09x-opt-qd.\xa1\x01X\x00Ss\xd0\x00\x00\x002\x00\x00\x00\x06@@@@\xa1$amqp:/_topo/0/A/temp.Q+EzDvtPUeSkHaD\xa0\x0212\x00St\xd1\x00\x00\x00j\x00\x00\x00\x08\xa1\x09operation\xa1\x05QUERY\xa1\x0aentityType\xa1\x1forg.apache.qpid.dispatch.router\xa1\x04type\xa1\x13org.amqp.management\xa1\x04name\xa1\x04self\x00Sw\xd1\x00\x00\x00\x15\x00\x00\x00\x02\xa1\x0eattributeNamesE" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:41.811312 -0400 SERVER (trace) [2]:0 -> @disposition(21) [role=true, first=1, settled=true, state=@accepted(36) []] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:55.893356 -0400 SERVER (trace) [2]:0 -> @close(24) [error=@error(29) [condition=:"amqp:connection:framing-error", description="connection aborted"]] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:55.893366 -0400 SERVER (trace) [2]:  <- EOS (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:56.180136 -0400 SERVER (trace) [2]:0 <- @attach(18) [name="97e94a05-1e69-492f-b9b4-282beb79484e#_aafc09ac-efc1-4f51-b038-c529408b7b61", handle=0, role=true, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [durable=0, timeout=0, dynamic=true, dynamic-node-properties={:"lifetime-policy"=@:"amqp:delete-on-close:list" []}], target=@target(41) [durable=0, timeout=0, dynamic=false], initial-delivery-count=0, max-message-size=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:57.368594 -0400 SERVER (trace) [2]:0 <- @transfer(20) [handle=2, delivery-id=160, delivery-tag=b""\x03\x00\x00\x00\x00\x00\x00", message-format=0] (120) "\x00SpE\x00Sr\xd1\x00\x00\x00`\x00\x00\x00\x08\xa3\x0bx-opt-qd.to\xa1\x0aclosest/02\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x09\x00\x00\x00\x01\xa1\x030/A\xa3\x10x-opt-qd.ingress\xa1\x030/A\xa3\x09x-opt-qd.\xa1\x01X\x00SsE\x00Swq\x00\x00\x023" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-07-20 10:58:57.468234 -0400 SERVER (trace) [2]:0 <- @transfer(20) [handle=2, delivery-id=129, delivery-tag=b"\xff\x02\x00\x00\x00\x00\x00\x00", message-format=0] (120) "\x00SpE\x00Sr\xd1\x00\x00\x00`\x00\x00\x00\x08\xa3\x0bx-opt-qd.to\xa1\x0aclosest/02\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x09\x00\x00\x00\x01\xa1\x030/A\xa3\x10x-opt-qd.ingress\xa1\x030/A\xa3\x09x-opt-qd.\xa1\x01X\x00SsE\x00Swq\x00\x00\x02"" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-03 10:59:37.457537 -0400 SERVER (trace) [5]:0 -> @transfer(20) [handle=0, delivery-id=6, delivery-tag=b" \x00\x00\x00\x00\x00\x00\x00", message-format=0, settled=true] (258) "\x00Sp\xd0\x00\x00\x00\x05\x00\x00\x00\x01B\x00Sr\xd1\x00\x00\x00_\x00\x00\x00\x08\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x13\x00\x00\x00\x03\xa1\x030/C\xa1\x030/B\xa1\x030/A\xa3\x10x-opt-qd.ingress\xa1\x030/C\xa3\x09x-opt-qd.\xa1\x01X\xa3\x09x-opt-qd.\xa1\x01X\x00Ss\xd0\x00\x00\x00%\x00\x00\x00\x06@@\xa1\x1aamqp:/_topo/0/all/qdrouter@@@\x00St\xd1\x00\x00\x00\x10\x00\x00\x00\x02\xa1\x06opcode\xa1\x02RA\x00Sw\xd1\x00\x00\x00A\x00\x00\x00\x0c\xa1\x06ls_seqT\x01\xa1\x02pvT\x01\xa1\x04area\xa1\x010\xa1\x08instanceq[dm\xd7\xa1\x0amobile_seqT\x00\xa1\x02id\xa1\x01C" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-03 10:59:48.006844 -0400 SERVER (trace) [6]:0 -> @transfer(20) [handle=0, delivery-id=447, delivery-tag=b""\x02\x00\x00\x00\x00\x00\x00", message-format=0, settled=true] (208) "\x00Sp\xd0\x00\x00\x00\x05\x00\x00\x00\x01B\x00Sr\xd1\x00\x00\x00U\x00\x00\x00\x08\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x09\x00\x00\x00\x01\xa1\x030/A\xa3\x10x-opt-qd.ingress\xa1\x030/A\xa3\x09x-opt-qd.\xa1\x01X\xa3\x09x-opt-qd.\xa1\x01X\x00Ss\xd0\x00\x00\x00#\x00\x00\x00\x06@@\xa1\x18amqp:/_topo/0/D/qdrouter@@@\x00St\xd1\x00\x00\x00\x11\x00\x00\x00\x02\xa1\x06opcode\xa1\x03LSR\x00Sw\xd1\x00\x00\x00\x1a\x00\x00\x00\x06\xa1\x02pvT\x01\xa1\x02id\xa1\x01A\xa1\x04area\xa1\x010" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-03 10:59:49.480073 -0400 SERVER (trace) [7]:0 -> @transfer(20) [handle=0, delivery-id=3, delivery-tag=b"\x8b\x01\x00\x00\x00\x00\x00\x00", message-format=0, settled=true] (233) "\x00Sp\xd0\x00\x00\x00\x05\x00\x00\x00\x01B\x00Sr\xd1\x00\x00\x00U\x00\x00\x00\x08\xa3\x0ex-opt-qd.trace\xd0\x00\x00\x00\x09\x00\x00\x00\x01\xa1\x030/C\xa3\x10x-opt-qd.ingress\xa1\x030/C\xa3\x09x-opt-qd.\xa1\x01X\xa3\x09x-opt-qd.\xa1\x01X\x00Ss\xd0\x00\x00\x00/\x00\x00\x00\x06@@\xa1$amqp:/_topo/0/C/temp.y0iWM_zBNSbDane@@@\x00St\xd1\x00\x00\x004\x00\x00\x00\x04\xa1\x11statusDescription\xa1\x0aNo Content\xa1\x0astatusCodeq\x00\x00\x00\xcc\x00Sw\xd1\x00\x00\x00\x04\x00\x00\x00\x00" (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-03 10:59:43.485362 -0400 SERVER (trace) [8]:0 -> @transfer(20) [handle=0, delivery-id=3, delivery-tag=b"o\x00\x00\x00\x00\x00\x00\x00", message-format=0, settled=true] (1589) "\x00Sp\xd0\x00\x00\x00\x05\x00\x00\x00\x01B\x00Ss\xd0\x00\x00\x00/\x00\x00\x00\x06@@\xa1$amqp:/_topo/0/D/temp._HHAoiYZ39HlEEH@@@\x00St\xd1\x00\x00\x00,\x00\x00\x00\x04\xa1\x11statusDescription\xa1\x02OK\xa1\x0astatusCodeq\x00\x00\x00\xc8\x00Sw\xd1\x00\x00\x05\xb5\x00\x00\x00\x04\xa1\x0eattributeNames\xd0\x00\x00\x00\x95\x00\x00\x00\x0b\xa1\x08linkType\xa1\x07linkDir\xa1\x08linkName\xa1\x0aowningAddr\xa1\x08capacity\xa1\x10undeliveredCount\xa1\x0eunsettledCount\xa1\x0dacceptedCount\xa1\x0drejectedCount\xa1\x0dreleasedCount\xa1\x0dmodifiedCount\xa1\x07results\xd0\x00\x00\x04\xf9\x00\x00\x00\x10\xd0\x00\x00\x00<\x00\x00\x00\x0b\xa1\x0erouter-control\xa1\x02in\xa1\x16qdlink.EZD43Jm5VvSht0w@p\x00\x00\x03\xe8DDDDDD\xd0\x00\x00\x00F\x00\x00\x00\x0b\xa1\x0erouter-control\xa1\x03out\xa1\x16qdlink.STppD563DOcP2Z
 R\xa1\x08Lqdhellop\x00\x00\x03\xe8DDDDDD\xd0\x00\x00\x00:\x00\x00\x00\x0b\xa1\x0cinter-router\xa1\x02in\xa1\x16qdlink.inIy3q1zJObSUhB@p\x00\x00\x03\xe8DDDDDD\xd0\x00\x00\x00<\x00\x00\x00\x0b\xa1\x0cinter-route"... (truncated) (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-06 13:41:42.793480 -0400 SERVER (trace) [7]:0 -> (EMPTY FRAME)
+2018-08-16 13:53:22.276203 -0400 SERVER (trace) [22]:0 <- @attach(18) [name="6216f14e-16db-4000-89eb-9716c02ef9e3-1acf0c3a-bdaa-4ce2-8786-e39c65bdb7bf", handle=0, role=true, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [durable=0, timeout=0, dynamic=true, dynamic-node-properties={:"x-opt-qd.address"="pulp.task.abc"}], target=@target(41) [durable=0, timeout=0, dynamic=false], initial-delivery-count=0, max-message-size=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-16 13:56:59.038310 -0400 SERVER (trace) [8]:0 -> @transfer(20) [handle=0, delivery-id=1, delivery-tag=b"\x14\x00\x00\x00\x00\x00\x00\x00", message-format=0, settled=true, aborted=true] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-24 14:29:26.821739 -0400 SERVER (trace) [2]:0 <- @attach(18) [name="qdlink.YoQaLsapwDzqhOL", handle=1, role=true, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router"], target=@target(41) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router"], initial-delivery-count=0, max-message-size=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-08-24 14:29:26.821750 -0400 SERVER (trace) [1]:0 <- @attach(18) [name="qdlink.qvRSF0ysELu13cM", handle=2, role=false, snd-settle-mode=2, rcv-settle-mode=0, source=@source(40) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router-data"], target=@target(41) [durable=0, expiry-policy=:"session-end", timeout=0, dynamic=false, capabilities=:"qd.router-data"], initial-delivery-count=0, max-message-size=0] (/home/chug/git/qpid-dispatch/src/server.c:106)
+2018-10-11 14:55:31.302512 -0400 SERVER (trace) [4]:0 -> @open(16) [container-id="A", max-frame-size=16384, channel-max=32767, idle-time-out=60000, offered-capabilities=:"ANONYMOUS-RELAY", properties={:product="qpid-dispatch-router", :version="1.4.0-SNAPSHOT", :"qd.conn-id"=4}] (/home/chug/git/qpid-dispatch/src/server.c:106)

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/text.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/text.py b/bin/log_scraper/text.py
new file mode 100755
index 0000000..ba6a1f8
--- /dev/null
+++ b/bin/log_scraper/text.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+"""Common text strings"""
+
+
+def direction_in():
+    """Log line text indicating received by router"""
+    return "<-"
+
+
+def direction_out():
+    """Log line text indicating transmitted by router"""
+    return "->"
+
+
+def lozenge():
+    """
+    :return: HTML document lozenge character
+    """
+    return "&#9674;"
+
+
+def nbsp():
+    """
+    :return: HTML Non-breaking space
+    """
+    return "&#160;"
+
+
+"""Large text strings used by main that change infrequently"""
+
+
+# html head, start body
+def web_page_head():
+    return """<!DOCTYPE html>
+<html>
+<head>
+<title>Adverbl Analysis - qpid-dispatch router logs</title>
+
+<style>
+    * { 
+    font-family: sans-serif; 
+}
+table {
+    border-collapse: collapse;
+}
+table, td, th {
+    border: 1px solid black;
+    padding: 3px;
+}
+</style>
+
+<script src="http://ajax.googleapis.com/ajax/libs/dojo/1.4/dojo/dojo.xd.js" type="text/javascript"></script>
+<!-- <script src="http://ajax.googleapis.com/ajax/libs/dojo/1.4/dojo/dojo.xd.js" type="text/javascript"></script> -->
+<script type="text/javascript">
+function node_is_visible(node)
+{
+  if(dojo.isString(node))
+    node = dojo.byId(node);
+  if(!node) 
+    return false;
+  return node.style.display == "block";
+}
+function set_node(node, str)
+{
+  if(dojo.isString(node))
+    node = dojo.byId(node);
+  if(!node) return;
+  node.style.display = str;
+}
+function toggle_node(node)
+{
+  if(dojo.isString(node))
+    node = dojo.byId(node);
+  if(!node) return;
+  set_node(node, (node_is_visible(node)) ? 'none' : 'block');
+}
+function hide_node(node)
+{
+  set_node(node, 'none');
+}
+function show_node(node)
+{
+  set_node(node, 'block');
+}
+
+function go_back()
+{
+  window.history.back();
+}
+"""
+
+
+def web_page_toc():
+    return """
+<h3>Contents</h3>
+<table>
+<tr> <th>Section</th>                                                 <th>Description</th> </tr>
+<tr><td><a href=\"#c_logfiles\"       >Log files</a></td>             <td>Router and log file info</td></tr>
+<tr><td><a href=\"#c_rtrinstances\"   >Router Instances</a></td>      <td>Router reboot chronology</td></tr>
+<tr><td><a href=\"#c_connections\"    >Connections</a></td>           <td>Connection overview; per connection log data view control</td></tr>
+<tr><td><a href=\"#c_conndetails\"    >Connection Details</a></td>    <td>Connection details; frames sorted by link</td></tr>
+<tr><td><a href=\"#c_noteworthy\"     >Noteworthy log lines</a></td>  <td>AMQP errors and interesting flags</td></tr>
+<tr><td><a href=\"#c_logdata\"        >Log data</a></td>              <td>Main AMQP traffic table</td></tr>
+<tr><td><a href=\"#c_messageprogress\">Message progress</a></td>      <td>Tracking messages through the system</td></tr>
+<tr><td><a href=\"#c_linkprogress\"   >Link name propagation</a></td> <td>Tracking link names</td></tr>
+<tr><td><a href=\"#c_rtrdump\"        >Router name index</a></td>     <td>Short vs. long router container names</td></tr>
+<tr><td><a href=\"#c_peerdump\"       >Peer name index</a></td>       <td>Short vs. long peer names</td></tr>
+<tr><td><a href=\"#c_linkdump\"       >Link name index</a></td>       <td>Short vs. long link names</td></tr>
+<tr><td><a href=\"#c_msgdump\"        >Transfer name index</a></td>   <td>Short names representing transfer data</td></tr>
+<tr><td><a href=\"#c_ls\"             >Router link state</a></td>     <td>Link state analysis</td></tr>
+</table>
+<hr>
+"""
+
+
+if __name__ == "__main__":
+    pass


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@qpid.apache.org
For additional commands, e-mail: commits-help@qpid.apache.org


[2/2] qpid-dispatch git commit: DISPATCH-1191 - Import crolke's adverbl project This closes #421

Posted by tr...@apache.org.
DISPATCH-1191 - Import crolke's adverbl project
This closes #421


Project: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/repo
Commit: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/commit/5c3411a1
Tree: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/tree/5c3411a1
Diff: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/diff/5c3411a1

Branch: refs/heads/master
Commit: 5c3411a1a66e883f1c509b2668f870feac26134b
Parents: 42b4ab5
Author: Chuck Rolke <cr...@redhat.com>
Authored: Tue Nov 13 15:53:37 2018 -0500
Committer: Ted Ross <tr...@redhat.com>
Committed: Tue Nov 27 09:26:40 2018 -0500

----------------------------------------------------------------------
 bin/log_scraper/README.md                     | 179 ++++
 bin/log_scraper/amqp_detail.py                | 633 ++++++++++++++
 bin/log_scraper/common.py                     | 139 +++
 bin/log_scraper/main.py                       | 785 +++++++++++++++++
 bin/log_scraper/nicknamer.py                  | 123 +++
 bin/log_scraper/parser.py                     | 930 +++++++++++++++++++++
 bin/log_scraper/router.py                     | 226 +++++
 bin/log_scraper/splitter.py                   |  94 +++
 bin/log_scraper/test_data.py                  |  54 ++
 bin/log_scraper/test_data/A-two-instances.log | 113 +++
 bin/log_scraper/test_data/test_data.txt       |  28 +
 bin/log_scraper/text.py                       | 137 +++
 12 files changed, 3441 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/README.md
----------------------------------------------------------------------
diff --git a/bin/log_scraper/README.md b/bin/log_scraper/README.md
new file mode 100644
index 0000000..64ec977
--- /dev/null
+++ b/bin/log_scraper/README.md
@@ -0,0 +1,179 @@
+#  Scraper - Render qpid-dispatch log files
+
+Scraper is a spinoff of https://github.com/ChugR/Adverb that uses qpid-dispatch log
+files as the data source instead of pcap trace files. Scraper is a Python processing
+engine that does not require Wireshark or any network trace capture utilities.
+
+## Apache License, Version 2.0
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License.  You may obtain a copy of the
+License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations under the License.
+
+
+## Concepts
+
+Scraper is a data scraping program. It reads qpid-dispatch router log files,
+categorizes and sorts the data, and produces an HTML summary.
+
+From each log file Scraper extracts (a sample trace line is shown after this list):
+ * Router version
+ * Router container name
+ * Router restart times. A single log file may contain data from several router
+   reboot instances.
+ * Router link state calculation reports
+ * Interrouter and client connections
+ * AMQP facts
+   * Connection peers
+   * Link pair establishment
+   * Transfer traffic
+   * Message disposition
+   * Flow and credit propagation
+   
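+For example, a single SERVER trace line such as the following (taken from the
+test data shipped with Scraper) carries the connection number, channel,
+direction, and performative fields that Scraper extracts:
+
+    2018-10-15 10:57:42.866591 -0400 SERVER (trace) [1]:0 <- @flow(19) [next-incoming-id=0, incoming-window=2147483647, next-outgoing-id=0, outgoing-window=2147483647, handle=0, delivery-count=0, link-credit=10, drain=false]
+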
+ Scraper sorts these facts with microsecond precision using the log timestamps.
+ 
+ Then Scraper merges the data from any number (as long as that number is less than 27!)
+ of independent log files into a single view.
+ 
+ Next Scraper performs some higher-level analysis.
+ 
+ * Routers are identified by letter rather than by the container name: 'A', 'B', and
+   so on. Log data in a file is grouped into instances and is identified by a number
+   for that router instance: 'A0', 'A1', and so on.
+ * Per router each AMQP data log entry is sorted into per-connection data lists.
+ * Connection data lists are searched to discover router-to-router and router-to-client
+   connection pairs.
+ * Per connection data are subdivided into per-session and per-link lists, sorting
+   the AMQP data into per-link-only views.
+ * Bulk AMQP data may be shown or hidden on arbitrary per-connection selections.
+ * Noteworthy AMQP frames are identified. By hand these are hard to find.
+   * AMQP errors
+   * Presettled transfers
+   * Transfers with 'more' bit set
+   * Resumed transfers
+   * Aborted transfers
+   * Flow with 'drain' set
+ * Transfer messages are sorted by signature. Then a table is made showing where
+   each message leaves or arrives over a connection.
+ * Settlement state for each unsettled transfer is identified, displayed, and
+   shown with delta and elapsed time values. See example in the Advanced section.
+ * Link name propagation for each named link is shown in a table.
+ * Router, peer, and link names can get really long. Nicknames for each are used
+   with popups showing the full name.
+ * Transfer data is identified with nicknames but without the popups. The popups
+   were so large that Firefox refused to show them, and they were not especially useful anyway.
+ * Router link state cost calculations are merged with router restart records to
+   create a comprehensive link state cost view. Routers may publish cost reports that
+   do not include all other routers. In this case the other routers are identified
+   visually to indicate that they are unreachable.
+
+### The basics
+
+* Enable router logging
+
+The routers need to generate the proper logging for Scraper.
+The information classes are exposed by enabling the log levels shown in the
+following table; a minimal configuration sketch appears after the table.
+
+| Log level      | Information               |
+|----------------|---------------------------|
+| ROUTER info    | Router version            |
+| SERVER info    | Router restart discovery  |
+| SERVER trace   | AMQP control and data     |
+| ROUTER_LS info | Router link state reports |
+
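+As a starting point, a log section in qdrouterd.conf similar to the following
+sketch enables all of these classes at once. This is only a sketch: the
+attribute names match the log entities recorded in the sample logs, and the
+output file name is just an example.
+
+    log {
+        module: DEFAULT
+        enable: trace+
+        outputFile: qdrouterd.log
+    }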
+
+* Run your tests to populate log files used as Scraper input.
+
+* Run Scraper to generate web content
+
+    bin/scraper/main.py somefile.log > somefile.html
+
+    bin/scraper/main.py *.log > somefile.html
+
+* Profit
+
+    firefox somefile.html
+
+###  Advanced
+
+* Merging multiple qpid-dispatch log files
+
+Scraper accepts multiple log file names on the command line and
+merges the log data according to the router log timestamps.
+
+    bin/scraper/main.py A.log B.log C.log > abc.html
+
+Note that the qpid-dispatch host system clocks for merged log files
+must be synchronized to within a few microseconds in order for the
+result to be useful. This is easiest to achieve when the routers are
+run on the same CPU core on a single system. Running Fedora 27 and 28
+on two hosts in a router network where the routers run _ntp_ to the same
+time provider produces perfectly acceptable results.
+
+Scraper does a decent job merging log files created within a
+qpid-dispatch self test.
+
+* Wow, that's a lot of data
+
+Indeed it is, and good luck figuring it out. Sometimes, though, it's too much.
+The AMQP transfer data analysis is the worst offender in terms of CPU time, 
+run-time memory usage, and monstrous html output files.
+Scraper provides one command line switch to
+turn off the data analysis:
+
+    bin/scraper/main.py --no-data FILE [FILE ...]
+    
+In no-data mode AMQP transfer, disposition, and flow frames in the log files are
+discarded. The resulting web page still includes lots of useful information with
+connection info, link name propagation, and link state analysis.
+
+* How to read the transfer analysis tables. Here's an instance:
+
+
+|Src    |Time           |Rtr|ConnId|Dir|ConnId|Peer  |T delta  |T elapsed |Settlement                   |S elapsed
+|-------|---------------|---|------|---|------|------|---------|----------|-----------------------------|---------
+|A0_2035|09:50:52.027975|A0 |A0_11 |<- |      |peer_7|0.000000 |0.000000  |(accepted settled 0.005142 S)|0.005142
+|A0_2071|09:50:52.028556|A0 |A0_6  |-> |D0_4  |D     |0.000581 |0.000581  |(accepted settled 0.004253 S)|0.004834
+|D0_1587|09:50:52.028696|D0 |D0_4  |<- |A0_6  |A     |0.000140 |0.000721  |(accepted settled 0.003988 S)|0.004709
+|D0_1612|09:50:52.029260|D0 |D0_1  |-> |C0_6  |C     |0.000564 |0.001285  |(accepted settled 0.003044 S)|0.004329
+|C0_1610|09:50:52.029350|C0 |C0_6  |<- |D0_1  |D     |0.000090 |0.001375  |(accepted settled 0.002846 S)|0.004221
+|C0_1625|09:50:52.029672|C0 |C0_1  |-> |B0_5  |B     |0.000322 |0.001697  |(accepted settled 0.002189 S)|0.003886
+|B0_1438|09:50:52.029760|B0 |B0_5  |<- |C0_1  |C     |0.000088 |0.001785  |(accepted settled 0.002002 S)|0.003787
+|B0_1451|09:50:52.030117|B0 |B0_7  |-> |      |peer_7|0.000357 |0.002142  |(accepted settled 0.001318 S)|0.003460
+
+Each row in this table records when a single transfer and its corresponding settlement were seen entering or exiting a router; a worked check of the timing columns follows the field table below.
+
+| Field        | Contents |
+|--------------|----------|
+| Src        | Router instance and file line number where the transfer was seen |
+| Time       | Timestamp |
+| Rtr        | Router letter id and instance |
+| ConnId     | Router connection id |
+| Dir        | Transfer direction. _<-_ indicates into the router, _->_ indicates out of the router |
+| ConnId     | Peer's connection id. Blank if the peer is a normal client and not a router. |
+| Peer       | Peer's name. _peer_7_ would show the peer's container name in a popup. |
+| T delta    | Time since the previous row |
+| T elapsed  | Time since the message first entered the system |
+| Settlement | Settlement state and the time delta since the message time in the Time column for this row. The settlement disposition log line is hyperlinked from the word _accepted_. |
+| S elapsed  | Settlement elapsed time: the difference between the accepted disposition log record and the time when the message first entered the system. |
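+
+As the field descriptions above imply, each row satisfies
+S elapsed = T elapsed + settlement delta. Checking the first two rows of the
+example table:
+
+    row A0_2035: 0.000000 + 0.005142 = 0.005142
+    row A0_2071: 0.000581 + 0.004253 = 0.004834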
+
+Row by row, it is easiest to read each line from left to right:
+* A0 connection 11 received the transfer from peer_7.
+* A0 connection 6 sent the message to D0 connection 4.
+* D0 connection 4 received the message from A0 connection 6.
+
+and so on. This message came from a sender on peer_7, went through routers A, D, C, and B, and finally was
+returned to a listener on peer_7. The receiver got the message 0.002142 S after the sender sent it, and the
+sender received the accepted disposition 0.005142 S after sending the message.
+
+The transmit times are in order from top to bottom and the settlement times are in order from bottom to top.
+
+This table will morph a little if one of the routers is missing from the analysis. If log file D.log were not
+presented to Scraper then the table would not make as much sense as when all logs are included.

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/amqp_detail.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/amqp_detail.py b/bin/log_scraper/amqp_detail.py
new file mode 100755
index 0000000..042a19b
--- /dev/null
+++ b/bin/log_scraper/amqp_detail.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import traceback
+
+import common
+import text
+
+"""
+Given a map of all connections with lists of the associated frames,
+analyze and show per-connection, per-session, and per-link details.
+
+This is done in a two-step process:
+ * Run through the frame lists and generate an intermediate structure
+   with the details for display.
+ * Generate the html from the detail structure.
+This strategy leaves room for a third step that gleans more details
+from the static structure. For instance, if router A
+sends a transfer to router B then router A's details could show 
+how long it took for the transfer to reach router B. Similarly
+router B's details could show how long ago router A sent the transfer. 
+"""
+
+
+class ConnectionDetail():
+    """
+    Holds facts about sessions over the connection's lifetime
+    """
+
+    def __init__(self, id):
+        # id in form 'A_15':
+        #   A is the router logfile key
+        #   15 is the log connection number [15]
+        self.id = id
+
+        # seq_no differentiates items that otherwise have the same identifiers.
+        # Sessions, for example: a given connection may have N distinct sessions
+        # with local channel 0.
+        self.seq_no = 0
+
+        # combined amqp_error frames on this connection
+        self.amqp_errors = 0
+
+        # session_list holds all SessionDetail records either active or retired
+        # Sessions for a connection are identified by the local channel number.
+        # There may be many sessions all using the same channel number.
+        # This list holds all of them.
+        self.session_list = []
+
+        # this map indexed by the channel refers to the current item in the session_list
+        self.chan_map = {}
+
+        # count of AMQP performatives for this connection that are not accounted
+        # properly in session and link processing.
+        # Server Accepting, SASL mechs, init, outcome, AMQP, and so on
+        self.unaccounted_frame_list = []
+
+    def FindSession(self, channel):
+        """
+        Find the current session by channel number
+        :param channel: the performative channel
+        :return: the session or None
+        """
+        return self.chan_map[channel] if channel in self.chan_map else None
+
+    def GetId(self):
+        return self.id
+
+    def GetSeqNo(self):
+        self.seq_no += 1
+        return str(self.seq_no)
+
+    def EndChannel(self, channel):
+        # take existing session out of connection chan map
+        if channel in self.chan_map:
+            del self.chan_map[channel]
+
+    def GetLinkEventCount(self):
+        c = 0
+        for session in self.session_list:
+            c += session.GetLinkEventCount()
+        return c
+
+
+class SessionDetail:
+    """
+    Holds facts about a session
+    """
+
+    def __init__(self, conn_detail, conn_seq, start_time):
+        # parent connection
+        self.conn_detail = conn_detail
+
+        # some seq number
+        self.conn_epoch = conn_seq
+
+        # Timing
+        self.time_start = start_time
+        self.time_end = start_time
+
+        self.amqp_errors = 0
+
+        self.channel = -1
+        self.peer_chan = -1
+
+        self.direction = ""
+
+        # seq_no differentiates items that otherwise have the same identifiers.
+        # Links, for example.
+        self.seq_no = 0
+
+        self.log_line_list = []
+
+        # link_list holds LinkDetail records
+        # Links for a session are identified by a (handle, remote-handle) number pair.
+        # There may be many links all using the same handle pairs.
+        # This list holds all of them.
+        self.link_list = []
+
+        # link_list holds all links either active or retired
+        # this map indexed by the handle refers to the current item in the link_list
+        self.input_handle_link_map = {}  # link created by peer
+        self.output_handle_link_map = {}  # link created locally
+
+        # Link name in attach finds link details in link_list
+        # This map contains the link handle to disambiguate the name
+        self.link_name_to_detail_map = {}
+        #
+        # The map contains the pure link name and is used only to resolve name collisions
+        self.link_name_conflict_map = {}
+
+        # count of AMQP performatives for this connection that are not accounted
+        # properly in link processing
+        self.session_frame_list = []
+
+        # Session dispositions
+        # Sender/receiver dispositions may be sent or received
+        self.rx_rcvr_disposition_map = {}  # key=delivery id, val=disposition plf
+        self.rx_sndr_disposition_map = {}  # key=delivery id, val=disposition plf
+        self.tx_rcvr_disposition_map = {}  # key=delivery id, val=disposition plf
+        self.tx_sndr_disposition_map = {}  # key=delivery id, val=disposition plf
+
+    def FrameCount(self):
+        count = 0
+        for link in self.link_list:
+            count += len(link.frame_list)
+        count += len(self.session_frame_list)
+        return count
+
+    def FindLinkByName(self, attach_name, link_name_unambiguous, parsed_log_line):
+        # find conflicted name
+        cnl = None
+        if attach_name in self.link_name_conflict_map:
+            cnl = self.link_name_conflict_map[attach_name]
+            if cnl.input_handle == -1 and cnl.output_handle == -1:
+                cnl = None
+        # find non-conflicted name
+        nl = None
+        if link_name_unambiguous in self.link_name_to_detail_map:
+            nl = self.link_name_to_detail_map[link_name_unambiguous]
+            if nl.input_handle == -1 and nl.output_handle == -1:
+                nl = None
+        # report conflict
+        # TODO: There's an issue with this logic generating false positives
+        # if nl is None and (not cnl is None):
+        #     parsed_log_line.data.amqp_error = True
+        #     parsed_log_line.data.web_show_str += " <span style=\"background-color:yellow\">Link name conflict</span>"
+        # return unambiguous link
+        return nl
+
+    def FindLinkByHandle(self, handle, find_remote):
+        """
+        Find the current link by handle number,
+        qualifying the lookup based on packet direction.
+        :param handle: the performative handle
+        :param find_remote: True to search links created by the peer,
+                            False to search links created locally
+        :return: the link or None
+        """
+        if find_remote:
+            return self.input_handle_link_map[handle] if handle in self.input_handle_link_map else None
+        else:
+            return self.output_handle_link_map[handle] if handle in self.output_handle_link_map else None
+
+    def GetId(self):
+        return self.conn_detail.GetId() + "_" + str(self.conn_epoch)
+
+    def GetSeqNo(self):
+        self.seq_no += 1
+        return self.seq_no
+
+    def DetachOutputHandle(self, handle):
+        # take existing link out of session handle map
+        if handle in self.output_handle_link_map:
+            nl = self.output_handle_link_map[handle]
+            del self.output_handle_link_map[handle]
+            nl.output_handle = -1
+
+    def DetachInputHandle(self, handle):
+        # take existing link out of session remote handle map
+        if handle in self.input_handle_link_map:
+            nl = self.input_handle_link_map[handle]
+            del self.input_handle_link_map[handle]
+            nl.input_handle = -1
+
+    def DetachHandle(self, handle, is_remote):
+        if is_remote:
+            self.DetachInputHandle(handle)
+        else:
+            self.DetachOutputHandle(handle)
+
+    def GetLinkEventCount(self):
+        c = 0
+        for link in self.link_list:
+            c += link.GetLinkEventCount()
+        return c
+
+
+class LinkDetail():
+    """
+    Holds facts about a link endpoint
+    This structure binds input and output links with same name
+    """
+
+    def __init__(self, session_detail, session_seq, link_name, start_time):
+        # parent session
+        self.session_detail = session_detail
+
+        # some seq number
+        self.session_seq = session_seq
+
+        # link name
+        self.name = link_name  # plf.data.link_short_name
+        self.display_name = link_name  # show short name; hover to see long name
+
+        # Timing
+        self.time_start = start_time
+        self.time_end = start_time
+
+        self.amqp_errors = 0
+
+        # paired handles
+        self.output_handle = -1
+        self.input_handle = -1
+
+        # link originator
+        self.direction = ""
+        self.is_receiver = True
+        self.first_address = ''
+
+        # set by sender
+        self.snd_settle_mode = ''
+        self.sender_target_address = "none"
+        self.sender_class = ''
+
+        # set by receiver
+        self.rcv_settle_mode = ''
+        self.receiver_source_address = "none"
+        self.receiver_class = ''
+
+        self.frame_list = []
+
+    def GetId(self):
+        return self.session_detail.GetId() + "_" + str(self.session_seq)
+
+    def FrameCount(self):
+        return len(self.frame_list)
+
+
+class AllDetails():
+    #
+    #
+    def format_errors(self, n_errors):
+        return ("<span style=\"background-color:yellow\">%d</span>" % n_errors) if n_errors > 0 else ""
+
+    def classify_connection(self, id):
+        """
+        Return probable connection class based on the kinds of links the connection uses.
+        TODO: This assumes that the connection has one session and one link.
+        :param id:
+        :return:
+        """
+        return "oops"
+
+    def time_offset(self, ttest, t0):
+        """
+        Return a string time delta between two datetime objects in seconds formatted
+        to six significant decimal places.
+        :param ttest:
+        :param t0:
+        :return:
+        """
+        delta = ttest - t0
+        t = float(delta.seconds) + float(delta.microseconds) / 1000000.0
+        return "%0.06f" % t
+
+    def links_in_connection(self, id):
+        conn_details = self.conn_details[id]
+        n_links = 0
+        for sess in conn_details.session_list:
+            n_links += len(sess.link_list)
+        return n_links
+
+    def settlement_display(self, transfer, disposition):
+        """
+        Generate the details for a disposition settlement
+        :param transfer: plf
+        :param disposition: plf
+        :return: display string
+        """
+        state = disposition.data.disposition_state  # accept, reject, release, ...
+        if state != "accepted":
+            state = "<span style=\"background-color:orange\">%s</span>" % state
+        l2disp = "<a href=\"#%s\">%s</a>" % (disposition.fid, state)
+        sttld = "settled" if disposition.data.settled == "true" else "unsettled"
+        delay = self.time_offset(disposition.datetime, transfer.datetime)
+        return "(%s %s %s S)" % (l2disp, sttld, delay)
+
+    def resolve_settlement(self, link, transfer, rcv_disposition, snd_disposition):
+        """
+        Generate the settlement display string for this transfer.
+        :param link: linkDetails - holds settlement modes
+        :param transfer: plf of the transfer frame
+        :param rcv_disposition: plf of receiver role disposition
+        :param snd_disposition: plf of sender   role disposition
+        :return: display string
+        """
+        if transfer.data.settled is not None and transfer.data.settled == "true":
+            result = "transfer presettled"
+            if rcv_disposition is not None:
+                sys.stderr.write("WARING: Receiver disposition for presettled message. connid:%s, line:%s\n" %
+                                 (rcv_disposition.data.conn_id, rcv_disposition.lineno))
+            if snd_disposition is not None:
+                sys.stderr.write("WARING: Sender disposition for presettled message. connid:%s, line:%s\n" %
+                                 (snd_disposition.data.conn_id, snd_disposition.lineno))
+        else:
+            if "1" in link.snd_settle_mode:
+                # link mode sends only settled transfers
+                result = "link presettled"
+                if rcv_disposition is not None:
+                    sys.stderr.write("WARING: Receiver disposition for presettled link. connid:%s, line:%s\n" %
+                                     (rcv_disposition.data.conn_id, rcv_disposition.lineno))
+                if snd_disposition is not None:
+                    sys.stderr.write("WARING: Sender disposition for presettled link. connid:%s, line:%s\n" %
+                                     (snd_disposition.data.conn_id, snd_disposition.lineno))
+            else:
+                # transfer unsettled and link mode requires settlement
+                if rcv_disposition is not None:
+                    rtext = self.settlement_display(transfer, rcv_disposition)
+                    transfer.data.final_disposition = rcv_disposition
+                if snd_disposition is not None:
+                    stext = self.settlement_display(transfer, snd_disposition)
+                    transfer.data.final_disposition = snd_disposition
+
+                if "0" in link.rcv_settle_mode:
+                    # one settlement expected
+                    if rcv_disposition is not None:
+                        result = rtext
+                        if snd_disposition is not None:
+                            sys.stderr.write("WARING: Sender disposition for single first(0) settlement link. "
+                                             "connid:%s, line:%s\n" %
+                                             (snd_disposition.data.conn_id, snd_disposition.lineno))
+                    else:
+                        result = "rcvr: absent"
+                else:
+                    # two settlements expected
+                    if rcv_disposition is not None:
+                        result = "rcvr: " + rtext
+                        if snd_disposition is not None:
+                            result += ", sndr: " + stext
+                        else:
+                            result += ", sndr: absent"
+                    else:
+                        result = "rcvr: absent"
+                        if snd_disposition is not None:
+                            result += ", sndr: " + stext
+                        else:
+                            result += ", sndr: absent"
+        return result
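+    # Illustrative resolve_settlement() outcomes (hypothetical frames and timings,
+    # not taken from a real log):
+    #   transfer arrives with settled=true               -> "transfer presettled"
+    #   link snd-settle-mode includes settled(1)         -> "link presettled"
+    #   rcv-settle-mode first(0), rcvr disposition only  -> "(accepted settled 0.000150 S)"
+    #   rcv-settle-mode second(1), both dispositions     -> "rcvr: (...), sndr: (...)"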
+
+    def __init__(self, _router, _common):
+        self.rtr = _router
+        self.comn = _common
+
+        # conn_details - AMQP analysis
+        #   key= connection id '1', '2'
+        #   val= ConnectionDetail object for that connection
+        # for each connection, for each session, for each link:
+        #   what happened
+        self.conn_details = {}
+
+        for conn in self.rtr.conn_list:
+            id = self.rtr.conn_id(conn)
+            self.conn_details[id] = ConnectionDetail(id)
+            conn_details = self.conn_details[id]
+            conn_frames = self.rtr.conn_to_frame_map[id]
+            for plf in conn_frames:
+                pname = plf.data.name
+                if plf.data.amqp_error:
+                    conn_details.amqp_errors += 1
+                if pname in ['', 'open', 'close']:
+                    conn_details.unaccounted_frame_list.append(plf)
+                    continue
+                # session required
+                channel = plf.data.channel
+                sess_details = conn_details.FindSession(channel)
+                if sess_details is None:
+                    sess_details = SessionDetail(conn_details, conn_details.GetSeqNo(), plf.datetime)
+                    conn_details.session_list.append(sess_details)
+                    conn_details.EndChannel(channel)
+                    conn_details.chan_map[channel] = sess_details
+                    sess_details.direction = plf.data.direction
+                    sess_details.channel = channel
+                if plf.data.amqp_error:
+                    sess_details.amqp_errors += 1
+
+                if pname in ['begin', 'end', 'disposition']:
+                    sess_details.session_frame_list.append(plf)
+
+                elif pname in ['attach']:
+                    handle = plf.data.handle  # proton local handle
+                    link_name = plf.data.link_short_name
+                    link_name_unambiguous = link_name + "_" + str(handle)
+                    error_was = plf.data.amqp_error
+                    nl = sess_details.FindLinkByName(link_name, link_name_unambiguous, plf)
+                    # if finding an ambiguous link name generated an error then propagate to session/connection
+                    if not error_was and plf.data.amqp_error:
+                        conn_details.amqp_errors += 1
+                        sess_details.amqp_errors += 1
+                    if nl is None:
+                        # Create a new link from scratch, resulting in a half-attached link pair
+                        nl = LinkDetail(sess_details, sess_details.GetSeqNo(), link_name, plf.datetime)
+                        sess_details.link_list.append(nl)
+                        sess_details.link_name_to_detail_map[link_name_unambiguous] = nl
+                        sess_details.link_name_conflict_map[link_name] = nl
+                        nl.display_name = plf.data.link_short_name_popup
+                        nl.direction = plf.data.direction
+                        nl.is_receiver = plf.data.role == "receiver"
+                        nl.first_address = plf.data.source if nl.is_receiver else plf.data.target
+                    if plf.data.amqp_error:
+                        nl.amqp_errors += 1
+
+                    if plf.data.direction_is_in():
+                        # peer is creating link
+                        nl.input_handle = handle
+                        sess_details.DetachInputHandle(handle)
+                        sess_details.input_handle_link_map[handle] = nl
+                    else:
+                        # local is creating link
+                        nl.output_handle = handle
+                        sess_details.DetachOutputHandle(handle)
+                        sess_details.output_handle_link_map[handle] = nl
+                    if plf.data.is_receiver:
+                        nl.rcv_settle_mode = plf.data.rcv_settle_mode
+                        nl.receiver_source_address = plf.data.source
+                        nl.receiver_class = plf.data.link_class
+                    else:
+                        nl.snd_settle_mode = plf.data.snd_settle_mode
+                        nl.sender_target_address = plf.data.target
+                        nl.sender_class = plf.data.link_class
+                    nl.frame_list.append(plf)
+
+                elif pname in ['detach']:
+                    ns = conn_details.FindSession(channel)
+                    if ns is None:
+                        conn_details.unaccounted_frame_list.append(plf)
+                        continue
+                    handle = plf.data.handle
+                    nl = ns.FindLinkByHandle(handle, plf.data.direction_is_in())
+                    ns.DetachHandle(handle, plf.data.direction_is_in())
+                    if nl is None:
+                        ns.session_frame_list.append(plf)
+                    else:
+                        if plf.data.amqp_error:
+                            nl.amqp_errors += 1
+                        nl.frame_list.append(plf)
+
+                elif pname in ['transfer', 'flow']:
+                    ns = conn_details.FindSession(channel)
+                    if ns is None:
+                        conn_details.unaccounted_frame_list.append(plf)
+                        continue
+                    handle = plf.data.handle
+                    nl = ns.FindLinkByHandle(handle, plf.data.direction_is_in())
+                    if nl is None:
+                        ns.session_frame_list.append(plf)
+                    else:
+                        if plf.data.amqp_error:
+                            nl.amqp_errors += 1
+                        nl.frame_list.append(plf)
+        # identify and index dispositions
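+        # Illustrative example (hypothetical values): a receiver-role disposition seen
+        # inbound ('<-') with first=3 and last=5 is stored in the session's
+        # rx_rcvr_disposition_map under delivery-id keys '3', '4', and '5'.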
+        for conn in self.rtr.conn_list:
+            id = self.rtr.conn_id(conn)
+            conn_detail = self.conn_details[id]
+            for sess in conn_detail.session_list:
+                # for each disposition add state to disposition_map
+                for splf in sess.session_frame_list:
+                    if splf.data.name == "disposition":
+                        if splf.data.direction == "<-":
+                            sdispmap = sess.rx_rcvr_disposition_map if splf.data.is_receiver else sess.rx_sndr_disposition_map
+                        else:
+                            sdispmap = sess.tx_rcvr_disposition_map if splf.data.is_receiver else sess.tx_sndr_disposition_map
+                        for sdid in range(int(splf.data.first), (int(splf.data.last) + 1)):
+                            did = str(sdid)
+                            if did in sdispmap:
+                                sys.stderr.write("ERROR: Delivery ID collision in disposition map. "
+                                                 "connid:%s, delivery id:%s\n" % (splf.data.conn_id, did))
+                            sdispmap[did] = splf
+
+    def show_html(self):
+        for conn in self.rtr.conn_list:
+            id = self.rtr.conn_id(conn)
+            conn_detail = self.rtr.details.conn_details[id]
+            conn_frames = self.rtr.conn_to_frame_map[id]
+            print("<a name=\"cd_%s\"></a>" % id)
+            # This lozenge shows/hides the connection's data
+            print("<a href=\"javascript:toggle_node('%s_data')\">%s%s</a>" %
+                  (id, text.lozenge(), text.nbsp()))
+            dir = self.rtr.conn_dir[id] if id in self.rtr.conn_dir else ""
+            peer = self.rtr.conn_peer_display.get(id, "")  # peer container id
+            peerconnid = self.comn.conn_peers_connid.get(id, "")
+            # show the connection title
+            print("%s %s %s %s (nFrames=%d) %s<br>" % \
+                  (id, dir, peerconnid, peer, len(conn_frames), self.format_errors(conn_detail.amqp_errors)))
+            # data div
+            print("<div id=\"%s_data\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" % id)
+
+            # unaccounted frames
+            print("<a href=\"javascript:toggle_node('%s_data_unacc')\">%s%s</a>" %
+                  (id, text.lozenge(), text.nbsp()))
+            # show the connection-level frames
+            errs = sum(1 for plf in conn_detail.unaccounted_frame_list if plf.data.amqp_error)
+            print("Connection-based entries %s<br>" % self.format_errors(errs))
+            print("<div id=\"%s_data_unacc\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" % id)
+            for plf in conn_detail.unaccounted_frame_list:
+                print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str, "<br>")
+            print("</div>")  # end unaccounted frames
+
+            # loop to print session details
+            for sess in conn_detail.session_list:
+                # show the session toggle and title
+                print("<a href=\"javascript:toggle_node('%s_sess_%s')\">%s%s</a>" %
+                      (id, sess.conn_epoch, text.lozenge(), text.nbsp()))
+                print("Session %s: channel: %s, peer channel: %s; Time: start %s, Counts: frames: %d %s<br>" % \
+                      (sess.conn_epoch, sess.channel, sess.peer_chan, sess.time_start, \
+                       sess.FrameCount(), self.format_errors(sess.amqp_errors)))
+                print("<div id=\"%s_sess_%s\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" %
+                      (id, sess.conn_epoch))
+                # show the session-level frames
+                errs = sum(1 for plf in sess.session_frame_list if plf.data.amqp_error)
+                print("<a href=\"javascript:toggle_node('%s_sess_%s_unacc')\">%s%s</a>" %
+                      (id, sess.conn_epoch, text.lozenge(), text.nbsp()))
+                print("Session-based entries %s<br>" % self.format_errors(errs))
+                print("<div id=\"%s_sess_%s_unacc\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" %
+                      (id, sess.conn_epoch))
+                for plf in sess.session_frame_list:
+                    print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str, "<br>")
+                print("</div>")  # end <id>_sess_<conn_epoch>_unacc
+                # loops to print session link details
+                # first loop prints link table
+                print("<table>")
+                print("<tr><th>Link</th> <th>Dir</th> <th>Role</th>  <th>Address</th>  <th>Class</th>  "
+                      "<th>snd-settle-mode</th>  <th>rcv-settle-mode</th>  <th>Start time</th>  <th>Frames</th> "
+                      "<th>AMQP errors</th></tr>")
+                for link in sess.link_list:
+                    # show the link toggle and title
+                    showthis = ("<a href=\"javascript:toggle_node('%s_sess_%s_link_%s')\">%s</a>" %
+                                (id, sess.conn_epoch, link.session_seq, link.display_name))
+                    role = "receiver" if link.is_receiver else "sender"
+                    print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>"
+                          "<td>%s</td><td>%d</td><td>%s</td></tr>" % \
+                          (showthis, link.direction, role, link.first_address,
+                           (link.sender_class + '-' + link.receiver_class), link.snd_settle_mode,
+                           link.rcv_settle_mode, link.time_start, link.FrameCount(),
+                           self.format_errors(link.amqp_errors)))
+                print("</table>")
+                # second loop prints the link's frames
+                for link in sess.link_list:
+                    print(
+                        "<div id=\"%s_sess_%s_link_%s\" style=\"display:none; margin-top: 2px; margin-bottom: 2px; margin-left: 10px\">" %
+                        (id, sess.conn_epoch, link.session_seq))
+                    print("<h4>Connection %s Session %s Link %s</h4>" %
+                          (id, sess.conn_epoch, link.display_name))
+                    for plf in link.frame_list:
+                        if plf.data.name == "transfer":
+                            tdid = plf.data.delivery_id
+                            if plf.data.direction == "->":
+                                rmap = sess.rx_rcvr_disposition_map
+                                tmap = sess.rx_sndr_disposition_map
+                            else:
+                                rmap = sess.tx_rcvr_disposition_map
+                                tmap = sess.tx_sndr_disposition_map
+                            plf.data.disposition_display = self.resolve_settlement(link, plf,
+                                                                                   rmap.get(tdid),
+                                                                                   tmap.get(tdid))
+                        print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str,
+                              plf.data.disposition_display, "<br>")
+                    print("</div>")  # end link <id>_sess_<conn_epoch>_link_<sess_seq>
+
+                print("</div>")  # end session <id>_sess_<conn_epoch>
+
+            print("</div>")  # end current connection data
+
+
+if __name__ == "__main__":
+
+    try:
+        pass
+    except:
+        traceback.print_exc(file=sys.stdout)
+        pass

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/common.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/common.py b/bin/log_scraper/common.py
new file mode 100755
index 0000000..d570024
--- /dev/null
+++ b/bin/log_scraper/common.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Common data storage and utilities
+
+import sys
+
+import nicknamer
+
+IS_PY2 = sys.version_info[0] == 2
+
+if IS_PY2:
+    def dict_iteritems(d):
+        return d.iteritems()
+    def dict_iterkeys(d):
+        return d.iterkeys()
+else:
+    def dict_iteritems(d):
+        return iter(d.items())
+    def dict_iterkeys(d):
+        return iter(d.keys())
+
+class Common():
+
+    # arg - index transfer data or not
+    # If a log file has 100M transfers then adverbl dies.
+    # With program arg --no-data then data indexing is turned off but
+    # the output still shows connections, links, and link state costs.
+    arg_index_data = True
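+    # Illustrative invocation (hypothetical file names):
+    #   python main.py --no-data A.log B.log > page.html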
+
+    # first letter of the connection names
+    log_char_base = 'A'
+
+    # number of logs processed
+    n_logs = 0
+
+    # array of file name strings from command line
+    # len=n_logs
+    log_fns = []
+
+    # discovered router container names
+    # len=n_logs
+    router_ids = [] # raw long names
+
+    # router display names shortened with popups
+    router_display_names = []
+
+    # router modes in plain text
+    router_modes = []
+
+    # list of router-instance lists
+    # [[A0, A1], [B0], [C0, C1, C2]]
+    routers = []
+
+    # ordered list of connection names across all routers
+    all_conn_names = []
+
+    # conn_details_map -
+    # key=conn_id, val=ConnectionDetail for that connection
+    conn_details_map = {}
+
+    # mapping of connected routers by connection id
+    # A0_1 is connected to B3_2
+    # key = full conn_id 'A0_5'
+    # val = full conn_id 'B0_8'
+    # note names[key]=val and names[val]=key mutual reference
+    conn_peers_connid = {}
+
+    # short display name for peer indexed by connection id
+    # A0_1 maps to B's container_name nickname
+    conn_peers_display = {}
+
+    # conn_to_frame_map - global list for easier iteration in main
+    # key = conn_id full A0_3
+    # val = list of plf lines
+    conn_to_frame_map = {}
+
+    shorteners = nicknamer.Shorteners()
+
+    # when --no-data is in effect, how many log lines were skipped?
+    data_skipped = 0
+
+    def router_id_index(self, id):
+        """
+        Given a router full container name, return the index in router_ids table
+        Throw value error if not found
+        :param id:
+        :return:
+        """
+        return self.router_ids.index(id)
+
+
+def log_letter_of(idx):
+    '''
+    Return the letter A, B, C, ... from the index 0..n
+    :param idx:
+    :return: A..Z
+    '''
+    if idx >= 26:
+        sys.exit('ERROR: too many log files')
+    return "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[idx]
+
+def index_of_log_letter(letter):
+    '''
+    Return the index 0..25 of the first letter of the 'letter' string
+    Raise error if out of range
+    :param letter:
+    :return:
+    '''
+    val = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".find(letter[0].upper())
+    if val < 0 or val > 25:
+        raise ValueError("index_of_log_letter: invalid log letter: %s" % letter)
+    return val
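+# Illustrative examples (hypothetical values):
+#   log_letter_of(0) -> 'A', log_letter_of(2) -> 'C'
+#   index_of_log_letter('C0_7') -> 2  (only the first character of the id is considered)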
+
+class RestartRec():
+    def __init__(self, _id, _router, _event, _datetime):
+        self.id = _id
+        self.router = _router
+        self.event = _event
+        self.datetime = _datetime
+

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/main.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/main.py b/bin/log_scraper/main.py
new file mode 100755
index 0000000..a0d4b40
--- /dev/null
+++ b/bin/log_scraper/main.py
@@ -0,0 +1,785 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Adverbl concepts
+# * Multiple log files may be displayed at the same time.
+#   Each log file gets a letter prefix: A, B, C, ...
+# * AMQP proton trace channel numbers in each log get the letter prefix:
+#    [1] becomes [A-1]
+# * The log file line numbers are equivalent to a wireshark trace frame number.
+# * There's no concept of client and server because the logs are from inside
+#   a router.
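+# * Illustrative decoration (hypothetical log set): the first log file is 'A',
+#   its second router instance is 'A1', connection 5 in that instance gets the
+#   connection id 'A1_5', and proton channel [1] on it is displayed as [A-1].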
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import ast
+import cgi
+import os
+import sys
+import traceback
+
+import common
+import parser
+import router
+import text
+
+
+def time_offset(ttest, t0):
+    """
+    Return a string time delta between two datetime objects in seconds formatted
+    to six significant decimal places.
+    :param ttest:
+    :param t0:
+    :return:
+    """
+    delta = ttest - t0
+    t = float(delta.seconds) + float(delta.microseconds) / 1000000.0
+    return "%0.06f" % t
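+# Illustrative doctest-style example (hypothetical timestamps):
+#   >>> from datetime import datetime
+#   >>> time_offset(datetime(2018, 11, 27, 14, 0, 1, 250000),
+#   ...             datetime(2018, 11, 27, 14, 0, 0, 0))
+#   '1.250000'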
+
+
+def show_noteworthy_line(plf, comn):
+    """
+    Given a log line, print the noteworthy display line
+    :param plf: parsed log line
+    :param comn:
+    :return:
+    """
+    rid = plf.router.iname
+    id = "[%s]" % plf.data.conn_id
+    peerconnid = "[%s]" % comn.conn_peers_connid.get(plf.data.conn_id, "")
+    peer = plf.router.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
+    print("%s %s %s %s %s %s %s<br>" %
+          (plf.adverbl_link_to(), rid, id, plf.data.direction, peerconnid, peer,
+           plf.data.web_show_str))
+
+
+#
+#
+def main_except(argv):
+    """
+    Given a list of log file names, send the javascript web page to stdout
+    """
+    if len(argv) < 2:
+        sys.exit('Usage: %s [--no-data] log-file-name [log-file-name ...]' % argv[0])
+
+    # Instantiate a common block
+    comn = common.Common()
+
+    # look for the --no-data switch (simple manual argument scan)
+    if argv[1] == "--no-data":
+        comn.arg_index_data = False
+        del argv[1]
+
+    # process the log files and add the results to router_array
+    for log_i in range(0, len(argv) - 1):
+        arg_log_file = argv[log_i + 1]
+        comn.log_fns.append(arg_log_file)
+        comn.n_logs += 1
+
+        if not os.path.exists(arg_log_file):
+            sys.exit('ERROR: log file %s was not found!' % arg_log_file)
+
+        # parse the log file
+        rtrs = parser.parse_log_file(arg_log_file, log_i, comn)
+        comn.routers.append(rtrs)
+
+        # marshall facts about the run
+        for rtr in rtrs:
+            rtr.discover_connection_facts(comn)
+
+    # Create lists of various things sorted by time
+    tree = []  # log line
+    ls_tree = []  # link state lines
+    rr_tree = []  # restart records
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            tree += rtr.lines
+            ls_tree += rtr.router_ls
+            rr_tree.append(rtr.restart_rec)
+    tree = sorted(tree, key=lambda lfl: lfl.datetime)
+    ls_tree = sorted(ls_tree, key=lambda lfl: lfl.datetime)
+    rr_tree = sorted(rr_tree, key=lambda lfl: lfl.datetime)
+
+    # Back-propagate a router name/version/mode to each list's router0.
+    # Complain if container name or version changes between instances.
+    # Fill in container_id and shortened display_name tables
+    for fi in range(comn.n_logs):
+        rtrlist = comn.routers[fi]
+        if len(rtrlist) > 1:
+            if rtrlist[0].container_name is None:
+                rtrlist[0].container_name = rtrlist[1].container_name
+            if rtrlist[0].version is None:
+                rtrlist[0].version = rtrlist[1].version
+            if rtrlist[0].mode is None:
+                rtrlist[0].mode = rtrlist[1].mode
+            for i in range(0, len(rtrlist) - 1):
+                namei = rtrlist[i].container_name
+                namej = rtrlist[i + 1].container_name
+                if namei != namej:
+                    sys.exit('Inconsistent container names, log file %s, instance %d:%s but instance %d:%s' %
+                             (comn.log_fns[fi], i, namei, i + 1, namej))
+                namei = rtrlist[i].version
+                namej = rtrlist[i + 1].version
+                if namei != namej:
+                    sys.exit('Inconsistent router versions, log file %s, instance %d:%s but instance %d:%s' %
+                             (comn.log_fns[fi], i, namei, i + 1, namej))
+                namei = rtrlist[i].mode
+                namej = rtrlist[i + 1].mode
+                if namei != namej:
+                    sys.exit('Inconsistent router modes, log file %s, instance %d:%s but instance %d:%s' %
+                             (comn.log_fns[fi], i, namei, i + 1, namej))
+        name = rtrlist[0].container_name if len(rtrlist) > 0 and rtrlist[0].container_name is not None else ("Unknown_%d" % fi)
+        mode = rtrlist[0].mode if len(rtrlist) > 0 and rtrlist[0].mode is not None else "standalone"
+        comn.router_ids.append(name)
+        comn.router_display_names.append(comn.shorteners.short_rtr_names.translate(name))
+        comn.router_modes.append(mode)
+
+    # aggregate connection-to-frame maps into big map
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            comn.conn_to_frame_map.update(rtr.conn_to_frame_map)
+
+    # generate router-to-router connection peer relationships
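+    # Illustrative example (hypothetical ids): if router A's log shows an inbound open
+    # on connection A0_5 whose properties carry qd.conn-id 8 and whose container id
+    # resolves to router instance B0, then A0_5 and B0_8 are recorded as peers.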
+    peer_list = []
+    for plf in tree:
+        if plf.data.name == "open" and plf.data.direction_is_in():
+            cid = plf.data.conn_id  # the router that generated this log file
+            if "properties" in plf.data.described_type.dict:
+                peer_conn = plf.data.described_type.dict["properties"].get(':"qd.conn-id"',
+                                                                           "")  # router that sent the open
+                if peer_conn != "" and plf.data.conn_peer != "":
+                    pid_peer = plf.data.conn_peer.strip('\"')
+                    rtr, rtridx = router.which_router_id_tod(comn.routers, pid_peer, plf.datetime)
+                    if rtr is not None:
+                        pid = rtr.conn_id(peer_conn)
+                        hit = sorted((cid, pid))
+                        if hit not in peer_list:
+                            peer_list.append(hit)
+
+    for (key, val) in peer_list:
+        if key in comn.conn_peers_connid:
+            sys.exit('ERROR: connection %s already has a peer mapping' % key)
+        if val in comn.conn_peers_connid:
+            sys.exit('ERROR: connection %s already has a peer mapping' % val)
+        comn.conn_peers_connid[key] = val
+        comn.conn_peers_connid[val] = key
+        cn_k = comn.router_ids[common.index_of_log_letter(key)]
+        cn_v = comn.router_ids[common.index_of_log_letter(val)]
+        comn.conn_peers_display[key] = comn.shorteners.short_rtr_names.translate(cn_v)
+        comn.conn_peers_display[val] = comn.shorteners.short_rtr_names.translate(cn_k)
+    #
+    # Start producing the output stream
+    #
+    print(text.web_page_head())
+
+    #
+    # Generate javascript
+    #
+    # output the frame show/hide functions into the header
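+    # For a hypothetical connection id 'A0_3' this loop emits javascript like (illustrative):
+    #   function show_A0_3() { javascript:show_node('f_A_116'); ... }
+    #   function hide_A0_3() { javascript:hide_node('f_A_116'); ... }
+    #   plus the checkbox helpers show_if_cb_sel_A0_3, select_cb_sel_A0_3,
+    #   deselect_cb_sel_A0_3, and toggle_cb_sel_A0_3.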
+    for conn_id, plfs in common.dict_iteritems(comn.conn_to_frame_map):
+        print("function show_%s() {" % conn_id)
+        for plf in plfs:
+            print("  javascript:show_node(\'%s\');" % plf.fid)
+        print("}")
+        print("function hide_%s() {" % conn_id)
+        for plf in plfs:
+            print("  javascript:hide_node(\'%s\');" % plf.fid)
+        print("}")
+        # manipulate checkboxes
+        print("function show_if_cb_sel_%s() {" % conn_id)
+        print("  if (document.getElementById(\"cb_sel_%s\").checked) {" % conn_id)
+        print("    javascript:show_%s();" % conn_id)
+        print("  } else {")
+        print("    javascript:hide_%s();" % conn_id)
+        print("  }")
+        print("}")
+        print("function select_cb_sel_%s() {" % conn_id)
+        print("  document.getElementById(\"cb_sel_%s\").checked = true;" % conn_id)
+        print("  javascript:show_%s();" % conn_id)
+        print("}")
+        print("function deselect_cb_sel_%s() {" % conn_id)
+        print("  document.getElementById(\"cb_sel_%s\").checked = false;" % conn_id)
+        print("  javascript:hide_%s();" % conn_id)
+        print("}")
+        print("function toggle_cb_sel_%s() {" % conn_id)
+        print("  if (document.getElementById(\"cb_sel_%s\").checked) {" % conn_id)
+        print("    document.getElementById(\"cb_sel_%s\").checked = false;" % conn_id)
+        print("  } else {")
+        print("    document.getElementById(\"cb_sel_%s\").checked = true;" % conn_id)
+        print("  }")
+        print("  javascript:show_if_cb_sel_%s();" % conn_id)
+        print("}")
+
+    # Select/Deselect/Toggle All Connections functions
+    print("function select_all() {")
+    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
+        print("  javascript:select_cb_sel_%s();" % conn_id)
+    print("}")
+    print("function deselect_all() {")
+    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
+        print("  javascript:deselect_cb_sel_%s();" % conn_id)
+    print("}")
+    print("function toggle_all() {")
+    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
+        print("  javascript:toggle_cb_sel_%s();" % conn_id)
+    print("}")
+
+    #
+    print("</script>")
+    print("</head>")
+    print("<body>")
+    #
+
+    # Table of contents
+    print(text.web_page_toc())
+
+    # Report how much data was skipped if --no-data switch in effect
+    if not comn.arg_index_data:
+        print("--no-data switch in effect. %d log lines skipped" % comn.data_skipped)
+        print("<p><hr>")
+
+    # file(s) included in this doc
+    print("<a name=\"c_logfiles\"></a>")
+    print("<h3>Log files</h3>")
+    print("<table><tr><th>Log</th> <th>Container name</th> <th>Version</th> <th>Mode</th>"
+          "<th>Instances</th> <th>Log file path</th></tr>")
+    for i in range(comn.n_logs):
+        rtrlist = comn.routers[i]
+        if len(rtrlist) > 0:
+            print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" %
+                  (common.log_letter_of(i), rtrlist[0].container_name, rtrlist[0].version, rtrlist[0].mode,
+                   str(len(rtrlist)), os.path.abspath(comn.log_fns[i])))
+        else:
+            print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" %
+                  (common.log_letter_of(i), text.nbsp(), text.nbsp(), text.nbsp(),
+                   str(len(rtrlist)), os.path.abspath(comn.log_fns[i])))
+    print("</table>")
+    print("<hr>")
+
+    # reboot chronology
+    print("<a name=\"c_rtrinstances\"></a>")
+    print("<h3>Router Reboot Chronology</h3>")
+    print("<table><tr><th>Log</th> <th>Time</th> <th>Container name</th> ")
+    for i in range(len(comn.routers)):
+        print("<td>%s</td>" % common.log_letter_of(i))
+    print("</tr>")
+    for rr in rr_tree:
+        print("<tr><td>%s</td><td>%s</td><td>%s</td>" %
+              (rr.router.iname, rr.datetime, rr.router.container_name))
+        for i in range(len(comn.routers)):
+            print("<td>%s</td> " % (rr.router.iname if i == rr.router.log_index else text.nbsp()))
+        print("</tr>")
+    print("</table>")
+    print("<hr>")
+
+    # print the connection peer tables
+    #
+    # +------+--------------------+-----+--------------------+-------+-------+----------+--------+
+    # | View |       Router       | Dir |       Peer         | Log   | N     | Transfer | AMQP   |
+    # |      +-----------+--------+     +--------+-----------+ lines | links | bytes    | errors |
+    # |      | container | connid |     | connid | container |       |       |          |        |
+    # +------+-----------+--------+-----+--------+-----------+-------+-------+----------+--------+
+
+    print("<a name=\"c_connections\"></a>")
+    print("<h3>Connections</h3>")
+
+    print("<p>")
+    print("<button onclick=\"javascript:select_all()\">Select All</button>")
+    print("<button onclick=\"javascript:deselect_all()\">Deselect All</button>")
+    print("<button onclick=\"javascript:toggle_all()\">Toggle All</button>")
+    print("</p>")
+
+    print("<h3>Connections by ConnectionId</h3>")
+    print(
+        "<table><tr> <th rowspan=\"2\">View</th> <th colspan=\"2\">Router</th> <th rowspan=\"2\">Dir</th> <th colspan=\"2\">Peer</th> <th rowspan=\"2\">Log lines</th> "
+        "<th rowspan=\"2\">N links</th><th rowspan=\"2\">Transfer bytes</th> <th rowspan=\"2\">AMQP errors</th> <th rowspan=\"2\">Open time</th> <th rowspan=\"2\">Close time</th></tr>")
+    print("<tr> <th>container</th> <th>connid</th> <th>connid</th> <th>container</th></tr>")
+
+    tConn = 0
+    tLines = 0
+    tBytes = 0
+    tErrs = 0
+    tLinks = 0
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            rid = rtr.container_name
+            for conn in rtr.conn_list:
+                tConn += 1
+                id = rtr.conn_id(conn)  # this router's full connid 'A0_3'
+                peer = rtr.conn_peer_display.get(id, "")  # peer container id
+                peerconnid = comn.conn_peers_connid.get(id, "")
+                n_links = rtr.details.links_in_connection(id)
+                tLinks += n_links
+                errs = sum(1 for plf in rtr.conn_to_frame_map[id] if plf.data.amqp_error)
+                tErrs += errs
+                stime = rtr.conn_open_time.get(id, text.nbsp())
+                if stime != text.nbsp():
+                    stime = stime.datetime
+                etime = rtr.conn_close_time.get(id, text.nbsp())
+                if etime != text.nbsp():
+                    etime = etime.datetime
+                print("<tr>")
+                print("<td> <input type=\"checkbox\" id=\"cb_sel_%s\" " % id)
+                print("checked=\"true\" onclick=\"javascript:show_if_cb_sel_%s()\"> </td>" % (id))
+                print("<td>%s</td><td><a href=\"#cd_%s\">%s</a></td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>"
+                      "<td>%d</td><td>%s</td><td>%d</td><td>%s</td><td>%s</td></tr>" %
+                      (rid, id, id, rtr.conn_dir[id], peerconnid, peer, rtr.conn_log_lines[id], n_links,
+                       rtr.conn_xfer_bytes[id], errs, stime, etime))
+                tLines += rtr.conn_log_lines[id]
+                tBytes += rtr.conn_xfer_bytes[id]
+    print(
+        "<tr><td>Total</td><td>%d</td><td> </td><td> </td><td> </td><td> </td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td> </td><td> </td></tr>" %
+        (tConn, tLines, tLinks, tBytes, tErrs))
+    print("</table>")
+
+    print("<h3>Router Restart and Connection chronology</h3>")
+
+    cl = []
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            rid = rtr.container_name
+            cl.append(common.RestartRec(rtr.iname, rtr, "restart", rtr.restart_rec.datetime))
+            for conn in rtr.conn_list:
+                id = rtr.conn_id(conn)
+                if id in rtr.conn_open_time:
+                    cl.append(common.RestartRec(id, rtr, "open", rtr.conn_open_time[id].datetime))
+                if id in rtr.conn_close_time:
+                    cl.append(common.RestartRec(id, rtr, "close", rtr.conn_close_time[id].datetime))
+    cl = sorted(cl, key=lambda lfl: lfl.datetime)
+
+    print("<table><tr> <th>Time</th> <th>Id</th> <th>Event</th> <th>container</th> <th>connid</th> "
+          "<th>Dir</th> <th>connid</th> <th>container</th>")
+    for i in range(len(comn.routers)):
+        print("<td>%s</td>" % common.log_letter_of(i))
+    print("</tr>")
+    for c in cl:
+        if c.event == "restart":
+            rid = c.router.container_name
+            print("<tr><td>%s</td> <td>%s</td> <td><span style=\"background-color:yellow\">%s</span></td><td>%s</td> "
+                  "<td>%s</td> <td>%s</td><td>%s</td> <td>%s</td>" %
+                  (c.datetime, c.id, c.event, rid, "", "", "", ""))
+            for i in range(len(comn.routers)):
+                print("<td>%s</td> " % (c.id if i == c.router.log_index else text.nbsp()))
+            print("</tr>")
+        else:
+            rid = c.router.container_name
+            cdir = c.router.conn_dir[c.id]
+            peer = c.router.conn_peer_display.get(c.id, "")  # peer container id
+            peerconnid = comn.conn_peers_connid.get(c.id, "")
+            print("<tr><td>%s</td> <td>%s</td> <td>%s</td><td>%s</td> <td>%s</td> <td>%s</td><td>%s</td> <td>%s</td>" %
+                  (c.datetime, c.id, c.event, rid, c.id, cdir, peerconnid, peer))
+            for i in range(len(comn.routers)):
+                print("<td>%s</td> " % (text.nbsp()))
+            print("</tr>")
+    print("</table>")
+    print("<hr>")
+
+    # connection details
+    print("<a name=\"c_conndetails\"></a>")
+    print("<h3>Connection Details</h3>")
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            rtr.details.show_html()
+    print("<hr>")
+
+    # noteworthy log lines: highlight errors and stuff
+    print("<a name=\"c_noteworthy\"></a>")
+    print("<h3>Noteworthy</h3>")
+    n_errors = 0
+    n_settled = 0
+    n_more = 0
+    n_resume = 0
+    n_aborted = 0
+    n_drain = 0
+    for plf in tree:
+        if plf.data.amqp_error:
+            n_errors += 1
+        if plf.data.transfer_settled:
+            n_settled += 1
+        if plf.data.transfer_more:
+            n_more += 1
+        if plf.data.transfer_resume:
+            n_resume += 1
+        if plf.data.transfer_aborted:
+            n_aborted += 1
+        if plf.data.flow_drain:
+            n_drain += 1
+    # amqp errors
+    print("<a href=\"javascript:toggle_node('noteworthy_errors')\">%s%s</a> AMQP errors: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_errors))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_errors\">")
+    for plf in tree:
+        if plf.data.amqp_error:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    # transfers with settled=true
+    print("<a href=\"javascript:toggle_node('noteworthy_settled')\">%s%s</a> Presettled transfers: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_settled))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_settled\">")
+    for plf in tree:
+        if plf.data.transfer_settled:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    # transfers with more=true
+    print("<a href=\"javascript:toggle_node('noteworthy_more')\">%s%s</a> Partial transfers with 'more' set: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_more))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_more\">")
+    for plf in tree:
+        if plf.data.transfer_more:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    # transfers with resume=true, whatever that is
+    print("<a href=\"javascript:toggle_node('noteworthy_resume')\">%s%s</a> Resumed transfers: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_resume))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_resume\">")
+    for plf in tree:
+        if plf.data.transfer_resume:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    # transfers with abort=true
+    print("<a href=\"javascript:toggle_node('noteworthy_aborts')\">%s%s</a> Aborted transfers: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_aborted))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_aborts\">")
+    for plf in tree:
+        if plf.data.transfer_aborted:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    # flow with drain=true
+    print("<a href=\"javascript:toggle_node('noteworthy_drain')\">%s%s</a> Flow with 'drain' set: %d<br>" %
+          (text.lozenge(), text.nbsp(), n_drain))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"noteworthy_drain\">")
+    for plf in tree:
+        if plf.data.flow_drain:
+            show_noteworthy_line(plf, comn)
+    print("</div>")
+    print("<hr>")
+
+    # the proton log lines
+    # log lines in         f_A_116
+    # log line details in  f_A_116_d
+    print("<a name=\"c_logdata\"></a>")
+    print("<h3>Log data</h3>")
+    for plf in tree:
+        l_dict = plf.data.described_type.dict
+        print("<div width=\"100%%\" style=\"display:block; margin-bottom: 2px\" id=\"%s\">" % plf.fid)
+        print("<a name=\"%s\"></a>" % plf.fid)
+        detailname = plf.fid + "_d"  # type: str
+        loz = "<a href=\"javascript:toggle_node('%s')\">%s%s</a>" % (detailname, text.lozenge(), text.nbsp())
+        rtr = plf.router
+        rid = comn.router_display_names[rtr.log_index]
+
+        peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
+        peer = rtr.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
+        print(loz, plf.datetime, ("%s#%d" % (plf.prefixi, plf.lineno)), rid, ("[%s]" % plf.data.conn_id),
+              plf.data.direction, ("[%s]" % peerconnid), peer,
+              plf.data.web_show_str, plf.data.disposition_display, "<br>")
+        print(" <div width=\"100%%\"; "
+              "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+              "id=\"%s\">" %
+              detailname)
+        for key in sorted(common.dict_iterkeys(l_dict)):
+            val = l_dict[key]
+            print("%s : %s <br>" % (key, cgi.escape(str(val))))
+        if plf.data.name == "transfer":
+            print("Header and annotations : %s <br>" % plf.data.transfer_hdr_annos)
+        print("</div>")
+        print("</div>")
+    print("<hr>")
+
+    # data traversing network
+    print("<a name=\"c_messageprogress\"></a>")
+    print("<h3>Message progress</h3>")
+    for i in range(0, comn.shorteners.short_data_names.len()):
+        sname = comn.shorteners.short_data_names.shortname(i)
+        size = 0
+        for plf in tree:
+            if plf.data.name == "transfer" and plf.transfer_short_name == sname:
+                size = plf.data.transfer_size
+                break
+        print("<a name=\"%s\"></a> <h4>%s (%s)" % (sname, sname, size))
+        print(" <span> <a href=\"javascript:toggle_node('%s')\"> %s</a>" % ("data_" + sname, text.lozenge()))
+        print(" <div width=\"100%%\"; style=\"display:none; font-weight: normal; margin-bottom: 2px\" id=\"%s\">" %
+              ("data_" + sname))
+        print(" ", comn.shorteners.short_data_names.longname(i, True))
+        print("</div> </span>")
+        print("</h4>")
+        print("<table>")
+        print(
+            "<tr><th>Src</th> <th>Time</th> <th>Router</th> <th>ConnId</th> <th>Dir</th> <th>ConnId</th> <th>Peer</th> "
+            "<th>T delta</th> <th>T elapsed</th><th>Settlement</th><th>S elapsed</th></tr>")
+        t0 = None
+        tlast = None
+        for plf in tree:
+            if plf.data.name == "transfer" and plf.transfer_short_name == sname:
+                if t0 is None:
+                    t0 = plf.datetime
+                    tlast = plf.datetime
+                    delta = "0.000000"
+                    epsed = "0.000000"
+                else:
+                    delta = time_offset(plf.datetime, tlast)
+                    epsed = time_offset(plf.datetime, t0)
+                    tlast = plf.datetime
+                sepsed = ""
+                if plf.data.final_disposition is not None:
+                    sepsed = time_offset(plf.data.final_disposition.datetime, t0)
+                rid = plf.router.iname
+                peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
+                peer = plf.router.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
+                print("<tr><td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> "
+                      "<td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> </tr>" %
+                      (plf.adverbl_link_to(), plf.datetime, rid, plf.data.conn_id, plf.data.direction,
+                       peerconnid, peer, delta, epsed,
+                       plf.data.disposition_display, sepsed))
+        print("</table>")
+
+    print("<hr>")
+
+    # link names traversing network
+    print("<a name=\"c_linkprogress\"></a>")
+    print("<h3>Link name propagation</h3>")
+    for i in range(0, comn.shorteners.short_link_names.len()):
+        if comn.shorteners.short_link_names.len() == 0:
+            break
+        sname = comn.shorteners.short_link_names.prefixname(i)
+        print("<a name=\"%s\"></a> <h4>%s" % (sname, sname))
+        print(" <span> <div width=\"100%%\"; style=\"display:block; font-weight: normal; margin-bottom: 2px\" >")
+        print(comn.shorteners.short_link_names.longname(i, True))
+        print("</div> </span>")
+        print("</h4>")
+        print("<table>")
+        print("<tr><th>Src</th> <th>Time</th> <th>Router</th> <th>ConnId</th> <th>Dir</th> <th>ConnId</th> <th>Peer</th> "
+              "<th>T delta</th> <th>T elapsed</th></tr>")
+        t0 = None
+        tlast = None
+        for plf in tree:
+            if plf.data.name == "attach" and plf.data.link_short_name == sname:
+                if t0 is None:
+                    t0 = plf.datetime
+                    delta = "0.000000"
+                    epsed = "0.000000"
+                else:
+                    delta = time_offset(plf.datetime, tlast)
+                    epsed = time_offset(plf.datetime, t0)
+                tlast = plf.datetime
+                rid = plf.router.iname
+                peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
+                peer = plf.router.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
+                print("<tr><td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> "
+                      "<td>%s</td> <td>%s</td> <td>%s</td></tr>" %
+                      (plf.adverbl_link_to(), plf.datetime, rid, plf.data.conn_id, plf.data.direction, peerconnid, peer,
+                       delta, epsed))
+        print("</table>")
+
+    print("<hr>")
+
+    # short data index
+    print("<a name=\"c_rtrdump\"></a>")
+    comn.shorteners.short_rtr_names.htmlDump(False)
+    print("<hr>")
+
+    print("<a name=\"c_peerdump\"></a>")
+    comn.shorteners.short_peer_names.htmlDump(False)
+    print("<hr>")
+
+    print("<a name=\"c_linkdump\"></a>")
+    comn.shorteners.short_link_names.htmlDump(True)
+    print("<hr>")
+
+    print("<a name=\"c_msgdump\"></a>")
+    comn.shorteners.short_data_names.htmlDump(True)
+    print("<hr>")
+
+    # link state info
+    # merge link state and restart records into single time based list
+    cl = []
+    for rtrlist in comn.routers:
+        for rtr in rtrlist:
+            rid = rtr.container_name
+            cl.append(common.RestartRec(rtr.iname, rtr, "restart", rtr.restart_rec.datetime))
+    for plf in ls_tree:
+        if "costs" in plf.line:
+            cl.append(common.RestartRec("ls", plf, "ls", plf.datetime))
+    cl = sorted(cl, key=lambda lfl: lfl.datetime)
+
+    # create a map of lists for each router
+    # the list holds the name of other routers for which the router publishes a cost
+    costs_pub = {}
+    for i in range(0, comn.n_logs):
+        costs_pub[comn.router_ids[i]] = []
+
+    # cur_costs is a 2D array of costs used to tell when cost calcs have stabilized
+    # Each incoming LS cost line replaces a row in this table
+    # cur_costs tracks only interior routers
+    interior_rtrs = []
+    for rtrs in comn.routers:
+        if rtrs[0].is_interior():
+            interior_rtrs.append(rtrs[0].container_name)
+
+    PEER_COST_REBOOT = -1
+    PEER_COST_ABSENT = 0
+    def new_costs_row(val):
+        """
+        return a costs row.
+        :param val: -1 when router reboots, 0 when router log line processed
+        :return:
+        """
+        res = {}
+        for rtr in interior_rtrs:
+            res[rtr] = val
+        return res
+
+    cur_costs = {}
+    for rtr in interior_rtrs:
+        cur_costs[rtr] = new_costs_row(PEER_COST_REBOOT)
+
+    print("<a name=\"c_ls\"></a>")
+    print("<h3>Routing link state</h3>")
+    print("<h4>Link state costs</h4>")
+    print("<table>")
+    print("<tr><th>Time</th> <th>Router</th>")
+    for i in range(0, comn.n_logs):
+        print("<th>%s</th>" % common.log_letter_of(i))
+    print("</tr>")
+    for c in cl:
+        if c.event == "ls":
+            # link state computed costs and router reachability
+            plf = c.router # cruel overload here: router is a parsed line not a router
+            # Processing: Computed costs: {u'A': 1, u'C': 51L, u'B': 101L}
+            print("<tr><td>%s</td> <td>%s</td>" % (plf.datetime, ("%s#%d" % (plf.router.iname, plf.lineno))))
+            try:
+                line = plf.line
+                sti = line.find("{")
+                line = line[sti:]
+                l_dict = ast.literal_eval(line)
+                costs_row = new_costs_row(PEER_COST_ABSENT)
+                for i in range(0, comn.n_logs):
+                    if len(comn.routers[i]) > 0:
+                        tst_name = comn.routers[i][0].container_name
+                        if tst_name in l_dict:
+                            val = l_dict[tst_name]
+                            costs_row[tst_name] = val
+                        elif i == plf.router.log_index:
+                            val = text.nbsp()
+                        else:
+                            val = "<span style=\"background-color:orange\">%s</span>" % (text.nbsp() * 2)
+                    else:
+                        val = "<span style=\"background-color:orange\">%s</span>" % (text.nbsp() * 2)
+                    print("<td>%s</td>" % val)
+                # track costs published when there is no column to put the number
+                tgts = costs_pub[c.router.router.container_name]
+                for k, v in common.dict_iteritems(l_dict):
+                    if k not in comn.router_ids:
+                        if k not in tgts:
+                            tgts.append(k)  # this cost went unreported
+                # update this router's cost view in running table
+                if plf.router.is_interior():
+                    cur_costs[plf.router.container_name] = costs_row
+            except:
+                pass
+            print("</tr>")
+            # if the costs are stable across all routers then put an indicator in table
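+            # Illustrative check (hypothetical routers A and B): costs are "stable" only
+            # when cur_costs['A']['B'] == cur_costs['B']['A'] and both are greater than zero.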
+            costs_stable = True
+            for c_rtr in interior_rtrs:
+                for r_rtr in interior_rtrs:
+                    if r_rtr != c_rtr \
+                            and (cur_costs[r_rtr][c_rtr] != cur_costs[c_rtr][r_rtr] \
+                            or cur_costs[c_rtr][r_rtr] <= PEER_COST_ABSENT):
+                        costs_stable = False
+                        break
+                if not costs_stable:
+                    break
+            if costs_stable:
+                print("<tr><td><span style=\"background-color:green\">stable</span></td></tr>")
+        else:
+            # restart
+            print("<tr><td>%s</td> <td>%s</td>" % (c.datetime, ("%s restart" % (c.router.iname))))
+            for i in range(0, comn.n_logs):
+                color = "green" if i == c.router.log_index else "orange"
+                print("<td><span style=\"background-color:%s\">%s</span></td>" % (color, text.nbsp() * 2))
+            print("</tr>")
+            if c.router.is_interior():
+                cur_costs[c.router.container_name] = new_costs_row(PEER_COST_REBOOT)
+    print("</table>")
+    print("<br>")
+
+    # maybe display cost declarations that were not displayed
+    costs_clean = True
+    for k, v in common.dict_iteritems(costs_pub):
+        if len(v) > 0:
+            costs_clean = False
+            break
+    if not costs_clean:
+        print("<h4>Router costs declared in logs but not displayed in Link state cost table</h4>")
+        print("<table>")
+        print("<tr><th>Router</th><th>Peers whose logs are absent</th></tr>")
+        for k, v in common.dict_iteritems(costs_pub):
+            if len(v) > 0:
+                print("<tr><td>%s</td><td>%s</td></tr>" % (k, str(v)))
+        print("</table>")
+        print("<br>")
+
+    print("<a href=\"javascript:toggle_node('ls_costs')\">%s%s</a> Link state costs data<br>" %
+          (text.lozenge(), text.nbsp()))
+    print(" <div width=\"100%%\"; "
+          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
+          "id=\"ls_costs\">")
+    print("<table>")
+    print("<tr><th>Time</th> <th>Router</th> <th>Name</th> <th>Log</th></tr>")
+    for plf in ls_tree:
+        if "costs" in plf.line:
+            print("<tr><td>%s</td> <td>%s</td>" % (plf.datetime, ("%s#%d" % (plf.router.iname, plf.lineno))))
+            print("<td>%s</td>" % plf.router.container_name)
+            print("<td>%s</td></tr>" % plf.line)
+    print("</table>")
+    print("</div>")
+
+    print("<hr>")
+
+    print("</body>")
+
+
+def main(argv):
+    try:
+        main_except(argv)
+        return 0
+    except Exception as e:
+        traceback.print_exc()
+        return 1
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))

http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/nicknamer.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/nicknamer.py b/bin/log_scraper/nicknamer.py
new file mode 100755
index 0000000..9dc2f9f
--- /dev/null
+++ b/bin/log_scraper/nicknamer.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+import cgi
+
+class ShortNames():
+    '''
+    Name shortener.
+    The short name for display is "name_" + index(longName)
+    Embellish the display name with an html popup
+    Link and endpoint names, and data are tracked separately
+    Names longer than threshold are shortened
+    Each class has a prefix used when the table is dumped as HTML
+    '''
+    def __init__(self, prefixText, _threshold=25):
+        self.longnames = []
+        self.prefix = prefixText
+        self.threshold = _threshold
+
+    def translate(self, lname, show_popup=False):
+        '''
+        Translate a long name into a short name, maybe.
+        Memorize all names, translated or not
+        Strip leading/trailing double quotes
+        :param lname: the name
+        :return: the name unchanged if it is shorter than the threshold; otherwise the
+        shortened name, wrapped (when show_popup is True) in an HTML span whose popup
+        shows the long name.
+        '''
+        if lname.startswith("\"") and lname.endswith("\""):
+            lname = lname[1:-1]
+        try:
+            idx = self.longnames.index(lname)
+        except:
+            self.longnames.append(lname)
+            idx = self.longnames.index(lname)
+        # return as-given if short enough
+        if len(lname) < self.threshold:
+            return lname
+        if show_popup:
+            return "<span title=\"" + cgi.escape(lname) + "\">" + self.prefix + "_" + str(idx) + "</span>"
+        else:
+            return self.prefix + "_" + str(idx)
+
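+    # Illustrative use of translate() (hypothetical names; threshold of 15 as used for links):
+    #   s = ShortNames("link", 15)
+    #   s.translate("qd.hello")                      -> "qd.hello"  (shorter than threshold)
+    #   s.translate("a-rather-long-amqp-link-name")  -> "link_1"    (memorized at index 1)
+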
+    def len(self):
+        return len(self.longnames)
+
+    def get_prefix(self):
+        return self.prefix
+
+    def shortname(self, idx):
+        name = self.longnames[idx]
+        if len(name) < self.threshold:
+            return name
+        return self.prefix + "_" + str(idx)
+
+    def prefixname(self, idx):
+        return self.prefix + "_" + str(idx)
+
+    def sname_to_popup(self, sname):
+        if not sname.startswith(self.prefix):
+            raise ValueError("Short name '%s' does not start with prefix '%s'" % (sname, self.prefix))
+        try:
+            lname = self.longnames[ int(sname[ (len(self.prefix) + 1): ])]
+        except:
+            raise ValueError("Short name '%s' did not translate to a long name" % (sname))
+        return "<span title=\"" + cgi.escape(lname) + "\">" + sname + "</span>"
+
+    def longname(self, idx, cgi_escape=False):
+        '''
+        Get the cgi.escape'd long name
+        :param idx:
+        :param cgi_escape: true if caller wants the string for html display
+        :return:
+        '''
+        return cgi.escape(self.longnames[idx]) if cgi_escape else self.longnames[idx]
+
+    def htmlDump(self, with_link=False):
+        '''
+        Print the name table as an unnumbered list to stdout
+        long names are cgi.escape'd
+        :param with_link: true if the link name is hyperlinked targeting itself
+        :return: null
+        '''
+        if len(self.longnames) > 0:
+            print ("<h3>" + self.prefix + " Name Index</h3>")
+            print ("<ul>")
+            for i in range(0, len(self.longnames)):
+                name = self.prefix + "_" + str(i)
+                if with_link:
+                    name = "<a href=\"#%s\">%s</a>" % (name, name)
+                print ("<li> " + name + " - " + cgi.escape(self.longnames[i]) + "</li>")
+            print ("</ul>")
+
+
+class Shorteners():
+    def __init__(self):
+        self.short_link_names = ShortNames("link", 15)
+        self.short_addr_names = ShortNames("address")
+        self.short_data_names = ShortNames("transfer", 2)
+        self.short_peer_names = ShortNames("peer")
+        self.short_rtr_names  = ShortNames("router")
+
+
+if __name__ == "__main__":
+    pass


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@qpid.apache.org
For additional commands, e-mail: commits-help@qpid.apache.org