You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@qpid.apache.org by tr...@apache.org on 2018/11/27 14:27:25 UTC
[2/2] qpid-dispatch git commit: DISPATCH-1191 - Import crolke's
adverbl project This closes #421
DISPATCH-1191 - Import crolke's adverbl project
This closes #421
Project: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/repo
Commit: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/commit/5c3411a1
Tree: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/tree/5c3411a1
Diff: http://git-wip-us.apache.org/repos/asf/qpid-dispatch/diff/5c3411a1
Branch: refs/heads/master
Commit: 5c3411a1a66e883f1c509b2668f870feac26134b
Parents: 42b4ab5
Author: Chuck Rolke <cr...@redhat.com>
Authored: Tue Nov 13 15:53:37 2018 -0500
Committer: Ted Ross <tr...@redhat.com>
Committed: Tue Nov 27 09:26:40 2018 -0500
----------------------------------------------------------------------
bin/log_scraper/README.md | 179 ++++
bin/log_scraper/amqp_detail.py | 633 ++++++++++++++
bin/log_scraper/common.py | 139 +++
bin/log_scraper/main.py | 785 +++++++++++++++++
bin/log_scraper/nicknamer.py | 123 +++
bin/log_scraper/parser.py | 930 +++++++++++++++++++++
bin/log_scraper/router.py | 226 +++++
bin/log_scraper/splitter.py | 94 +++
bin/log_scraper/test_data.py | 54 ++
bin/log_scraper/test_data/A-two-instances.log | 113 +++
bin/log_scraper/test_data/test_data.txt | 28 +
bin/log_scraper/text.py | 137 +++
12 files changed, 3441 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/README.md
----------------------------------------------------------------------
diff --git a/bin/log_scraper/README.md b/bin/log_scraper/README.md
new file mode 100644
index 0000000..64ec977
--- /dev/null
+++ b/bin/log_scraper/README.md
@@ -0,0 +1,179 @@
+# Scraper - Render qpid-dispatch log files
+
+Scraper is a spinoff of https://github.com/ChugR/Adverb that uses qpid-dispatch log
+files as the data source instead of pcap trace files. Scraper is a Python processing
+engine that does not require Wireshark or any network trace capture utilities.
+
+## Apache License, Version 2.0
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+
+## Concepts
+
+Scraper is a data scraping program. It reads qpid-dispatch router log files,
+categorizes and sorts the data, and produces an HTML summary.
+
+From each log file Scraper extracts:
+ * Router version
+ * Router container name
+ * Router restart times. A single log file may contain data from several router
+ reboot instances.
+ * Router link state calculation reports
+ * Interrouter and client connections
+ * AMQP facts
+ * Connection peers
+ * Link pair establishment
+ * Transfer traffic
+ * Message disposition
+ * Flow and credit propagation
+
+ Scraper sorts these facts with microsecond precision using the log timestamps.
+
+ Then Scraper merges the data from any number (as long as that number is less than 27!)
+ of independent log files into a single view.
+
+ Next Scraper performs some higher-level analysis.
+
+ * Routers are identified by letter rather than by the container name: 'A', 'B', and
+ so on. Log data in a file is grouped into instances and is identified by a number
+ for that router instance: 'A0', 'A1', and so on.
+ * Per router each AMQP data log entry is sorted into per-connection data lists.
+ * Connection data lists are searched to discover router-to-router and router-to-client
+ connection pairs.
+ * Per connection data are subdivided into per-session and per-link lists, sorting
+ the AMQP data into per-link-only views.
+ * Bulk AMQP data may be shown or hidden on arbitrary per-connection selections.
+ * Noteworthy AMQP frames are identified. By hand these are hard to find.
+ * AMQP errors
+ * Presettled transfers
+ * Transfers with 'more' bit set
+ * Resumed transfers
+ * Aborted transfers
+ * Flow with 'drain' set
+ * Transfer messages are sorted by signature. Then a table is made showing where
+ each message leaves or arrives over a connection.
+ * Settlement state for each unsettled transfer is identified, displayed, and
+ shown with delta and elapsed time values. See example in the Advanced section.
+ * Link name propagation for each named link is shown in a table.
+ * Router, peer, and link names can get really long. Nicknames for each are used
+ with popups showing the full name.
+ * Transfer data is identified with nicknames but without the popups. The popups
+ were so big that Firefox refused to show them; so forget it and they weren't useful anyway.
+ * Router link state cost calculations are merged with router restart records to
+ create a comprehensive link state cost view. Routers may publish cost reports that
+ do not include all other routers. In this case the other routers are identified
+ visually to indicate that they are unreachable.
+
+### The basics
+
+* Enable router logging
+
+The routers need to generate proper logging for Scraper.
+The information classes are exposed by enabling log levels.
+
+| Log level | Information |
+|----------------|---------------------------|
+| ROUTER info | Router version |
+| SERVER info | Router restart discovery |
+| SERVER trace | AMQP control and data |
+| ROUTER_LS info | Router link state reports |
+
+
+* Run your tests to populate log files used as Scraper input.
+
+* Run Scraper to generate web content
+
+ bin/scraper/main.py somefile.log > somefile.html
+
+    bin/scraper/main.py *.log > somefile.html
+
+* Profit
+
+ firefox somefile.html
+
+### Advanced
+
+* Merging multiple qpid-dispatch log files
+
+Scraper accepts multiple log files names in the command line and
+merges the log data according to the router log timestamps.
+
+ bin/scraper/main.py A.log B.log C.log > abc.html
+
+Note that the qpid-dispatch host system clocks for merged log files
+must be synchronized to within a few microseconds in order for the
+result to be useful. This is easiest to achieve when the routers are
+run on the same CPU core on a single system. Running Fedora 27 and 28
+on two hosts in a router network where the routers run _ntp_ to the same
+time provider produces perfectly acceptable results.
+
+Scraper does a decent job merging log files created within a
+qpid-dispatch self test.
+
+* Wow, that's a lot of data
+
+Indeed it is and good luck figuring it out. Sometimes, though, it's too much.
+The AMQP transfer data analysis is the worst offender in terms of CPU time,
+run-time memory usage, and monstrous html output files.
+Scraper provides one command line switch to
+turn off the data analysis:
+
+ bin/scraper/main.py --no-data FILE [FILE ...]
+
+In no-data mode AMQP transfer, disposition, and flow frames in the log files are
+discarded. The resulting web page still includes lots of useful information with
+connection info, link name propagation, and link state analysis.
+
+* How to read the transfer analysis tables. Here's an instance:
+
+
+|Src |Time |Rtr|ConnId|Dir|ConnId|Peer |T delta |T elapsed |Settlement |S elapsed
+|-------|---------------|---|------|---|------|------|---------|----------|-----------------------------|---------
+|A0_2035|09:50:52.027975|A0 |A0_11 |<- | |peer_7|0.000000 |0.000000 |(accepted settled 0.005142 S)|0.005142
+|A0_2071|09:50:52.028556|A0 |A0_6 |-> |D0_4 |D |0.000581 |0.000581 |(accepted settled 0.004253 S)|0.004834
+|D0_1587|09:50:52.028696|D0 |D0_4 |<- |A0_6 |A |0.000140 |0.000721 |(accepted settled 0.003988 S)|0.004709
+|D0_1612|09:50:52.029260|D0 |D0_1 |-> |C0_6 |C |0.000564 |0.001285 |(accepted settled 0.003044 S)|0.004329
+|C0_1610|09:50:52.029350|C0 |C0_6 |<- |D0_1 |D |0.000090 |0.001375 |(accepted settled 0.002846 S)|0.004221
+|C0_1625|09:50:52.029672|C0 |C0_1 |-> |B0_5 |B |0.000322 |0.001697 |(accepted settled 0.002189 S)|0.003886
+|B0_1438|09:50:52.029760|B0 |B0_5 |<- |C0_1 |C |0.000088 |0.001785 |(accepted settled 0.002002 S)|0.003787
+|B0_1451|09:50:52.030117|B0 |B0_7 |-> | |peer_7|0.000357 |0.002142 |(accepted settled 0.001318 S)|0.003460
+
+Each row in this table represents the facts about when a single transfer and its corresponding settlement was seen entering or exiting a router.
+
+| Field | Contents |
+|--------------|----------|
+|Src | Router instance and file line number where the transfer was seen|
+| Time | timestamp
+| Rtr | Router letter id and instance
+| ConnId | Router connection id
+| Dir | transfer direction. _<-_ indicates into the router, _->_ indicates out of the router
+| ConnId | peer's connection id. Blank if the peer is a normal client and not a router.
+| Peer         | Peer's name. _peer_7_ would show the peer's container name in a popup.
+| T delta | Time since previous row
+| T elapsed | Time since the message first entered the system
+| Settlement | Settlement state and time delta since message time in column 2 for this row. The settlement disposition log line is hyperlinked from the word _accepted_.
+| S elapsed | Settlement elapsed time. This is the difference between the accepted disposition log record and the time when the message first entered the system.
+
+Row-by-row it is easiest to read each line from left to right
+* A0 connection 11 received the transfer from peer_7.
+* A0 connection 6 sent the message to D0 connection 4.
+* D0 connection 4 received the message from A0 connection 6.
+
+and so on. This message came from a sender on peer_7, went through routers A, D, C, and B, and finally was
+returned to a listener on peer_7. The receiver received the message 0.002142 S after the sender sent it. The
+sender received the accepted disposition 0.005142 S after the sender sent the message.
+
+The transmit times are in order from top to bottom and the settlement times are in order from bottom to top.
+
+This table will morph a little if one of the routers is missing from the analysis. If log file D.log was not
+presented to Scraper then the table would not make as much sense as when all logs are included.
http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/amqp_detail.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/amqp_detail.py b/bin/log_scraper/amqp_detail.py
new file mode 100755
index 0000000..042a19b
--- /dev/null
+++ b/bin/log_scraper/amqp_detail.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import traceback
+
+import common
+import text
+
+"""
+Given a map of all connections with lists of the associated frames
+analyze and show per-connection, per-session, and per-link details.
+
+This is done in a two-step process:
+ * Run through the frame lists and generate an intermediate structure
+   with the details for display.
+ * Generate the html from the detail structure.
+This strategy allows for a third step that would allow more details
+to be gleaned from the static details. For instance, if router A
+sends a transfer to router B then router A's details could show
+how long it took for the transfer to reach router B. Similarly
+router B's details could show how long ago router A sent the transfer.
+"""
+
+
+class ConnectionDetail():
+ """
+ Holds facts about sessions over the connection's lifetime
+ """
+
+ def __init__(self, id):
+ # id in form 'A_15':
+ # A is the router logfile key
+ # 15 is the log connection number [15]
+ self.id = id
+
+ # seq_no number differentiates items that otherwise have same identifiers.
+ # Sessions, for example: a given connection may have N distinct session
+ # with local channel 0.
+ self.seq_no = 0
+
+ # combined amqp_error frames on this connection
+ self.amqp_errors = 0
+
+ # session_list holds all SessionDetail records either active or retired
+ # Sessions for a connection are identified by the local channel number.
+ # There may be many sessions all using the same channel number.
+ # This list holds all of them.
+ self.session_list = []
+
+ # this map indexed by the channel refers to the current item in the session_list
+ self.chan_map = {}
+
+ # count of AMQP performatives for this connection that are not accounted
+ # properly in session and link processing.
+ # Server Accepting, SASL mechs, init, outcome, AMQP, and so on
+ self.unaccounted_frame_list = []
+
+ def FindSession(self, channel):
+ """
+ Find the current session by channel number
+ :param channel: the performative channel
+ :return: the session or None
+ """
+ return self.chan_map[channel] if channel in self.chan_map else None
+
+ def GetId(self):
+ return self.id
+
+ def GetSeqNo(self):
+ self.seq_no += 1
+ return str(self.seq_no)
+
+ def EndChannel(self, channel):
+ # take existing session out of connection chan map
+ if channel in self.chan_map:
+ del self.chan_map[channel]
+
+ def GetLinkEventCount(self):
+ c = 0
+ for session in self.session_list:
+ c += session.GetLinkEventCount()
+ return c
+
+
+class SessionDetail:
+ """
+ Holds facts about a session
+ """
+
+ def __init__(self, conn_detail, conn_seq, start_time):
+ # parent connection
+ self.conn_detail = conn_detail
+
+ # some seq number
+ self.conn_epoch = conn_seq
+
+ # Timing
+ self.time_start = start_time
+ self.time_end = start_time
+
+ self.amqp_errors = 0
+
+ self.channel = -1
+ self.peer_chan = -1
+
+ self.direction = ""
+
+ # seq_no number differentiates items that otherwise have same identifiers.
+ # links for example
+ self.seq_no = 0
+
+ self.log_line_list = []
+
+ # link_list holds LinkDetail records
+ # Links for a session are identified by a (handle, remote-handle) number pair.
+ # There may be many links all using the same handle pairs.
+ # This list holds all of them.
+ self.link_list = []
+
+ # link_list holds all links either active or retired
+ # this map indexed by the handle refers to the current item in the link_list
+ self.input_handle_link_map = {} # link created by peer
+ self.output_handle_link_map = {} # link created locally
+
+ # Link name in attach finds link details in link_list
+ # This map contains the link handle to disambiguate the name
+ self.link_name_to_detail_map = {}
+ #
+ # The map contains the pure link name and is used only to resolve name collisions
+ self.link_name_conflict_map = {}
+
+ # count of AMQP performatives for this connection that are not accounted
+ # properly in link processing
+ self.session_frame_list = []
+
+ # Session dispositions
+ # Sender/receiver dispositions may be sent or received
+ self.rx_rcvr_disposition_map = {} # key=delivery id, val=disposition plf
+ self.rx_sndr_disposition_map = {} # key=delivery id, val=disposition plf
+ self.tx_rcvr_disposition_map = {} # key=delivery id, val=disposition plf
+ self.tx_sndr_disposition_map = {} # key=delivery id, val=disposition plf
+
+ def FrameCount(self):
+ count = 0
+ for link in self.link_list:
+ count += len(link.frame_list)
+ count += len(self.session_frame_list)
+ return count
+
+ def FindLinkByName(self, attach_name, link_name_unambiguous, parsed_log_line):
+ # find conflicted name
+ cnl = None
+ if attach_name in self.link_name_conflict_map:
+ cnl = self.link_name_conflict_map[attach_name]
+ if cnl.input_handle == -1 and cnl.output_handle == -1:
+ cnl = None
+ # find non-conflicted name
+ nl = None
+ if link_name_unambiguous in self.link_name_to_detail_map:
+ nl = self.link_name_to_detail_map[link_name_unambiguous]
+ if nl.input_handle == -1 and nl.output_handle == -1:
+ nl = None
+ # report conflict
+ # TODO: There's an issue with this logic generating false positives
+ # if nl is None and (not cnl is None):
+ # parsed_log_line.data.amqp_error = True
+ # parsed_log_line.data.web_show_str += " <span style=\"background-color:yellow\">Link name conflict</span>"
+ # return unambiguous link
+ return nl
+
+ def FindLinkByHandle(self, handle, find_remote):
+ """
+ Find the current link by handle number
+ qualify lookup based on packet direction
+ :param link: the performative channel
+ :param dst_is_broker: packet direction
+ :return: the session or None
+ """
+ if find_remote:
+ return self.input_handle_link_map[handle] if handle in self.input_handle_link_map else None
+ else:
+ return self.output_handle_link_map[handle] if handle in self.output_handle_link_map else None
+
+ def GetId(self):
+ return self.conn_detail.GetId() + "_" + str(self.conn_epoch)
+
+ def GetSeqNo(self):
+ self.seq_no += 1
+ return self.seq_no
+
+ def DetachOutputHandle(self, handle):
+ # take existing link out of session handle map
+ if handle in self.output_handle_link_map:
+ nl = self.output_handle_link_map[handle]
+ del self.output_handle_link_map[handle]
+ nl.output_handle = -1
+
+ def DetachInputHandle(self, handle):
+ # take existing link out of session remote handle map
+ if handle in self.input_handle_link_map:
+ nl = self.input_handle_link_map[handle]
+ del self.input_handle_link_map[handle]
+ nl.input_handle = -1
+
+ def DetachHandle(self, handle, is_remote):
+ if is_remote:
+ self.DetachInputHandle(handle)
+ else:
+ self.DetachOutputHandle(handle)
+
+ def GetLinkEventCount(self):
+ c = 0
+ for link in self.link_list:
+ c += link.GetLinkEventCount()
+ return c
+
+
+class LinkDetail():
+ """
+ Holds facts about a link endpoint
+ This structure binds input and output links with same name
+ """
+
+ def __init__(self, session_detail, session_seq, link_name, start_time):
+ # parent session
+ self.session_detail = session_detail
+
+ # some seq number
+ self.session_seq = session_seq
+
+ # link name
+ self.name = link_name # plf.data.link_short_name
+ self.display_name = link_name # show short name; hover to see long name
+
+ # Timing
+ self.time_start = start_time
+ self.time_end = start_time
+
+ self.amqp_errors = 0
+
+ # paired handles
+ self.output_handle = -1
+ self.input_handle = -1
+
+ # link originator
+ self.direction = ""
+ self.is_receiver = True
+ self.first_address = ''
+
+ # set by sender
+ self.snd_settle_mode = ''
+ self.sender_target_address = "none"
+ self.sender_class = ''
+
+ # set by receiver
+ self.rcv_settle_mode = ''
+ self.receiver_source_address = "none"
+ self.receiver_class = ''
+
+ self.frame_list = []
+
+ def GetId(self):
+ return self.session_detail.GetId() + "_" + str(self.session_seq)
+
+ def FrameCount(self):
+ return len(self.frame_list)
+
+
+class AllDetails():
+ #
+ #
+ def format_errors(self, n_errors):
+ return ("<span style=\"background-color:yellow\">%d</span>" % n_errors) if n_errors > 0 else ""
+
+ def classify_connection(self, id):
+ """
+ Return probable connection class based on the kinds of links the connection uses.
+ TODO: This assumes that the connection has one session and one
+ :param id:
+ :return:
+ """
+ return "oops"
+
+ def time_offset(self, ttest, t0):
+ """
+ Return a string time delta between two datetime objects in seconds formatted
+ to six significant decimal places.
+ :param ttest:
+ :param t0:
+ :return:
+ """
+ delta = ttest - t0
+ t = float(delta.seconds) + float(delta.microseconds) / 1000000.0
+ return "%0.06f" % t
+
+ def links_in_connection(self, id):
+ conn_details = self.conn_details[id]
+ n_links = 0
+ for sess in conn_details.session_list:
+ n_links += len(sess.link_list)
+ return n_links
+
+ def settlement_display(self, transfer, disposition):
+ """
+ Generate the details for a disposition settlement
+ :param transfer: plf
+ :param disposition: plf
+ :return: display string
+ """
+ state = disposition.data.disposition_state # accept, reject, release, ...
+ if state != "accepted":
+ state = "<span style=\"background-color:orange\">%s</span>" % state
+ l2disp = "<a href=\"#%s\">%s</a>" % (disposition.fid, state)
+ sttld = "settled" if disposition.data.settled == "true" else "unsettled"
+ delay = self.time_offset(disposition.datetime, transfer.datetime)
+ return "(%s %s %s S)" % (l2disp, sttld, delay)
+
+ def resolve_settlement(self, link, transfer, rcv_disposition, snd_disposition):
+ """
+ Generate the settlement display string for this transfer.
+ :param link: linkDetails - holds settlement modes
+ :param transfer: plf of the transfer frame
+ :param rcv_disposition: plf of receiver role disposition
+ :param snd_disposition: plf of sender role disposition
+ :return: display string
+ """
+ if transfer.data.settled is not None and transfer.data.settled == "true":
+ result = "transfer presettled"
+ if rcv_disposition is not None:
+ sys.stderr.write("WARING: Receiver disposition for presettled message. connid:%s, line:%s\n" %
+ (rcv_disposition.data.conn_id, rcv_disposition.lineno))
+ if snd_disposition is not None:
+ sys.stderr.write("WARING: Sender disposition for presettled message. connid:%s, line:%s\n" %
+ (snd_disposition.data.conn_id, snd_disposition.lineno))
+ else:
+ if "1" in link.snd_settle_mode:
+ # link mode sends only settled transfers
+ result = "link presettled"
+ if rcv_disposition is not None:
+ sys.stderr.write("WARING: Receiver disposition for presettled link. connid:%s, line:%s\n" %
+ (rcv_disposition.data.conn_id, rcv_disposition.lineno))
+ if snd_disposition is not None:
+ sys.stderr.write("WARING: Sender disposition for presettled link. connid:%s, line:%s\n" %
+ (snd_disposition.data.conn_id, snd_disposition.lineno))
+ else:
+ # transfer unsettled and link mode requires settlement
+ if rcv_disposition is not None:
+ rtext = self.settlement_display(transfer, rcv_disposition)
+ transfer.data.final_disposition = rcv_disposition
+ if snd_disposition is not None:
+ stext = self.settlement_display(transfer, snd_disposition)
+ transfer.data.final_disposition = snd_disposition
+
+ if "0" in link.rcv_settle_mode:
+ # one settlement expected
+ if rcv_disposition is not None:
+ result = rtext
+ if snd_disposition is not None:
+ sys.stderr.write("WARING: Sender disposition for single first(0) settlement link. "
+ "connid:%s, line:%s\n" %
+ (snd_disposition.data.conn_id, snd_disposition.lineno))
+ else:
+ result = "rcvr: absent"
+ else:
+ # two settlements expected
+ if rcv_disposition is not None:
+ result = "rcvr: " + rtext
+ if snd_disposition is not None:
+ result += ", sndr: " + stext
+ else:
+ result += ", sndr: absent"
+ else:
+ result = "rcvr: absent"
+ if snd_disposition is not None:
+ result += ", sndr: " + stext
+ else:
+ result += ", sndr: absent"
+ return result
+
+ def __init__(self, _router, _common):
+ self.rtr = _router
+ self.comn = _common
+
+ # conn_details - AMQP analysis
+ # key= connection id '1', '2'
+ # val= ConnectionDetails
+ # for each connection, for each session, for each link:
+ # what happened
+ self.conn_details = {}
+
+ for conn in self.rtr.conn_list:
+ id = self.rtr.conn_id(conn)
+ self.conn_details[id] = ConnectionDetail(id)
+ conn_details = self.conn_details[id]
+ conn_frames = self.rtr.conn_to_frame_map[id]
+ for plf in conn_frames:
+ pname = plf.data.name
+ if plf.data.amqp_error:
+ conn_details.amqp_errors += 1
+ if pname in ['', 'open', 'close']:
+ conn_details.unaccounted_frame_list.append(plf)
+ continue
+ # session required
+ channel = plf.data.channel
+ sess_details = conn_details.FindSession(channel)
+ if sess_details == None:
+ sess_details = SessionDetail(conn_details, conn_details.GetSeqNo(), plf.datetime)
+ conn_details.session_list.append(sess_details)
+ conn_details.EndChannel(channel)
+ conn_details.chan_map[channel] = sess_details
+ sess_details.direction = plf.data.direction
+ sess_details.channel = channel
+ if plf.data.amqp_error:
+ sess_details.amqp_errors += 1
+
+ if pname in ['begin', 'end', 'disposition']:
+ sess_details.session_frame_list.append(plf)
+
+ elif pname in ['attach']:
+ handle = plf.data.handle # proton local handle
+ link_name = plf.data.link_short_name
+ link_name_unambiguous = link_name + "_" + str(handle)
+ error_was = plf.data.amqp_error
+ nl = sess_details.FindLinkByName(link_name, link_name_unambiguous, plf)
+ # if finding an ambiguous link name generated an error then propagate to session/connection
+ if not error_was and plf.data.amqp_error:
+ conn_details.amqp_errors += 1
+ sess_details.amqp_errors += 1
+ if nl is None:
+ # Creating a new link from scratch resulting in a half attached link pair
+ nl = LinkDetail(sess_details, sess_details.GetSeqNo(), link_name, plf.datetime)
+ sess_details.link_list.append(nl)
+ sess_details.link_name_to_detail_map[link_name_unambiguous] = nl
+ sess_details.link_name_conflict_map[link_name] = nl
+ nl.display_name = plf.data.link_short_name_popup
+ nl.direction = plf.data.direction
+ nl.is_receiver = plf.data.role == "receiver"
+ nl.first_address = plf.data.source if nl.is_receiver else plf.data.target
+ if plf.data.amqp_error:
+ nl.amqp_errors += 1
+
+ if plf.data.direction_is_in():
+ # peer is creating link
+ nl.input_handle = handle
+ sess_details.DetachInputHandle(handle)
+ sess_details.input_handle_link_map[handle] = nl
+ else:
+ # local is creating link
+ nl.output_handle = handle
+ sess_details.DetachOutputHandle(handle)
+ sess_details.output_handle_link_map[handle] = nl
+ if plf.data.is_receiver:
+ nl.rcv_settle_mode = plf.data.rcv_settle_mode
+ nl.receiver_source_address = plf.data.source
+ nl.receiver_class = plf.data.link_class
+ else:
+ nl.snd_settle_mode = plf.data.snd_settle_mode
+ nl.sender_target_address = plf.data.target
+ nl.sender_class = plf.data.link_class
+ nl.frame_list.append(plf)
+
+ elif pname in ['detach']:
+ ns = conn_details.FindSession(channel)
+ if ns is None:
+ conn_details.unaccounted_frame_list.append(plf)
+ continue
+ handle = plf.data.handle
+ nl = ns.FindLinkByHandle(handle, plf.data.direction_is_in())
+ ns.DetachHandle(handle, plf.data.direction_is_in())
+ if nl is None:
+ ns.session_frame_list.append(plf)
+ else:
+ if plf.data.amqp_error:
+ nl.amqp_errors += 1
+ nl.frame_list.append(plf)
+
+ elif pname in ['transfer', 'flow']:
+ ns = conn_details.FindSession(channel)
+ if ns is None:
+ conn_details.unaccounted_frame_list.append(plf)
+ continue
+ handle = plf.data.handle
+ nl = ns.FindLinkByHandle(handle, plf.data.direction_is_in())
+ if nl is None:
+ ns.session_frame_list.append(plf)
+ else:
+ if plf.data.amqp_error:
+ nl.amqp_errors += 1
+ nl.frame_list.append(plf)
+ # identify and index dispositions
+ for conn in self.rtr.conn_list:
+ id = self.rtr.conn_id(conn)
+ conn_detail = self.conn_details[id]
+ for sess in conn_detail.session_list:
+ # for each disposition add state to disposition_map
+ for splf in sess.session_frame_list:
+ if splf.data.name == "disposition":
+ if splf.data.direction == "<-":
+ sdispmap = sess.rx_rcvr_disposition_map if splf.data.is_receiver else sess.rx_sndr_disposition_map
+ else:
+ sdispmap = sess.tx_rcvr_disposition_map if splf.data.is_receiver else sess.tx_sndr_disposition_map
+ for sdid in range(int(splf.data.first), (int(splf.data.last) + 1)):
+ did = str(sdid)
+ if did in sdispmap:
+ sys.stderr.write("ERROR: Delivery ID collision in disposition map. connid:%s, \n" %
+ (splf.data.conn_id))
+ sdispmap[did] = splf
+
+ def show_html(self):
+ for conn in self.rtr.conn_list:
+ id = self.rtr.conn_id(conn)
+ conn_detail = self.rtr.details.conn_details[id]
+ conn_frames = self.rtr.conn_to_frame_map[id]
+ print("<a name=\"cd_%s\"></a>" % id)
+ # This lozenge shows/hides the connection's data
+ print("<a href=\"javascript:toggle_node('%s_data')\">%s%s</a>" %
+ (id, text.lozenge(), text.nbsp()))
+ dir = self.rtr.conn_dir[id] if id in self.rtr.conn_dir else ""
+ peer = self.rtr.conn_peer_display.get(id, "") # peer container id
+ peerconnid = self.comn.conn_peers_connid.get(id, "")
+ # show the connection title
+ print("%s %s %s %s (nFrames=%d) %s<br>" % \
+ (id, dir, peerconnid, peer, len(conn_frames), self.format_errors(conn_detail.amqp_errors)))
+ # data div
+ print("<div id=\"%s_data\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" % id)
+
+ # unaccounted frames
+ print("<a href=\"javascript:toggle_node('%s_data_unacc')\">%s%s</a>" %
+ (id, text.lozenge(), text.nbsp()))
+ # show the connection-level frames
+ errs = sum(1 for plf in conn_detail.unaccounted_frame_list if plf.data.amqp_error)
+ print("Connection-based entries %s<br>" % self.format_errors(errs))
+ print("<div id=\"%s_data_unacc\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" % id)
+ for plf in conn_detail.unaccounted_frame_list:
+ print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str, "<br>")
+ print("</div>") # end unaccounted frames
+
+ # loop to print session details
+ for sess in conn_detail.session_list:
+ # show the session toggle and title
+ print("<a href=\"javascript:toggle_node('%s_sess_%s')\">%s%s</a>" %
+ (id, sess.conn_epoch, text.lozenge(), text.nbsp()))
+ print("Session %s: channel: %s, peer channel: %s; Time: start %s, Counts: frames: %d %s<br>" % \
+ (sess.conn_epoch, sess.channel, sess.peer_chan, sess.time_start, \
+ sess.FrameCount(), self.format_errors(sess.amqp_errors)))
+ print("<div id=\"%s_sess_%s\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" %
+ (id, sess.conn_epoch))
+ # show the session-level frames
+ errs = sum(1 for plf in sess.session_frame_list if plf.data.amqp_error)
+ print("<a href=\"javascript:toggle_node('%s_sess_%s_unacc')\">%s%s</a>" %
+ (id, sess.conn_epoch, text.lozenge(), text.nbsp()))
+ print("Session-based entries %s<br>" % self.format_errors(errs))
+ print("<div id=\"%s_sess_%s_unacc\" style=\"display:none; margin-bottom: 2px; margin-left: 10px\">" %
+ (id, sess.conn_epoch))
+ for plf in sess.session_frame_list:
+ print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str, "<br>")
+ print("</div>") # end <id>_sess_<conn_epoch>_unacc
+ # loops to print session link details
+ # first loop prints link table
+ print("<table")
+ print("<tr><th>Link</th> <th>Dir</th> <th>Role</th> <th>Address</th> <th>Class</th> "
+ "<th>snd-settle-mode</th> <th>rcv-settle-mode</th> <th>Start time</th> <th>Frames</th> "
+ "<th>AMQP errors</tr>")
+ for link in sess.link_list:
+ # show the link toggle and title
+ showthis = ("<a href=\"javascript:toggle_node('%s_sess_%s_link_%s')\">%s</a>" %
+ (id, sess.conn_epoch, link.session_seq, link.display_name))
+ role = "receiver" if link.is_receiver else "sender"
+ print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>"
+ "<td>%s</td><td>%d</td><td>%s</td></tr>" % \
+ (showthis, link.direction, role, link.first_address,
+ (link.sender_class + '-' + link.receiver_class), link.snd_settle_mode,
+ link.rcv_settle_mode, link.time_start, link.FrameCount(),
+ self.format_errors(link.amqp_errors)))
+ print("</table>")
+ # second loop prints the link's frames
+ for link in sess.link_list:
+ print(
+ "<div id=\"%s_sess_%s_link_%s\" style=\"display:none; margin-top: 2px; margin-bottom: 2px; margin-left: 10px\">" %
+ (id, sess.conn_epoch, link.session_seq))
+ print("<h4>Connection %s Session %s Link %s</h4>" %
+ (id, sess.conn_epoch, link.display_name))
+ for plf in link.frame_list:
+ if plf.data.name == "transfer":
+ tdid = plf.data.delivery_id
+ if plf.data.direction == "->":
+ rmap = sess.rx_rcvr_disposition_map
+ tmap = sess.rx_sndr_disposition_map
+ else:
+ rmap = sess.tx_rcvr_disposition_map
+ tmap = sess.tx_sndr_disposition_map
+ plf.data.disposition_display = self.resolve_settlement(link, plf,
+ rmap.get(tdid),
+ tmap.get(tdid))
+ print(plf.adverbl_link_to(), plf.datetime, plf.data.direction, peer, plf.data.web_show_str,
+ plf.data.disposition_display, "<br>")
+ print("</div>") # end link <id>_sess_<conn_epoch>_link_<sess_seq>
+
+ print("</div>") # end session <id>_sess_<conn_epoch>
+
+ print("</div>") # end current connection data
+
+
+if __name__ == "__main__":
+
+ try:
+ pass
+ except:
+ traceback.print_exc(file=sys.stdout)
+ pass
http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/common.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/common.py b/bin/log_scraper/common.py
new file mode 100755
index 0000000..d570024
--- /dev/null
+++ b/bin/log_scraper/common.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Common data storage and utilities
+
+import sys
+
+import nicknamer
+
# True when running under Python 2; selects the lazy dict-iteration helpers.
IS_PY2 = sys.version_info[0] == 2

if not IS_PY2:
    def dict_iteritems(d):
        """Return a lazy iterator over (key, value) pairs of d (Python 3)."""
        return iter(d.items())

    def dict_iterkeys(d):
        """Return a lazy iterator over the keys of d (Python 3)."""
        return iter(d.keys())
else:
    def dict_iteritems(d):
        """Return a lazy iterator over (key, value) pairs of d (Python 2)."""
        return d.iteritems()

    def dict_iterkeys(d):
        """Return a lazy iterator over the keys of d (Python 2)."""
        return d.iterkeys()
+
class Common():
    """
    Run-wide shared state for one scraper invocation.

    Holds the command-line options, the per-log-file router lists, and the
    cross-log lookup tables built during parsing.

    All state is per-instance (set in __init__). The original version kept
    these as mutable class-level attributes, which would be silently shared
    by every Common instance.
    """

    def __init__(self):
        # arg - index transfer data or not
        # If a log file has 100M transfers then adverbl dies.
        # With program arg --no-data then data indexing is turned off but
        # the output still shows connections, links, and link state costs.
        self.arg_index_data = True

        # first letter of the connection names
        self.log_char_base = 'A'

        # number of logs processed
        self.n_logs = 0

        # array of file name strings from command line
        # len=n_logs
        self.log_fns = []

        # discovered router container names (raw long names)
        # len=n_logs
        self.router_ids = []

        # router display names shortened with popups
        self.router_display_names = []

        # router modes in plain text
        self.router_modes = []

        # list of router-instance lists
        # [[A0, A1], [B0], [C0, C1, C2]]
        self.routers = []

        # ordered list of connection names across all routers
        self.all_conn_names = []

        # conn_details_map -
        # key=conn_id, val=ConnectionDetail for that connection
        self.conn_details_map = {}

        # mapping of connected routers by connection id
        # A0_1 is connected to B3_2
        # key = full conn_id 'A0_5'
        # val = full conn_id 'B0_8'
        # note names[key]=val and names[val]=key mutual reference
        self.conn_peers_connid = {}

        # short display name for peer indexed by connection id
        # A0_1 maps to B's container_name nickname
        self.conn_peers_display = {}

        # conn_to_frame_map - global list for easier iteration in main
        # key = conn_id full A0_3
        # val = list of plf lines
        self.conn_to_frame_map = {}

        # name shorteners shared by all parsers in this run
        self.shorteners = nicknamer.Shorteners()

        # when --no-data is in effect, how many log lines were skipped?
        self.data_skipped = 0

    def router_id_index(self, id):
        """
        Given a router full container name, return the index in router_ids table
        Throw value error if not found
        :param id: full router container name
        :return: integer index into self.router_ids
        :raises ValueError: if id is not a known router id
        """
        return self.router_ids.index(id)
+
+
def log_letter_of(idx):
    '''
    Return the letter A, B, C, ... from the index 0..n
    Exits the program when more than 26 log files are presented.
    :param idx: zero-based log file index
    :return: A..Z
    '''
    letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    if idx >= len(letters):
        sys.exit('ERROR: too many log files')
    return letters[idx]
+
def index_of_log_letter(letter):
    '''
    Return the index 0..25 of the first letter of the 'letter' string.
    Raise ValueError if out of range.
    :param letter: string whose first character is a log letter a-z/A-Z
    :return: index 0..25
    :raises ValueError: if the first character is not a letter A..Z
    '''
    val = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".find(letter[0].upper())
    if val < 0 or val > 25:
        # Fixed: the original passed (fmt, arg) as two ValueError arguments,
        # so the message was never interpolated.
        raise ValueError("index_of_log_letter Invalid log letter: %s" % letter)
    return val
+
class RestartRec():
    """
    Simple record tying an event ('restart', 'open', 'close', 'ls', ...)
    to a router and a timestamp; used to build time-sorted chronologies.
    """
    def __init__(self, _id, _router, _event, _datetime):
        # Store the four fields verbatim under their public names.
        self.id, self.router, self.event, self.datetime = \
            _id, _router, _event, _datetime
+
http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/main.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/main.py b/bin/log_scraper/main.py
new file mode 100755
index 0000000..a0d4b40
--- /dev/null
+++ b/bin/log_scraper/main.py
@@ -0,0 +1,785 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Adverbl concepts
+# * Multiple log files may be displayed at the same time.
+# Each log file gets a letter prefix: A, B, C, ...
+# * Log AMQP proton trace channel numbers get prefix
+# [1] becomes [A-1]
+# * The log file line numbers are equivalent to a wireshark trace frame number.
+# * There's no concept of client and server because the logs are from inside
+# a router.
+
+from __future__ import unicode_literals
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import print_function
+
+import ast
+import cgi
+import os
+import sys
+import traceback
+
+import common
+import parser
+import router
+import text
+
+
def time_offset(ttest, t0):
    """
    Return a string time delta between two datetime objects in seconds formatted
    to six significant decimal places.
    :param ttest: later datetime
    :param t0: reference datetime
    :return: "%0.06f"-formatted seconds
    """
    delta = ttest - t0
    # Fixed: the original summed delta.seconds + delta.microseconds and so
    # silently dropped delta.days, corrupting offsets that span midnight
    # (and any negative delta). total_seconds() accounts for all fields.
    return "%0.06f" % delta.total_seconds()
+
+
def show_noteworthy_line(plf, comn):
    """
    Given a log line, print the noteworthy display line
    :param plf: parsed log line
    :param comn: Common run state (peer connection-id lookup)
    :return: None (writes one HTML line to stdout)
    """
    router_iname = plf.router.iname
    conn = "[%s]" % plf.data.conn_id
    peer_conn = "[%s]" % comn.conn_peers_connid.get(plf.data.conn_id, "")
    # peer container id, blank when unknown
    peer_name = plf.router.conn_peer_display.get(plf.data.conn_id, "")
    fields = (plf.adverbl_link_to(), router_iname, conn, plf.data.direction,
              peer_conn, peer_name, plf.data.web_show_str)
    print("%s %s %s %s %s %s %s<br>" % fields)
+
+
+#
+#
def main_except(argv):
    """
    Given a list of log file names, send the javascript web page to stdout.

    :param argv: sys.argv-style list; argv[1] may be '--no-data', the rest
                 are log file paths.
                 NOTE(review): the file loop below iterates sys.argv, not
                 this parameter — callers are expected to pass sys.argv
                 (main() does), so the two alias; confirm before reusing
                 with a different list.
    :return: None. Exits via sys.exit() on usage or consistency errors.
    """
    if len(argv) < 2:
        sys.exit('Usage: %s [--no-data] log-file-name [log-file-name ...]' % argv[0])

    # Instantiate a common block
    comn = common.Common()

    # optparse - look for --no-data switch
    if argv[1] == "--no-data":
        comn.arg_index_data = False
        del argv[1]

    # process the log files and add the results to router_array
    for log_i in range(0, len(sys.argv) - 1):
        arg_log_file = sys.argv[log_i + 1]
        comn.log_fns.append(arg_log_file)
        comn.n_logs += 1

        if not os.path.exists(arg_log_file):
            sys.exit('ERROR: log file %s was not found!' % arg_log_file)

        # parse the log file
        rtrs = parser.parse_log_file(arg_log_file, log_i, comn)
        comn.routers.append(rtrs)

        # marshall facts about the run
        for rtr in rtrs:
            rtr.discover_connection_facts(comn)

    # Create lists of various things sorted by time
    tree = []  # log line
    ls_tree = []  # link state lines
    rr_tree = []  # restart records
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            tree += rtr.lines
            ls_tree += rtr.router_ls
            rr_tree.append(rtr.restart_rec)
    tree = sorted(tree, key=lambda lfl: lfl.datetime)
    ls_tree = sorted(ls_tree, key=lambda lfl: lfl.datetime)
    rr_tree = sorted(rr_tree, key=lambda lfl: lfl.datetime)

    # Back-propagate a router name/version/mode to each list's router0.
    # Complain if container name or version changes between instances.
    # Fill in container_id and shortened display_name tables
    for fi in range(comn.n_logs):
        rtrlist = comn.routers[fi]
        if len(rtrlist) > 1:
            if rtrlist[0].container_name is None:
                rtrlist[0].container_name = rtrlist[1].container_name
            if rtrlist[0].version is None:
                rtrlist[0].version = rtrlist[1].version
            if rtrlist[0].mode is None:
                rtrlist[0].mode = rtrlist[1].mode
            for i in range(0, len(rtrlist) - 1):
                namei = rtrlist[i].container_name
                namej = rtrlist[i + 1].container_name
                if namei != namej:
                    sys.exit('Inconsistent container names, log file %s, instance %d:%s but instance %d:%s' %
                             (comn.log_fns[fi], i, namei, i + 1, namej))
                namei = rtrlist[i].version
                namej = rtrlist[i + 1].version
                if namei != namej:
                    sys.exit('Inconsistent router versions, log file %s, instance %d:%s but instance %d:%s' %
                             (comn.log_fns[fi], i, namei, i + 1, namej))
                namei = rtrlist[i].mode
                namej = rtrlist[i + 1].mode
                if namei != namej:
                    sys.exit('Inconsistent router modes, log file %s, instance %d:%s but instance %d:%s' %
                             (comn.log_fns[fi], i, namei, i + 1, namej))
        name = rtrlist[0].container_name if len(rtrlist) > 0 and rtrlist[0].container_name is not None else ("Unknown_%d" % fi)
        mode = rtrlist[0].mode if len(rtrlist) > 0 and rtrlist[0].mode is not None else "standalone"
        comn.router_ids.append(name)
        comn.router_display_names.append(comn.shorteners.short_rtr_names.translate(name))
        comn.router_modes.append(mode)

    # aggregate connection-to-frame maps into big map
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            comn.conn_to_frame_map.update(rtr.conn_to_frame_map)

    # generate router-to-router connection peer relationships
    peer_list = []
    for plf in tree:
        if plf.data.name == "open" and plf.data.direction_is_in():
            cid = plf.data.conn_id  # the router that generated this log file
            if "properties" in plf.data.described_type.dict:
                peer_conn = plf.data.described_type.dict["properties"].get(':"qd.conn-id"',
                                                                           "")  # router that sent the open
                if peer_conn != "" and plf.data.conn_peer != "":
                    pid_peer = plf.data.conn_peer.strip('\"')
                    rtr, rtridx = router.which_router_id_tod(comn.routers, pid_peer, plf.datetime)
                    if rtr is not None:
                        pid = rtr.conn_id(peer_conn)
                        hit = sorted((cid, pid))
                        if hit not in peer_list:
                            peer_list.append(hit)

    # record each discovered peer pair in both directions
    for (key, val) in peer_list:
        if key in comn.conn_peers_connid:
            sys.exit('key val messed up')
        if val in comn.conn_peers_connid:
            sys.exit('key val messed up')
        comn.conn_peers_connid[key] = val
        comn.conn_peers_connid[val] = key
        cn_k = comn.router_ids[common.index_of_log_letter(key)]
        cn_v = comn.router_ids[common.index_of_log_letter(val)]
        comn.conn_peers_display[key] = comn.shorteners.short_rtr_names.translate(cn_v)
        comn.conn_peers_display[val] = comn.shorteners.short_rtr_names.translate(cn_k)
    #
    # Start producing the output stream
    #
    print(text.web_page_head())

    #
    # Generate javascript
    #
    # output the frame show/hide functions into the header
    for conn_id, plfs in common.dict_iteritems(comn.conn_to_frame_map):
        print("function show_%s() {" % conn_id)
        for plf in plfs:
            print(" javascript:show_node(\'%s\');" % plf.fid)
        print("}")
        print("function hide_%s() {" % conn_id)
        for plf in plfs:
            print(" javascript:hide_node(\'%s\');" % plf.fid)
        print("}")
        # manipulate checkboxes
        print("function show_if_cb_sel_%s() {" % conn_id)
        print(" if (document.getElementById(\"cb_sel_%s\").checked) {" % conn_id)
        print(" javascript:show_%s();" % conn_id)
        print(" } else {")
        print(" javascript:hide_%s();" % conn_id)
        print(" }")
        print("}")
        print("function select_cb_sel_%s() {" % conn_id)
        print(" document.getElementById(\"cb_sel_%s\").checked = true;" % conn_id)
        print(" javascript:show_%s();" % conn_id)
        print("}")
        print("function deselect_cb_sel_%s() {" % conn_id)
        print(" document.getElementById(\"cb_sel_%s\").checked = false;" % conn_id)
        print(" javascript:hide_%s();" % conn_id)
        print("}")
        print("function toggle_cb_sel_%s() {" % conn_id)
        print(" if (document.getElementById(\"cb_sel_%s\").checked) {" % conn_id)
        print(" document.getElementById(\"cb_sel_%s\").checked = false;" % conn_id)
        print(" } else {")
        print(" document.getElementById(\"cb_sel_%s\").checked = true;" % conn_id)
        print(" }")
        print(" javascript:show_if_cb_sel_%s();" % conn_id)
        print("}")

    # Select/Deselect/Toggle All Connections functions
    print("function select_all() {")
    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
        print(" javascript:select_cb_sel_%s();" % conn_id)
    print("}")
    print("function deselect_all() {")
    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
        print(" javascript:deselect_cb_sel_%s();" % conn_id)
    print("}")
    print("function toggle_all() {")
    for conn_id, frames_ids in common.dict_iteritems(comn.conn_to_frame_map):
        print(" javascript:toggle_cb_sel_%s();" % conn_id)
    print("}")

    #
    print("</script>")
    print("</head>")
    print("<body>")
    #

    # Table of contents
    print(text.web_page_toc())

    # Report how much data was skipped if --no-data switch in effect
    if not comn.arg_index_data:
        print("--no-data switch in effect. %d log lines skipped" % comn.data_skipped)
        print("<p><hr>")

    # file(s) included in this doc
    print("<a name=\"c_logfiles\"></a>")
    print("<h3>Log files</h3>")
    print("<table><tr><th>Log</th> <th>Container name</th> <th>Version</th> <th>Mode</th>"
          "<th>Instances</th> <th>Log file path</th></tr>")
    for i in range(comn.n_logs):
        rtrlist = comn.routers[i]
        if len(rtrlist) > 0:
            print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" %
                  (common.log_letter_of(i), rtrlist[0].container_name, rtrlist[0].version, rtrlist[0].mode,
                   str(len(rtrlist)), os.path.abspath(comn.log_fns[i])))
        else:
            # no router instances were found in this log file
            print("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>" %
                  (common.log_letter_of(i), text.nbsp(), text.nbsp(),
                   str(len(rtrlist)), os.path.abspath(comn.log_fns[i])))
    print("</table>")
    print("<hr>")

    # reboot chronology
    print("<a name=\"c_rtrinstances\"></a>")
    print("<h3>Router Reboot Chronology</h3>")
    print("<table><tr><th>Log</th> <th>Time</th> <th>Container name</th> ")
    for i in range(len(comn.routers)):
        print("<td>%s</td>" % common.log_letter_of(i))
    print("</tr>")
    for rr in rr_tree:
        print("<tr><td>%s</td><td>%s</td><td>%s</td>" %
              (rr.router.iname, rr.datetime, rr.router.container_name))
        for i in range(len(comn.routers)):
            print("<td>%s</td> " % (rr.router.iname if i == rr.router.log_index else text.nbsp()))
        print("</tr>")
    print("</table>")
    print("<hr>")

    # print the connection peer tables
    #
    # +------+-------------------+-----+-------------------+-------+-------+----------+--------+
    # | View | Router            | Dir | Peer              | Log   | N     | Transfer | AMQP   |
    # |      +-----------+-------+     +--------+----------+ lines | links | bytes    | errors |
    # |      | container | connid|     | connid | container|       |       |          |        |
    # +------+-----------+-------+-----+--------+----------+-------+-------+----------+--------+

    print("<a name=\"c_connections\"></a>")
    print("<h3>Connections</h3>")

    print("<p>")
    print("<button onclick=\"javascript:select_all()\">Select All</button>")
    print("<button onclick=\"javascript:deselect_all()\">Deselect All</button>")
    print("<button onclick=\"javascript:toggle_all()\">Toggle All</button>")
    print("</p>")

    print("<h3>Connections by ConnectionId</h3>")
    print(
        "<table><tr> <th rowspan=\"2\">View</th> <th colspan=\"2\">Router</th> <th rowspan=\"2\">Dir</th> <th colspan=\"2\">Peer</th> <th rowspan=\"2\">Log lines</th> "
        "<th rowspan=\"2\">N links</th><th rowspan=\"2\">Transfer bytes</th> <th rowspan=\"2\">AMQP errors</th> <th rowspan=\"2\">Open time</th> <th rowspan=\"2\">Close time</th></tr>")
    print("<tr> <th>container</th> <th>connid</th> <th>connid</th> <th>container</th></tr>")

    # run-wide totals for the summary row
    tConn = 0
    tLines = 0
    tBytes = 0
    tErrs = 0
    tLinks = 0
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            rid = rtr.container_name
            for conn in rtr.conn_list:
                tConn += 1
                id = rtr.conn_id(conn)  # this router's full connid 'A0_3'
                peer = rtr.conn_peer_display.get(id, "")  # peer container id
                peerconnid = comn.conn_peers_connid.get(id, "")
                n_links = rtr.details.links_in_connection(id)
                tLinks += n_links
                errs = sum(1 for plf in rtr.conn_to_frame_map[id] if plf.data.amqp_error)
                tErrs += errs
                stime = rtr.conn_open_time.get(id, text.nbsp())
                if stime != text.nbsp():
                    stime = stime.datetime
                etime = rtr.conn_close_time.get(id, text.nbsp())
                if etime != text.nbsp():
                    etime = etime.datetime
                print("<tr>")
                print("<td> <input type=\"checkbox\" id=\"cb_sel_%s\" " % id)
                print("checked=\"true\" onclick=\"javascript:show_if_cb_sel_%s()\"> </td>" % (id))
                print("<td>%s</td><td><a href=\"#cd_%s\">%s</a></td><td>%s</td><td>%s</td><td>%s</td><td>%s</td>"
                      "<td>%d</td><td>%s</td><td>%d</td><td>%s</td><td>%s</td></tr>" %
                      (rid, id, id, rtr.conn_dir[id], peerconnid, peer, rtr.conn_log_lines[id], n_links,
                       rtr.conn_xfer_bytes[id], errs, stime, etime))
                tLines += rtr.conn_log_lines[id]
                tBytes += rtr.conn_xfer_bytes[id]
    print(
        "<td>Total</td><td>%d</td><td> </td><td> </td><td> </td><td> </td><td>%d</td><td>%d</td><td>%d</td><td>%d</td></tr>" %
        (tConn, tLines, tLinks, tBytes, tErrs))
    print("</table>")

    print("<h3>Router Restart and Connection chronology</h3>")

    # merge restart, open, and close events into one time-sorted list
    cl = []
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            rid = rtr.container_name
            cl.append(common.RestartRec(rtr.iname, rtr, "restart", rtr.restart_rec.datetime))
            for conn in rtr.conn_list:
                id = rtr.conn_id(conn)
                if id in rtr.conn_open_time:
                    cl.append(common.RestartRec(id, rtr, "open", rtr.conn_open_time[id].datetime))
                if id in rtr.conn_close_time:
                    cl.append(common.RestartRec(id, rtr, "close", rtr.conn_close_time[id].datetime))
    cl = sorted(cl, key=lambda lfl: lfl.datetime)

    print("<table><tr> <th>Time</th> <th>Id</th> <th>Event</th> <th>container</th> <th>connid</th> "
          "<th>Dir</th> <th>connid</th> <th>container</th>")
    for i in range(len(comn.routers)):
        print("<td>%s</td>" % common.log_letter_of(i))
    print("</tr>")
    for c in cl:
        if c.event == "restart":
            rid = c.router.container_name
            print("<tr><td>%s</td> <td>%s</td> <td><span style=\"background-color:yellow\">%s</span></td><td>%s</td> "
                  "<td>%s</td> <td>%s</td><td>%s</td> <td>%s</td>" %
                  (c.datetime, c.id, c.event, rid, "", "", "", ""))
            for i in range(len(comn.routers)):
                print("<td>%s</td> " % (c.id if i == c.router.log_index else text.nbsp()))
            print("</tr>")
        else:
            # open or close event
            rid = c.router.container_name
            cdir = c.router.conn_dir[c.id]
            peer = c.router.conn_peer_display.get(c.id, "")  # peer container id
            peerconnid = comn.conn_peers_connid.get(c.id, "")
            print("<tr><td>%s</td> <td>%s</td> <td>%s</td><td>%s</td> <td>%s</td> <td>%s</td><td>%s</td> <td>%s</td>" %
                  (c.datetime, c.id, c.event, rid, c.id, cdir, peerconnid, peer))
            for i in range(len(comn.routers)):
                print("<td>%s</td> " % (text.nbsp()))
            print("</tr>")
    print("</table>")
    print("<hr>")

    # connection details
    print("<a name=\"c_conndetails\"></a>")
    print("<h3>Connection Details</h3>")
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            rtr.details.show_html()
    print("<hr>")

    # noteworthy log lines: highlight errors and stuff
    print("<a name=\"c_noteworthy\"></a>")
    print("<h3>Noteworthy</h3>")
    n_errors = 0
    n_settled = 0
    n_more = 0
    n_resume = 0
    n_aborted = 0
    n_drain = 0
    # one counting pass over all log lines, then one pass per category below
    for plf in tree:
        if plf.data.amqp_error:
            n_errors += 1
        if plf.data.transfer_settled:
            n_settled += 1
        if plf.data.transfer_more:
            n_more += 1
        if plf.data.transfer_resume:
            n_resume += 1
        if plf.data.transfer_aborted:
            n_aborted += 1
        if plf.data.flow_drain:
            n_drain += 1
    # amqp errors
    print("<a href=\"javascript:toggle_node('noteworthy_errors')\">%s%s</a> AMQP errors: %d<br>" %
          (text.lozenge(), text.nbsp(), n_errors))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_errors\">")
    for plf in tree:
        if plf.data.amqp_error:
            show_noteworthy_line(plf, comn)
    print("</div>")
    # transfers with settled=true
    print("<a href=\"javascript:toggle_node('noteworthy_settled')\">%s%s</a> Presettled transfers: %d<br>" %
          (text.lozenge(), text.nbsp(), n_settled))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_settled\">")
    for plf in tree:
        if plf.data.transfer_settled:
            show_noteworthy_line(plf, comn)
    print("</div>")
    # transfers with more=true
    print("<a href=\"javascript:toggle_node('noteworthy_more')\">%s%s</a> Partial transfers with 'more' set: %d<br>" %
          (text.lozenge(), text.nbsp(), n_more))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_more\">")
    for plf in tree:
        if plf.data.transfer_more:
            show_noteworthy_line(plf, comn)
    print("</div>")
    # transfers with resume=true, whatever that is
    print("<a href=\"javascript:toggle_node('noteworthy_resume')\">%s%s</a> Resumed transfers: %d<br>" %
          (text.lozenge(), text.nbsp(), n_resume))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_resume\">")
    for plf in tree:
        if plf.data.transfer_resume:
            show_noteworthy_line(plf, comn)
    print("</div>")
    # transfers with abort=true
    print("<a href=\"javascript:toggle_node('noteworthy_aborts')\">%s%s</a> Aborted transfers: %d<br>" %
          (text.lozenge(), text.nbsp(), n_aborted))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_aborts\">")
    for plf in tree:
        if plf.data.transfer_aborted:
            show_noteworthy_line(plf, comn)
    print("</div>")
    # flow with drain=true
    print("<a href=\"javascript:toggle_node('noteworthy_drain')\">%s%s</a> Flow with 'drain' set: %d<br>" %
          (text.lozenge(), text.nbsp(), n_drain))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"noteworthy_drain\">")
    for plf in tree:
        if plf.data.flow_drain:
            show_noteworthy_line(plf, comn)
    print("</div>")
    print("<hr>")

    # the proton log lines
    # log lines in f_A_116
    # log line details in f_A_116_d
    print("<a name=\"c_logdata\"></a>")
    print("<h3>Log data</h3>")
    for plf in tree:
        l_dict = plf.data.described_type.dict
        print("<div width=\"100%%\" style=\"display:block margin-bottom: 2px\" id=\"%s\">" % plf.fid)
        print("<a name=\"%s\"></a>" % plf.fid)
        detailname = plf.fid + "_d"  # type: str
        loz = "<a href=\"javascript:toggle_node('%s')\">%s%s</a>" % (detailname, text.lozenge(), text.nbsp())
        rtr = plf.router
        rid = comn.router_display_names[rtr.log_index]

        peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
        peer = rtr.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
        print(loz, plf.datetime, ("%s#%d" % (plf.prefixi, plf.lineno)), rid, ("[%s]" % plf.data.conn_id),
              plf.data.direction, ("[%s]" % peerconnid), peer,
              plf.data.web_show_str, plf.data.disposition_display, "<br>")
        print(" <div width=\"100%%\"; "
              "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
              "id=\"%s\">" %
              detailname)
        for key in sorted(common.dict_iterkeys(l_dict)):
            val = l_dict[key]
            print("%s : %s <br>" % (key, cgi.escape(str(val))))
        if plf.data.name == "transfer":
            print("Header and annotations : %s <br>" % plf.data.transfer_hdr_annos)
        print("</div>")
        print("</div>")
    print("<hr>")

    # data traversing network
    print("<a name=\"c_messageprogress\"></a>")
    print("<h3>Message progress</h3>")
    for i in range(0, comn.shorteners.short_data_names.len()):
        sname = comn.shorteners.short_data_names.shortname(i)
        size = 0
        # find the transfer size from the first matching transfer
        for plf in tree:
            if plf.data.name == "transfer" and plf.transfer_short_name == sname:
                size = plf.data.transfer_size
                break
        print("<a name=\"%s\"></a> <h4>%s (%s)" % (sname, sname, size))
        print(" <span> <a href=\"javascript:toggle_node('%s')\"> %s</a>" % ("data_" + sname, text.lozenge()))
        print(" <div width=\"100%%\"; style=\"display:none; font-weight: normal; margin-bottom: 2px\" id=\"%s\">" %
              ("data_" + sname))
        print(" ", comn.shorteners.short_data_names.longname(i, True))
        print("</div> </span>")
        print("</h4>")
        print("<table>")
        print(
            "<tr><th>Src</th> <th>Time</th> <th>Router</th> <th>ConnId</th> <th>Dir</th> <th>ConnId</th> <th>Peer</th> "
            "<th>T delta</th> <th>T elapsed</th><th>Settlement</th><th>S elapsed</th></tr>")
        t0 = None
        tlast = None
        for plf in tree:
            if plf.data.name == "transfer" and plf.transfer_short_name == sname:
                if t0 is None:
                    t0 = plf.datetime
                    tlast = plf.datetime
                    delta = "0.000000"
                    epsed = "0.000000"
                else:
                    delta = time_offset(plf.datetime, tlast)
                    epsed = time_offset(plf.datetime, t0)
                    tlast = plf.datetime
                sepsed = ""
                if plf.data.final_disposition is not None:
                    sepsed = time_offset(plf.data.final_disposition.datetime, t0)
                rid = plf.router.iname
                peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
                peer = plf.router.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
                print("<tr><td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> "
                      "<td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> </tr>" %
                      (plf.adverbl_link_to(), plf.datetime, rid, plf.data.conn_id, plf.data.direction,
                       peerconnid, peer, delta, epsed,
                       plf.data.disposition_display, sepsed))
        print("</table>")

    print("<hr>")

    # link names traversing network
    print("<a name=\"c_linkprogress\"></a>")
    print("<h3>Link name propagation</h3>")
    for i in range(0, comn.shorteners.short_link_names.len()):
        if comn.shorteners.short_link_names.len() == 0:
            break
        sname = comn.shorteners.short_link_names.prefixname(i)
        print("<a name=\"%s\"></a> <h4>%s" % (sname, sname))
        print(" <span> <div width=\"100%%\"; style=\"display:block; font-weight: normal; margin-bottom: 2px\" >")
        print(comn.shorteners.short_link_names.longname(i, True))
        print("</div> </span>")
        print("</h4>")
        print("<table>")
        print("<tr><th>src</th> <th>Time</th> <th>Router</th> <th>ConnId</th> <th>Dir</th> <th>ConnId> <th>Peer</th> "
              "<th>T delta</th> <th>T elapsed</th></tr>")
        t0 = None
        tlast = None
        for plf in tree:
            if plf.data.name == "attach" and plf.data.link_short_name == sname:
                if t0 is None:
                    t0 = plf.datetime
                    delta = "0.000000"
                    epsed = "0.000000"
                else:
                    delta = time_offset(plf.datetime, tlast)
                    epsed = time_offset(plf.datetime, t0)
                # tlast updated on both paths so the next delta is well defined
                tlast = plf.datetime
                rid = plf.router.iname
                peerconnid = "%s" % comn.conn_peers_connid.get(plf.data.conn_id, "")
                peer = plf.router.conn_peer_display.get(plf.data.conn_id, "")  # peer container id
                print("<tr><td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> <td>%s</td> "
                      "<td>%s</td> <td>%s</td> <td>%s</td></tr>" %
                      (plf.adverbl_link_to(), plf.datetime, rid, plf.data.conn_id, plf.data.direction, peerconnid, peer,
                       delta, epsed))
        print("</table>")

    print("<hr>")

    # short data index
    print("<a name=\"c_rtrdump\"></a>")
    comn.shorteners.short_rtr_names.htmlDump(False)
    print("<hr>")

    print("<a name=\"c_peerdump\"></a>")
    comn.shorteners.short_peer_names.htmlDump(False)
    print("<hr>")

    print("<a name=\"c_linkdump\"></a>")
    comn.shorteners.short_link_names.htmlDump(True)
    print("<hr>")

    print("<a name=\"c_msgdump\"></a>")
    comn.shorteners.short_data_names.htmlDump(True)
    print("<hr>")

    # link state info
    # merge link state and restart records into single time based list
    cl = []
    for rtrlist in comn.routers:
        for rtr in rtrlist:
            rid = rtr.container_name
            cl.append(common.RestartRec(rtr.iname, rtr, "restart", rtr.restart_rec.datetime))
    for plf in ls_tree:
        if "costs" in plf.line:
            cl.append(common.RestartRec("ls", plf, "ls", plf.datetime))
    cl = sorted(cl, key=lambda lfl: lfl.datetime)

    # create a map of lists for each router
    # the list holds the name of other routers for which the router publishes a cost
    costs_pub = {}
    for i in range(0, comn.n_logs):
        costs_pub[comn.router_ids[i]] = []

    # cur_costs is a 2D array of costs used to tell when cost calcs have stabilized
    # Each incoming LS cost line replaces a row in this table
    # cur_costs tracks only interior routers
    interior_rtrs = []
    for rtrs in comn.routers:
        if rtrs[0].is_interior():
            interior_rtrs.append(rtrs[0].container_name)

    # sentinel cost values used in cur_costs rows
    PEER_COST_REBOOT = -1
    PEER_COST_ABSENT = 0

    def new_costs_row(val):
        """
        return a costs row.
        :param val: -1 when router reboots, 0 when router log line processed
        :return: dict mapping each interior router name to val
        """
        res = {}
        for rtr in interior_rtrs:
            res[rtr] = val
        return res

    cur_costs = {}
    for rtr in interior_rtrs:
        cur_costs[rtr] = new_costs_row(PEER_COST_REBOOT)

    print("<a name=\"c_ls\"></a>")
    print("<h3>Routing link state</h3>")
    print("<h4>Link state costs</h4>")
    print("<table>")
    print("<tr><th>Time</th> <th>Router</th>")
    for i in range(0, comn.n_logs):
        print("<th>%s</th>" % common.log_letter_of(i))
    print("</tr>")
    for c in cl:
        if c.event == "ls":
            # link state computed costs and router reachability
            plf = c.router  # cruel overload here: router is a parsed line not a router
            # Processing: Computed costs: {u'A': 1, u'C': 51L, u'B': 101L}
            print("<tr><td>%s</td> <td>%s</td>" % (plf.datetime, ("%s#%d" % (plf.router.iname, plf.lineno))))
            try:
                line = plf.line
                sti = line.find("{")
                line = line[sti:]
                l_dict = ast.literal_eval(line)
                costs_row = new_costs_row(PEER_COST_ABSENT)
                for i in range(0, comn.n_logs):
                    if len(comn.routers[i]) > 0:
                        tst_name = comn.routers[i][0].container_name
                        if tst_name in l_dict:
                            val = l_dict[tst_name]
                            costs_row[tst_name] = val
                        elif i == plf.router.log_index:
                            val = text.nbsp()
                        else:
                            val = "<span style=\"background-color:orange\">%s</span>" % (text.nbsp() * 2)
                    else:
                        val = "<span style=\"background-color:orange\">%s</span>" % (text.nbsp() * 2)
                    print("<td>%s</td>" % val)
                # track costs published when there is no column to put the number
                tgts = costs_pub[c.router.router.container_name]
                for k, v in common.dict_iteritems(l_dict):
                    if k not in comn.router_ids:
                        if k not in tgts:
                            tgts.append(k)  # this cost went unreported
                # update this router's cost view in running table
                if plf.router.is_interior():
                    cur_costs[plf.router.container_name] = costs_row
            except:
                # NOTE(review): bare except silently ignores malformed costs
                # lines (and any other failure here) — consider narrowing.
                pass
            print("</tr>")
            # if the costs are stable across all routers then put an indicator in table
            costs_stable = True
            for c_rtr in interior_rtrs:
                for r_rtr in interior_rtrs:
                    if r_rtr != c_rtr \
                            and (cur_costs[r_rtr][c_rtr] != cur_costs[c_rtr][r_rtr] \
                                 or cur_costs[c_rtr][r_rtr] <= PEER_COST_ABSENT):
                        costs_stable = False
                        break
                if not costs_stable:
                    break
            if costs_stable:
                print("<tr><td><span style=\"background-color:green\">stable</span></td></tr>")
        else:
            # restart
            print("<tr><td>%s</td> <td>%s</td>" % (c.datetime, ("%s restart" % (c.router.iname))))
            for i in range(0, comn.n_logs):
                color = "green" if i == c.router.log_index else "orange"
                print("<td><span style=\"background-color:%s\">%s</span></td>" % (color, text.nbsp() * 2))
            print("</tr>")
            if c.router.is_interior():
                cur_costs[c.router.container_name] = new_costs_row(PEER_COST_REBOOT)
    print("</table>")
    print("<br>")

    # maybe display cost declarations that were not displayed
    costs_clean = True
    for k, v in common.dict_iteritems(costs_pub):
        if len(v) > 0:
            costs_clean = False
            break
    if not costs_clean:
        print("<h4>Router costs declared in logs but not displayed in Link state cost table</h4>")
        print("<table>")
        print("<tr><th>Router</th><Peers whose logs are absent</th></tr>")
        for k, v in common.dict_iteritems(costs_pub):
            if len(v) > 0:
                print("<tr><td>%s</td><td>%s</td></tr>" % (k, str(v)))
        print("</table>")
        print("<br>")

    print("<a href=\"javascript:toggle_node('ls_costs')\">%s%s</a> Link state costs data<br>" %
          (text.lozenge(), text.nbsp()))
    print(" <div width=\"100%%\"; "
          "style=\"display:none; font-weight: normal; margin-bottom: 2px; margin-left: 10px\" "
          "id=\"ls_costs\">")
    print("<table>")
    print("<tr><th>Time</th> <th>Router</th> <th>Name</th> <th>Log</th></tr>")
    for plf in ls_tree:
        if "costs" in plf.line:
            print("<tr><td>%s</td> <td>%s</td>" % (plf.datetime, ("%s#%d" % (plf.router.iname, plf.lineno))))
            print("<td>%s</td>" % plf.router.container_name)
            print("<td>%s</td></tr>" % plf.line)
    print("</table>")
    print("</div>")

    print("<hr>")

    print("</body>")
+
+
def main(argv):
    """
    Command line entry point: run main_except and convert any failure
    into a non-zero process exit code.
    :param argv: sys.argv-style argument list
    :return: 0 on success, 1 on error
    """
    try:
        main_except(argv)
        return 0
    except Exception:
        # The exception binding 'as e' was unused; the traceback module
        # reports the active exception directly.
        traceback.print_exc()
        return 1


if __name__ == "__main__":
    sys.exit(main(sys.argv))
http://git-wip-us.apache.org/repos/asf/qpid-dispatch/blob/5c3411a1/bin/log_scraper/nicknamer.py
----------------------------------------------------------------------
diff --git a/bin/log_scraper/nicknamer.py b/bin/log_scraper/nicknamer.py
new file mode 100755
index 0000000..9dc2f9f
--- /dev/null
+++ b/bin/log_scraper/nicknamer.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
import cgi  # NOTE(review): cgi.escape was removed in Python 3.8; escaping now uses html.escape
import html
+
class ShortNames():
    '''
    Name shortener.
    The short name for display is prefix + "_" + index(longName).
    The display name may be embellished with an HTML popup showing the long name.
    Link and endpoint names, and data, are tracked by separate instances.
    Names longer than the threshold are shortened.
    Each instance has a prefix used when the table is dumped as HTML.
    '''
    def __init__(self, prefixText, _threshold=25):
        # Ordered list of every long name seen; a name's index is its short id
        self.longnames = []
        self.prefix = prefixText
        self.threshold = _threshold

    def translate(self, lname, show_popup=False):
        '''
        Translate a long name into a short name, maybe.
        Memorize all names, translated or not.
        Strip leading/trailing double quotes.
        :param lname: the name
        :param show_popup: if True and the name is shortened, wrap the short
            name in an HTML span whose title popup shows the long name
        :return: if shortened, the short name (possibly wrapped in a popup
            span), else the not-so-long name as given (quotes stripped).
        '''
        if lname.startswith("\"") and lname.endswith("\""):
            lname = lname[1:-1]
        try:
            idx = self.longnames.index(lname)
        except ValueError:
            # first sighting: remember it; its list index becomes its short id
            self.longnames.append(lname)
            idx = len(self.longnames) - 1
        # return as-given if short enough
        if len(lname) < self.threshold:
            return lname
        if show_popup:
            return "<span title=\"" + html.escape(lname) + "\">" + self.prefix + "_" + str(idx) + "</span>"
        else:
            return self.prefix + "_" + str(idx)

    def len(self):
        # number of distinct names memorized so far
        return len(self.longnames)

    # NOTE(review): the original class defined a prefix() method here, but it
    # was permanently shadowed by the self.prefix attribute assigned in
    # __init__ and therefore uncallable on any instance; it has been removed.
    # Callers read the attribute directly.

    def shortname(self, idx):
        # Display form of the name at idx, exactly as translate() would return it
        name = self.longnames[idx]
        if len(name) < self.threshold:
            return name
        return self.prefix + "_" + str(idx)

    def prefixname(self, idx):
        # Always-shortened form, regardless of the length threshold
        return self.prefix + "_" + str(idx)

    def sname_to_popup(self, sname):
        '''
        Wrap a short name in an HTML span whose title popup is the long name.
        :param sname: a short name previously produced by this instance
        :return: HTML span string
        :raises ValueError: if sname does not decode to a memorized name
        '''
        if not sname.startswith(self.prefix):
            raise ValueError("Short name '%s' does not start with prefix '%s'" % (sname, self.prefix))
        try:
            lname = self.longnames[int(sname[(len(self.prefix) + 1):])]
        except (ValueError, IndexError):
            raise ValueError("Short name '%s' did not translate to a long name" % (sname))
        # The original omitted the closing '">' of the title attribute,
        # emitting malformed HTML; fixed here.
        return "<span title=\"" + html.escape(lname) + "\">" + sname + "</span>"

    def longname(self, idx, cgi_escape=False):
        '''
        Get the long name at idx, optionally HTML-escaped.
        :param idx: index of the name
        :param cgi_escape: True if caller wants the string escaped for HTML display
        :return: the long name
        '''
        return html.escape(self.longnames[idx]) if cgi_escape else self.longnames[idx]

    def htmlDump(self, with_link=False):
        '''
        Print the name table as an unnumbered list to stdout.
        Long names are HTML-escaped.
        :param with_link: True if each short name is hyperlinked targeting itself
        :return: None
        '''
        if len(self.longnames) > 0:
            print("<h3>" + self.prefix + " Name Index</h3>")
            print("<ul>")
            for i in range(len(self.longnames)):
                name = self.prefix + "_" + str(i)
                if with_link:
                    name = "<a href=\"#%s\">%s</a>" % (name, name)
                print("<li> " + name + " - " + html.escape(self.longnames[i]) + "</li>")
            print("</ul>")
+
+
class Shorteners():
    '''Convenience bundle of the ShortNames tables used across a scrape.'''
    def __init__(self):
        # (attribute name, ShortNames constructor args) pairs. Thresholds
        # lower than the 25-char default force more aggressive shortening
        # for categories whose raw names are almost always long.
        specs = (("short_link_names", ("link", 15)),
                 ("short_addr_names", ("address",)),
                 ("short_data_names", ("transfer", 2)),
                 ("short_peer_names", ("peer",)),
                 ("short_rtr_names", ("router",)))
        for attr, args in specs:
            setattr(self, attr, ShortNames(*args))
+
+
if __name__ == "__main__":
    # Library module: nothing to do when run directly.
    pass
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@qpid.apache.org
For additional commands, e-mail: commits-help@qpid.apache.org