Posted to commits@qpid.apache.org by ch...@apache.org on 2020/06/18 20:28:56 UTC

[qpid-dispatch] branch master updated: DISPATCH-1692: Scraper improvement for sequence diagrams

This is an automated email from the ASF dual-hosted git repository.

chug pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/qpid-dispatch.git


The following commit(s) were added to refs/heads/master by this push:
     new df22f36  DISPATCH-1692: Scraper improvement for sequence diagrams
df22f36 is described below

commit df22f36b9d45ccf9c555f847919c911e6c6d57e8
Author: Chuck Rolke <ch...@apache.org>
AuthorDate: Thu Jun 18 16:26:42 2020 -0400

    DISPATCH-1692: Scraper improvement for sequence diagrams
    
     * Display the trailing N (currently 50) bytes of transfers on the main
       display and in the sequence output (sketched below)
     * Add HTML breaks to the sequence data on the main page so that data of
       interest can be cut out without editing the HTML file
     * Allow the sequence tool to read from stdin and not just from a file
     * Print actor and participant names for the sequence headings
---
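The first bullet keeps only the trailing portion of each transfer body. A
minimal sketch of the idea, using an invented payload string; the constant
matches the one the parser.py change introduces below:

    SEQUENCE_TRANSFER_SIZE = 50   # same value the parser.py change defines

    long_body = "x" * 200
    short_body = "abc"
    # Python slicing returns the whole string when it is shorter than the
    # requested tail, so short transfers are shown unchanged.
    assert len(long_body[-SEQUENCE_TRANSFER_SIZE:]) == SEQUENCE_TRANSFER_SIZE
    assert short_body[-SEQUENCE_TRANSFER_SIZE:] == "abc"
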
 tools/scraper/parser.py       | 21 ++++++++++++++++-----
 tools/scraper/scraper.py      |  9 +++++++--
 tools/scraper/seq-diag-gen.py | 30 +++++++++++++++++++++++++++---
 tools/scraper/text.py         |  1 +
 4 files changed, 51 insertions(+), 10 deletions(-)

diff --git a/tools/scraper/parser.py b/tools/scraper/parser.py
index 50d26e8..16e3da5 100755
--- a/tools/scraper/parser.py
+++ b/tools/scraper/parser.py
@@ -34,6 +34,11 @@ import common
 import text
 import router
 
+"""
+Sequence output copies the trailing end of transfers to a second display line.
+Define how many characters to show.
+"""
+SEQUENCE_TRANSFER_SIZE = 50
 
 def colorize_bg(what):
     # TODO: use the real colorize_bg
@@ -748,19 +753,21 @@ class ParsedLogLine(object):
             # Performative: transfer [channel,handle] (id)
             self.transfer_short_name = self.shorteners.short_data_names.translate(res.transfer_bare, customer=self)
             showdat = "<a href=\"#%s_dump\">%s</a>" % (self.transfer_short_name, self.transfer_short_name)
-            res.web_show_str = "<strong>%s</strong>  %s (%s) %s %s %s %s %s - %s bytes" % (
+            res.web_show_str = "<strong>%s</strong>  %s (%s) %s %s %s %s %s %s - %s bytes" % (
                 res.name, colorize_bg(res.channel_handle), res.delivery_id,
                 self.highlighted("settled", res.transfer_settled, common.color_of("presettled")),
                 self.highlighted("more", res.transfer_more, common.color_of("more")),
                 self.highlighted("resume", res.transfer_resume, common.color_of("aborted")),
                 self.highlighted("aborted", res.transfer_aborted, common.color_of("aborted")),
-                showdat, res.transfer_size)
-            res.sdorg_str = "%s %s (%s) %s %s%s%s%s" % (
+                showdat, common.strings_of_proton_log(res.transfer_bare)[-SEQUENCE_TRANSFER_SIZE:],
+                res.transfer_size)
+            res.sdorg_str = "%s %s (%s) %s (%s%s%s%s)\\n%s" % (
                 res.name, res.channel_handle, res.delivery_id, res.transfer_size,
                 self.unhighlighted(" settled", res.transfer_settled),
                 self.unhighlighted(" more", res.transfer_more),
                 self.unhighlighted(" resume", res.transfer_resume),
-                self.unhighlighted(" aborted", res.transfer_aborted))
+                self.unhighlighted(" aborted", res.transfer_aborted),
+                common.strings_of_proton_log(res.transfer_bare)[-SEQUENCE_TRANSFER_SIZE:])
 
     def adverbl_link_to(self):
         """
@@ -868,10 +875,14 @@ class ParsedLogLine(object):
         # Pull out scraper literal logs
         sti = self.line.find(self.scraper_key)
         if sti > 0:
-            self.data.is_scraper = True
             # strip datetime and show literal string
             sti += len("SCRAPER")
+            self.data.is_scraper = True
             self.data.web_show_str = ("<strong>SCRAPER</strong> %s" % common.html_escape(self.line[sti:]))
+            stcp = self.line[sti:].find(')') # close paren after log level
+            if stcp < 0:
+                stcp = 0
+            self.data.sdorg_str = self.line[sti + stcp + 1:]
             return
 
         # extract connection number
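
The parser.py hunks above feed the sequence output in two ways: transfers get
the trailing slice of their decoded body appended, and SCRAPER literal log
lines have everything after the ")" that closes the log level captured into
sdorg_str. A small sketch of the latter, with an invented log line (the exact
prefix format is an assumption):

    # Invented SCRAPER line; the parsing mirrors the hunk above.
    line = "2020-06-18 16:26:42.123 SCRAPER (info) phase 1 begins"
    sti = line.find("SCRAPER") + len("SCRAPER")
    stcp = line[sti:].find(')')         # close paren after log level
    if stcp < 0:
        stcp = 0
    sdorg_str = line[sti + stcp + 1:]   # -> " phase 1 begins"
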
diff --git a/tools/scraper/scraper.py b/tools/scraper/scraper.py
index 8e21277..c6a347f 100755
--- a/tools/scraper/scraper.py
+++ b/tools/scraper/scraper.py
@@ -929,7 +929,8 @@ def main_except(argv):
 
     # Emit data for source to be processed by seq-diag-gen utility
     if comn.args.sequence:
-        print("<h3>sequencediagram.org data</h3>")
+        print("<a name=\"c_sequence\"></a>")
+        print("<h3>sequence diagram data</h3>")
         for plf in tree:
             rtr = plf.router
             rid = comn.router_display_names[rtr.log_index]
@@ -947,7 +948,11 @@ def main_except(argv):
             if (not plf.data.sdorg_str == "" and
                 not plf.data.direction == "" and
                 not plf.data.sdorg_str.startswith("HELP")):
-                print("%s|%s|%s|%s|%s|%s" % (plf.datetime, rid, plf.data.direction, peer, plf.data.sdorg_str, ("%s#%d" % (plf.prefixi, plf.lineno))))
+                print("%s|%s|%s|%s|%s|%s|<br>" % (plf.datetime, rid, plf.data.direction, peer, plf.data.sdorg_str, ("%s#%d" % (plf.prefixi, plf.lineno))))
+            else:
+                if plf.data.is_scraper:
+                    print("%s|%s|%s|%s|%s|%s|<br>" % (plf.datetime, rid, "->", rid, plf.data.sdorg_str,
+                                                 ("%s#%d" % (plf.prefixi, plf.lineno))))
             #import pdb
             #pdb.set_trace()
         print("<hr>")
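
Each sequence-data record printed above now carries a trailing "|<br>" cell so
the block renders one record per line in the HTML report while staying easy to
copy into seq-diag-gen.py. A hypothetical record built and split the same way
(all field values are invented):

    record = "%s|%s|%s|%s|%s|%s|<br>" % (
        "2020-06-18 16:26:42.123456", "A0", "->", "peer_1",
        "transfer [0,0] (1) 42", "A0#17")

    # seq-diag-gen.py splits on '|'; the "<br>" lands in a throwaway field.
    dateandtime, name_left, direction, name_right, perf, router_line, dummy = \
        record.split('|')
    assert dummy == "<br>"
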
diff --git a/tools/scraper/seq-diag-gen.py b/tools/scraper/seq-diag-gen.py
index 1c7f8f4..295a9cf 100755
--- a/tools/scraper/seq-diag-gen.py
+++ b/tools/scraper/seq-diag-gen.py
@@ -44,7 +44,7 @@ MAGIC_SPACE_NUMBER = 1   # tested with entryspacing 0.1
 class log_record:
     def __init__(self, index, line):
         # print("DEBUG input line: ", index, line)
-        dateandtime, name_left, direction, name_right, perf, router_line = line.split('|')
+        dateandtime, name_left, direction, name_right, perf, router_line, dummy = line.split('|')
         self.dateandtime = dateandtime.strip()
         self.time = self.dateandtime.split(' ')[1]
         self.index = index
@@ -100,6 +100,10 @@ class log_record:
             else:
                 print("space %d" % MAGIC_SPACE_NUMBER)
 
+    def sender_receiver(self):
+        # Return the (sender, receiver) name pair for this record
+        return self.sentby, self.rcvdby
+
     def diag_dump(self):
         cmn = ("index: %d, dateandtime: %s, sentby: %s, rcvdby: %s, performative: %s, router_line: %s" %
                (self.dateandtime, self.index, self.sentby, self.rcvdby, self.performative, self.router_line))
@@ -116,8 +120,13 @@ def split_log_file(filename):
     :param filename:
     :return:
     '''
-    with open(logfile, 'r') as log:
+    if filename == "STDIN" or filename == "" or filename == "-":
+        log = sys.stdin
+        log_lines = log.read().split("\n")
+    else:
+        log = open(filename, 'r')
         log_lines = log.read().split("\n")
+        log.close()
     return log_lines
 
 
@@ -139,7 +148,7 @@ def match_logline_pairs(log_recs):
 
 if __name__ == "__main__":
     parser = optparse.OptionParser(usage="%prog [options]",
-                                   description="cooks a scraper log snippet into sequencediagram.org source")
+                                   description="cooks a scraper log snippet into sequence diagram source")
     parser.add_option("-f", "--filename", action="append", help="logfile to use or - for stdin")
     parser.add_option('--timestamp', '-t',
                       action='store_true',
@@ -161,6 +170,21 @@ if __name__ == "__main__":
                     index += 1
             match_logline_pairs(log_recs)
 
+            # print senders and receivers marking as actor or participant
+            names = set()
+            for log_rec in log_recs:
+                sndr, rcvr = log_rec.sender_receiver()
+                if sndr is not None:
+                    names.add(sndr)
+                if rcvr is not None:
+                    names.add(rcvr)
+            for name in names:
+                if name.startswith("peer"):
+                    print("actor %s" % name)
+                else:
+                    print("participant %s" % name)
+            print()
+
             # process the list of records
             for log_rec in log_recs:
                 log_rec.show_for_sdorg(opts.timestamp)
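
The seq-diag-gen.py changes above add stdin input and emit one heading line
per name, marking external "peer..." endpoints as actors and routers as
participants. A condensed sketch of both behaviors; the file-name handling
follows split_log_file() and the sample names are invented:

    import sys

    def read_log_lines(filename):
        # "-" (or "STDIN"/empty) selects standard input
        if filename in ("STDIN", "", "-"):
            return sys.stdin.read().split("\n")
        with open(filename, 'r') as log:
            return log.read().split("\n")

    for name in sorted({"A0", "peer_5672"}):    # invented names
        print(("actor %s" if name.startswith("peer")
               else "participant %s") % name)
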
diff --git a/tools/scraper/text.py b/tools/scraper/text.py
index 8cf3094..bbd71e3 100755
--- a/tools/scraper/text.py
+++ b/tools/scraper/text.py
@@ -130,6 +130,7 @@ def web_page_toc():
 <tr><td><a href=\"#c_linkdump\"       >Link name index</a></td>       <td>Short vs. long link names</td></tr>
 <tr><td><a href=\"#c_msgdump\"        >Transfer name index</a></td>   <td>Short names representing transfer data</td></tr>
 <tr><td><a href=\"#c_ls\"             >Router link state</a></td>     <td>Link state analysis</td></tr>
+<tr><td><a href=\"#c_sequence\"       >Sequence diagram data</a></td> <td>Input data for seq-diag-gen.py utility</td></tr>
 </table>
 <hr>
 """

