qpid-commits mailing list archives

From c...@apache.org
Subject [qpid-dispatch] branch master updated: DISPATCH-1688: Inject literal log lines into scraper output
Date Thu, 11 Jun 2020 19:16:49 GMT
This is an automated email from the ASF dual-hosted git repository.

chug pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/qpid-dispatch.git


The following commit(s) were added to refs/heads/master by this push:
     new e6eadab  DISPATCH-1688: Inject literal log lines into scraper output
e6eadab is described below

commit e6eadab5fc4e7cd6be55d80b0943b3508a453c93
Author: Chuck Rolke <chug@apache.org>
AuthorDate: Thu Jun 11 15:15:37 2020 -0400

    DISPATCH-1688: Inject literal log lines into scraper output
---
 tools/scraper/parser.py | 20 +++++++++++++++++---
 tools/scraper/router.py |  3 +++
 2 files changed, 20 insertions(+), 3 deletions(-)
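
For reference, the feature keys off log lines whose module field is
SCRAPER. A hypothetical example of such a line (the timestamp and the
message text are made up for illustration):

    2020-06-11 15:15:37.123456 -0400 SCRAPER (info) === begin test phase 2 ===

Everything after the module name is copied verbatim into the web page's
Log Data section.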

diff --git a/tools/scraper/parser.py b/tools/scraper/parser.py
index 135dbc7..50d26e8 100755
--- a/tools/scraper/parser.py
+++ b/tools/scraper/parser.py
@@ -145,6 +145,7 @@ class LogLineData:
         self.is_policy_trace = False  # line is POLICY (trace)
         self.is_server_info = False  # line is SERVER (info)
         self.is_router_ls = False  # line is ROUTER_LS (info)
+        self.is_scraper = False # line is SCRAPER (any-level)
         self.fid = ""  # Log line (frame) id as used in javascript code
         self.amqp_error = False
         self.link_class = "client"  # attach sees: normal, router, router-data (, management?)
@@ -383,6 +384,7 @@ class ParsedLogLine(object):
     router_ls_key = "ROUTER_LS (info)"
     transfer_key = "@transfer(20)"
     proton_frame_key = "FRAME: "
+    scraper_key = "SCRAPER ("
 
     def sender_settle_mode_of(self, value):
         if value == "0":
@@ -799,7 +801,8 @@ class ParsedLogLine(object):
                 ParsedLogLine.protocol_trace_key in _line or
                 (ParsedLogLine.policy_trace_key in _line and "lookup_user:" in _line) or
 # open (not begin, attach)
                 ParsedLogLine.server_info_key in _line or
-                ParsedLogLine.router_ls_key in _line):
+                ParsedLogLine.router_ls_key in _line or
+                ParsedLogLine.scraper_key in _line):
             raise ValueError("Line is not a candidate for parsing")
         self.index = _log_index  # router prefix 0 for A, 1 for B
         self.instance = _instance  # router instance in log file
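
Note that SCRAPER has to appear in this candidate-key check: ParsedLogLine
raises ValueError for lines matching none of the keys, so without the new
clause the literal lines would be rejected before the scraper branch below
could handle them.
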
@@ -862,6 +865,15 @@ class ParsedLogLine(object):
                 if self.datetime > self.comn.args.time_end:
                     raise ValueError("Line too late outside time-of-day limits")
 
+        # Pull out scraper literal logs
+        sti = self.line.find(self.scraper_key)
+        if sti > 0:
+            self.data.is_scraper = True
+            # strip datetime and show literal string
+            sti += len("SCRAPER")
+            self.data.web_show_str = ("<strong>SCRAPER</strong> %s" % common.html_escape(self.line[sti:]))
+            return
+
         # extract connection number
         sti = self.line.find(self.server_trace_key)
         if sti < 0:
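
As a minimal standalone sketch of this branch (the log line is invented,
and Python's html.escape stands in for the module's common.html_escape):

    import html

    scraper_key = "SCRAPER ("
    line = "2020-06-11 15:15:37.123456 -0400 SCRAPER (info) === begin test phase 2 ==="

    # find() returns -1 when the key is absent; real log lines start with
    # a timestamp, so a hit is always at an index > 0
    sti = line.find(scraper_key)
    if sti > 0:
        # drop the timestamp prefix but keep the module name and the
        # literal remainder of the line
        sti += len("SCRAPER")
        web_show_str = "<strong>SCRAPER</strong> %s" % html.escape(line[sti:])
        print(web_show_str)
        # prints: <strong>SCRAPER</strong>  (info) === begin test phase 2 ===
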
@@ -986,6 +998,7 @@ def parse_log_file(fn, log_index, comn):
     keys = [key1, key3]
     key4 = "ROUTER (info) Version:"  # router version line
     key5 = "ROUTER (info) Router started in " # router mode
+    key6 = "SCRAPER (" # verbatim log lines to copy to Log Data
     with open(fn, 'r') as infile:
         for line in infile:
             if search_for_in_progress:
@@ -1030,10 +1043,11 @@ def parse_log_file(fn, log_index, comn):
                 rtr.version = line[(line.find(key4) + len(key4)):].strip().split()[0]
             elif key5 in line:
                 rtr.mode = line[(line.find(key5) + len(key5)):].strip().split()[0].lower()
+            elif key6 in line:
+                pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
+                rtr.lines.append(pl)
             elif "[" in line and "]" in line:
                 try:
-                    if lineno == 130:
-                        pass
                     do_this = True if not hasattr(comn.args, 'skip_all_data') else not comn.args.skip_all_data
                     if not do_this:
                         # not indexing data. maybe do this line anyway
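
A hypothetical way to exercise the feature (the file name and message are
invented): append a matching line to a router log before running scraper,
and it is carried verbatim into the Log Data output:

    # hypothetical: mark a point of interest in router log A.log so it
    # appears verbatim in scraper's Log Data
    with open("A.log", "a") as f:
        f.write("2020-06-11 15:16:00.000000 -0400 SCRAPER (info) restarting router B now\n")
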
diff --git a/tools/scraper/router.py b/tools/scraper/router.py
index 7f27682..9fd3835 100755
--- a/tools/scraper/router.py
+++ b/tools/scraper/router.py
@@ -138,6 +138,9 @@ class Router():
         :return:
         '''
         for item in self.lines:
+            if item.data.is_scraper:
+                # scraper lines are pass-through
+                continue
             conn_num = int(item.data.conn_num)
             id = item.data.conn_id           # full name A0_3
             if conn_num not in self.conn_list:
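
Scraper lines are synthesized from verbatim text and carry no connection
number, so the per-connection detail pass has to skip them; presumably
int(item.data.conn_num) would otherwise fail on such lines.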


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@qpid.apache.org
For additional commands, e-mail: commits-help@qpid.apache.org

