blob: 8ee0fac0237ecdb6142d6444faecf8a2aabd8727 [file] [log] [blame]
#!/usr/bin/env python
"""report: make a table summarizing output from one or more runs of `sample`."""
from collections import Counter
import csv
import os
import re
import sys
import options
# Option spec consumed by options.py: usage line(s), then a '--' separator,
# then one option per line ("short,long= description").
optspec = """
report [options...] <journal>
--
r,report_dir= path to a single report directory to be parsed
"""

# Resource file with 802.11n coding/data rates, loaded lazily by LoadNRates()
# into the module-level nrates dict keyed by (mcs, width, guard_interval).
NFILE = 'n-datarates.tsv'
nrates = {}

# Resource file mapping 802.11 channel numbers to center frequencies (MHz),
# loaded lazily by LoadChannels() into the channels dict.
CHANNELFILE = 'channels.tsv'
channels = {}
def _Resource(name):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), name)
def LoadNRates():
  """Loads 802.11n coding and data rates into a global variable.

  Reads the single-stream (MCS 0-7) rates from NFILE and fills the
  module-level nrates dict, keyed by (mcs, width, guard_interval).
  Does nothing if the table has already been loaded.
  """
  if nrates:
    return
  base = []
  with open(_Resource(NFILE), 'rb') as tsv:
    rows = csv.reader(tsv, delimiter='\t')
    next(rows)  # the first row is a human-readable header; skip it
    for mcs, width, gi, rate in rows:
      base.append((int(mcs), int(width), int(gi), float(rate)))
  # MCS 8-31 repeat MCS 0-7 with 2, 3 and 4 spatial streams, so each block
  # of eight indices runs at an integer multiple of the single-stream rate.
  for streams in range(1, 5):
    for mcs, width, gi, rate in base:
      nrates[(8 * (streams - 1) + mcs, width, gi)] = rate * streams
def LoadChannels():
  """Load 802.11n channels and frequencies into a global variable.

  Fills the module-level channels dict (channel number -> center
  frequency in MHz) from CHANNELFILE. No-op once loaded.
  """
  if channels:
    return
  with open(_Resource(CHANNELFILE), 'rb') as tsv:
    for channel, freq in csv.reader(tsv, delimiter='\t'):
      channels[int(channel)] = int(freq)
def ParseMCSFile(outfile, width=20):
  """Extract MCS and PHY rate statistics from an MCS report file.

  Args:
    outfile: an iterable of text lines, each holding whitespace-separated
      MCS indices; a '.' token marks a sample with no data.
    width: channel width in MHz, used to look up PHY rates in nrates.

  Returns:
    A (most_common_mcs, average_phy_rate) tuple, or (None, 0.0) if the
    file contained no MCS samples at all.
  """
  # assume long guard interval
  guard = 800
  counter = Counter()
  for line in outfile:
    for tok in line.split():
      if tok == '.':
        continue
      counter[int(tok)] += 1
  if not counter:
    # No samples collected (e.g. an empty or all-'.' file): report
    # gracefully instead of crashing on most_common()[0] / dividing by 0.
    return None, 0.0
  phy = 0.0
  alltimes = 0
  # .items() instead of the Python 2-only .iteritems(): works on both 2 and 3.
  for mcs, times in counter.items():
    phy += nrates[(mcs, width, guard)] * times
    alltimes += times
  return counter.most_common(1)[0][0], phy / alltimes
def ParseIperf(out, udp=False):
  """Parse output written by an `iperf` run into structured data.

  Args:
    out: the text output of an `iperf` client run.
    udp: if True, also capture the jitter and datagram-loss columns that
      only UDP runs print.

  Returns:
    A dict with 'interval', 'transfer'/'transfer_unit' and
    'bandwidth'/'bandwidth_unit' keys (plus 'jitter' and 'datagrams' when
    udp=True) for the first matching result line, or {} if none matched.
  """
  pattern = (r'\[(.{3})\]\s+(?P<interval>.*?sec)\s+(?P<transfer>.*?Bytes)'
             r'\s+(?P<bandwidth>.*?bits/sec)')
  if udp:
    pattern += r'\s+(?P<jitter>.*?ms)\s+(?P<datagrams>.*)'
  iperf_re = re.compile(pattern)
  for line in out.splitlines():
    match = iperf_re.match(line)
    if match:
      iperf = match.groupdict()
      # Split cells like '206 Mbits/sec' into a float value and a unit
      # string so callers can do arithmetic on the value.
      bval, bunit = iperf['bandwidth'].split()
      iperf['bandwidth'] = float(bval)
      iperf['bandwidth_unit'] = bunit
      tval, tunit = iperf['transfer'].split()
      iperf['transfer'] = float(tval)
      iperf['transfer_unit'] = tunit
      return iperf
  return {}
def ParseIperfTCP(out):
  """Parse the output of a TCP `iperf` run into a dictionary.

  sample line: [  4]  0.0-10.0 sec   245 MBytes   206 Mbits/sec
  """
  return ParseIperf(out, udp=False)
def ParseIperfUDP(out):
  """Parse the output of a UDP `iperf` run into a dictionary.

  sample line (long):
  [  5] 0.0-10.0 sec  1.25 MBytes  1.05 Mbits/sec  0.593 ms  0/ 893 (0%)
  """
  return ParseIperf(out, udp=True)
def Channel(text_channel):
  """Given a text channel spec like 149,+1 return the central freq and width."""
  if ',' not in text_channel:
    # A bare channel number is a 20 MHz channel centered on that channel.
    return channels[int(text_channel)], 20
  # "<base>,<offset>" is a 40 MHz channel: its center sits 10 MHz toward
  # the secondary channel (offset +1 above, -1 below the primary).
  base, offset = text_channel.split(',')
  center = channels[int(base)] + int(offset) * 10
  return center, 40
def ParseAirportI(output):
  """Parse output of `airport -I` and return it as a dictionary.

  Each 'key: value' line becomes one entry; the signal and noise readings
  (agrCtlRSSI, agrCtlNoise) are converted to int, everything else stays a
  string. Lines that don't fit the format are skipped.
  """
  result = {}
  for line in output.splitlines():
    try:
      key, value = [cell.strip() for cell in line.split(':', 1)]
      if key in ['agrCtlRSSI', 'agrCtlNoise']:
        result[key] = int(value)
      else:
        result[key] = value
    except ValueError:
      # No ':' in the line, or a non-numeric RSSI/noise value: skip the
      # line rather than abort parsing the whole report.
      continue
  return result
def ParseAirportScan(output):
  """Parse output of `airport -s` and return it as a list of rows.

  The scan output is a simple fixed-width format; each data line becomes
  a [ssid, bssid, rssi(int), channel, ht, cc, security] row.

  Returns:
    A list of rows, with a header row prepended.
  """
  header = ['SSID', 'BSSID', 'RSSI', 'CHANNEL', 'HT', 'CC',
            'SECURITY (auth/unicast/group)']
  result = []
  # A valid channel spec is a number, optionally followed by ",+n"/",-n".
  chre = re.compile(r'\d+(?:,[+-]\d+)?')
  for line in output.splitlines():
    ssid, bssid, rssi, channel, ht, cc, security = (
        [cell.strip() for cell in (line[:32], line[33:50], line[51:55],
                                   line[56:63], line[64:66], line[67:69],
                                   line[70:])])
    # the scan sometimes includes comment lines. assume that anything that has
    # a valid channel isn't a comment line.
    if chre.match(channel):
      result += [[ssid, bssid, int(rssi), channel, ht, cc, security]]
  return [header] + result
def ParseIwLink(output):
"""Parse output of `iw dev <devname> link` and return it as a dictionary."""
ol = output.splitlines()
# BSSID is in the first line, in an idiosyncratic format.
# sample: Connected to d8:c7:c8:d7:72:30 (on wlan0)
m ='(\w{2}:){5}\w{2}', ol[0])
if m:
result = {'BSSID':}
raise ValueError('dev was not connected.')
for line in ol[1:]:
key, value = line.split(':', 1)
result[key.strip()] = value.strip()
except ValueError:
return result
def ParseIpAddr(output):
  """Parse output of one-line `ip addr` and return it as a dictionary.

  Each line is split into five whitespace-delimited fields; the second is
  the interface name and the fourth its address.
  """
  result = {}
  for entry in output.splitlines():
    _, ifname, _, address, _ = entry.split(None, 4)
    result[ifname] = address
  return result
def Overlap(c1, w1, c2, w2):
  """Return True if two WiFi channels overlap, or False otherwise.

  Channels are given as (center frequency, width); each spans
  center +/- width/2, and touching edges count as overlap.
  """
  # TODO(willangley): replace with code from Waveguide
  lo1, hi1 = c1 - w1 / 2, c1 + w1 / 2
  lo2, hi2 = c2 - w2 / 2, c2 + w2 / 2
  return ((lo1 <= lo2 <= hi1) or (lo1 <= hi2 <= hi1)
          or (lo2 <= lo1 <= hi2) or (lo2 <= hi1 <= hi2))
def ReportLine(report_dir):
"""Condense the output of a run into a one-line summary report."""
_, _, steps = os.path.basename(report_dir).split('-')
line = [int(steps)]
# Reports generated on Mac have 'airport'
apath = os.path.join(report_dir, 'airport')
if os.path.isfile(apath):
with open(apath) as ai:
airport = ParseAirportI(
channel, width = Channel(airport['channel'])
shared = 0
overlap = 0
cpath = os.path.join(report_dir, 'airportscan')
if os.path.exists(cpath):
with open(cpath) as ac:
for row in ParseAirportScan([1:]:
oc, ow = Channel(row[3])
if channel == oc and width == ow:
shared += 1
if Overlap(channel, width, oc, ow):
overlap += 1
rssi = airport['agrCtlRSSI']
noise = airport['agrCtlNoise']
line += [channel, width, rssi, noise, shared, overlap - shared]
# assume the report was generated on Linux.
with open(os.path.join(report_dir, 'iwlink')) as il:
iwlink = ParseIwLink(
signal = int(iwlink.get('signal', '0 dBm').split()[0])
channel = int(iwlink.get('freq'))
width = 20
m ='(\d+)MHz', iwlink.get('tx bitrate'), flags=re.I)
if m:
width = int(
# Noise and contention not yet gathered in samples run on Linux systems.
line += [channel, width, signal, None, None, None]
mpath = os.path.join(report_dir, 'mcs')
if os.path.isfile(mpath):
with open(os.path.join(report_dir, 'mcs')) as mf:
line += ParseMCSFile(mf, width)
line += [None, None]
# If the initial ping test fails, we won't collect performance information.
# deal with this gracefully.
for fn, infile in [(ParseIperfTCP, 'iperf'),
(ParseIperfUDP, 'iperfu')]:
ipath = os.path.join(report_dir, infile)
if not os.path.isfile(ipath):
line += [None, None]
with open(ipath) as ip:
perf = fn(
line += [perf.get('bandwidth'), perf.get('bandwidth_unit')]
return line
def main():
  """Entry point: build one report line per sample and print a TSV table."""
  o = options.Options(optspec)
  (opt, _, extra) = o.parse(sys.argv[1:])
  if len(extra) > 1:
    o.fatal('expected at most one journal name.')
  lines = []
  if opt.report_dir:
    lines += [ReportLine(opt.report_dir)]
  if extra:
    for jname in extra[:1]:
      jname = os.path.realpath(jname)
      with open(jname) as journal:
        # Each journal line names a report directory relative to the journal.
        for line in journal:
          lines += [ReportLine(os.path.join(os.path.dirname(jname),
                                            line.strip()))]
  if len(lines) < 1:
    o.fatal("didn't find any samples. did you supply at least one report dir"
            ' or journal?')
  header = ['Steps', 'Channel', 'Width', 'RSSI', 'Noise', 'Shared',
            'Interfering', 'MCS', 'PHY', 'TCP BW', '(Units)', 'UDP BW',
            '(Units)']
  writer = csv.writer(sys.stdout, delimiter='\t', quoting=csv.QUOTE_MINIMAL)
  writer.writerow(header)
  writer.writerows(lines)
if __name__ == '__main__':
  main()