#!/usr/bin/python2.7
"""report: make a table summarizing output from one or more runs of `sample`."""

import collections
import csv
import datetime
import os
import re
import sys

import ifstats
import iperf
import isostream
import options
import wifipacket

optspec = """
report [options...] <journal>
--
r,report_dir= path to a single report directory to be parsed
"""

# 802.11n MCS -> data rate lookup table, shipped alongside this script.
NFILE = 'n-datarates.tsv'
nrates = {}

# 802.11n channel -> center frequency lookup table, shipped alongside this
# script.
CHANNELFILE = 'channels.tsv'
channels = {}


def _Resource(name):
  """Return the path of a data file that ships alongside this script."""
  return os.path.join(os.path.dirname(os.path.abspath(__file__)), name)


def LoadNRates():
  """Loads 802.11n coding and data rates into a global variable."""
  if nrates:
    return
  raw = []
  with open(_Resource(NFILE), 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter='\t')
    next(reader)  # skip the human-readable header row
    for mcs, width, gi, rate in reader:
      raw.append([int(mcs), int(width), int(gi), float(rate)])
  # Load the global table, computing rates for MCS 8-31 from MCS 0-7:
  # each block of 8 MCS indexes adds one spatial stream, which multiplies
  # the base data rate.
  for mcs, width, gi, rate in raw:
    for i in range(4):
      nrates[(8 * i + mcs, width, gi)] = rate * (i + 1)


def LoadChannels():
  """Load 802.11n channels and frequencies into a global variable."""
  if channels:
    return
  with open(_Resource(CHANNELFILE), 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter='\t')
    next(reader)  # skip the header row
    for channel, freq in reader:
      channels[int(channel)] = int(freq)


def ParseMCSFile(outfile, width=20):
  """Extract MCS and PHY rate statistics from an MCS report file."""
  LoadNRates()
  # Assume the long (800 ns) guard interval.
  guard = 800
  counter = collections.Counter()
  for line in outfile:
    for tok in line.split():
      if tok == '.':
        continue
      mcs = int(tok)
      counter[mcs] += 1
  # Return the most common MCS and the sample-weighted average PHY rate.
  phy = 0.0
  alltimes = 0
  for mcs, times in counter.iteritems():
    phy += nrates[(mcs, width, guard)] * times
    alltimes += times
  return counter.most_common()[0][0], phy / alltimes


def Channel(text_channel):
  """Given a text channel spec like '149,+1', return center freq and width."""
  LoadChannels()
  if ',' in text_channel:
    base, offset = text_channel.split(',')
    freq = channels[int(base)]
    offset = int(offset)
    # A 40 MHz channel is centered 10 MHz above (offset +1) or below
    # (offset -1) the primary 20 MHz channel's center frequency.
    return (2 * freq + offset * 20) / 2, 40
  else:
    return channels[int(text_channel)], 20


def Overlap(c1, w1, c2, w2):
  """Return True if two WiFi channels overlap, or False otherwise."""
  # TODO(willangley): replace with code from Waveguide
  # Compute the bottom and top edges of each channel, then check whether
  # either channel's edges fall inside the other.
  b1 = c1 - w1 / 2
  t1 = c1 + w1 / 2
  b2 = c2 - w2 / 2
  t2 = c2 + w2 / 2
  return ((b1 <= b2 <= t1) or (b2 <= b1 <= t2)
          or (b1 <= t2 <= t1) or (b2 <= t1 <= t2))


def ReportLine(report_dir, series=None):
  """Condense the output of a sample.py run into a one-line summary report."""
  line = collections.OrderedDict()
  if series:
    line['Series'] = series
  # Report directory names look like <prefix>-<timestamp>-<steps>.
  _, stamp, steps = os.path.basename(report_dir).split('-')
  line['Time'] = datetime.datetime.fromtimestamp(float(stamp))
  line['Steps'] = int(steps)

  system, cache = ifstats.Restore(report_dir)
  result = ifstats.Parse(system, cache)
  width = 20  # default channel width; refined below when link info is present
  if not result.get('link'):
    pass
  elif system == 'Darwin':
    airport = result.get('link')
    channel, width = Channel(airport['channel'])
    shared = 0
    overlap = 0
    scan = result.get('scan')
    if len(scan) > 1:
      for row in scan[1:]:
        oc, ow = Channel(row[3])
        if channel == oc and width == ow:
          shared += 1
        if Overlap(channel, width, oc, ow):
          overlap += 1
    # 'Shared' counts scanned networks on exactly our channel; 'Interfering'
    # counts networks whose channels overlap ours without matching exactly.
    line.update({
        'Channel': channel,
        'Width': width,
        'RSSI': airport['agrCtlRSSI'],
        'Noise': airport['agrCtlNoise'],
        'Shared': shared,
        'Interfering': overlap - shared
    })
  elif system == 'Linux':
    iwlink = result.get('link')
    signal = int(iwlink.get('signal', '0 dBm').split()[0])
    channel = int(iwlink.get('freq', '0'))
    m = re.search(r'(\d+)MHz', iwlink.get('tx bitrate', ''), flags=re.I)
    if m:
      width = int(m.group(1))
    # Noise and contention not yet gathered in samples run on Linux systems.
    line.update({
        'Channel': channel,
        'Width': width,
        'RSSI': signal,
    })

  try:
    ppath = os.path.join(report_dir, 'testnetwork.pcap')
    with open(ppath) as stream:
      rates = [float(opt.rate) for opt, _ in wifipacket.Packetize(stream)]
      # TODO(willangley): come up with a meaningful modal MCS for mixed
      # 802.11n/802.11ac captures like we have here.
      line['PHY'] = sum(rates) / max(len(rates), 1)
  except IOError:
    try:
      mpath = os.path.join(report_dir, 'mcs')
      with open(mpath) as mf:
        mcs, phy = ParseMCSFile(mf, width)
        line['MCS'] = mcs
        line['PHY'] = phy
    except IOError:
      pass

  # If the initial ping test fails, we won't collect performance information;
  # deal with this gracefully.
  ips = iperf.Restore(report_dir)
  if 'iperf' in ips:
    # pylint:disable=line-too-long
    for key, perf in [('TCP BW up', iperf.ParseIperfTCP(ips.get('iperf', ''))),
                      ('UDP BW up', iperf.ParseIperfUDP(ips.get('iperfu', '')))]:
      line[key] = perf.get('bandwidth')
      line['{} units'.format(key)] = perf.get('bandwidth_unit')
  elif 'iperf3' in ips:
    for name in (key for key in ips if key.startswith('iperf3')):
      perf = iperf.ParseIperf3(ips[name])
      if not perf or 'error' in perf:
        continue
      test_start = perf['start']['test_start']
      protocol = test_start['protocol']
      direction = 'down' if test_start['reverse'] else 'up'
      key = '{protocol} BW {direction}'.format(protocol=protocol,
                                               direction=direction)
      if protocol == 'TCP':
        line[key] = perf['end']['sum_received']['bits_per_second']
      elif protocol == 'UDP':
        line[key] = perf['end']['sum']['bits_per_second']
      else:
        continue
      line['{} units'.format(key)] = 'bit/s'

  try:
    with open(os.path.join(report_dir, 'isostream')) as istm:
      text = istm.read()
      line['isostream'] = isostream.ParseIsostream(text)
  except IOError:
    pass
  return line
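

# A journal is a plain-text file that names one report directory per line,
# relative to the journal's own location; lines starting with '#' are
# skipped as comments.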
def ReadJournal(jname):
  """Read a journal, returning a series name and its data folders."""
  jname = os.path.realpath(jname)
  series = os.path.basename(jname)
  if series == 'journal':
    series = os.path.basename(os.path.dirname(jname))
  folders = []
  with open(jname) as journal:
    for line in journal:
      line = line.strip()
      if line.startswith('#'):
        continue
      folders.append(os.path.join(os.path.dirname(jname), line))
  return series, folders


def Report(journals):
  """Given journal file names, return (ReportLines, records of bad folders)."""
  report = []
  bad = []
  for jname in journals:
    series, folders = ReadJournal(jname)
    for folder in folders:
      try:
        report += [ReportLine(folder, series=series)]
      except (TypeError, IOError) as e:
        bad += [collections.OrderedDict(folder=folder, error=repr(e))]
  return report, bad


def WriteReport(lines):
  """Write a network testing report in .tsv format to stdout."""
  # Include every field we can write in the header row.
  header = ['Series', 'Time', 'Steps', 'Channel', 'Width', 'RSSI', 'Noise',
            'Shared', 'Interfering', 'MCS', 'PHY', 'TCP BW up',
            'TCP BW up units', 'UDP BW up', 'UDP BW up units', 'TCP BW down',
            'TCP BW down units', 'UDP BW down', 'UDP BW down units',
            'isostream']
  writer = csv.DictWriter(sys.stdout, header, dialect=csv.excel_tab)
  writer.writeheader()
  writer.writerows(lines)


def main():
  o = options.Options(optspec)
  (opt, _, extra) = o.parse(sys.argv[1:])

  report = []  # stays empty if neither a report dir nor a journal is given
  if opt.report_dir:
    report = [ReportLine(opt.report_dir)]
  elif extra:
    report, bad = Report(extra)
    if bad:
      writer = csv.DictWriter(sys.stdout, bad[0].keys(), dialect=csv.excel_tab)
      writer.writeheader()
      writer.writerows(bad)
      print  # blank line separating the failure table from the report

  if len(report) < 1:
    o.fatal("Didn't find any samples. Did you supply at least one report dir"
            ' or journal?')
  WriteReport(report)


if __name__ == '__main__':
  main()