blob: 2d4a1a58a49f394d881dbdf45a8cd3c7fe7165fe [file] [log] [blame]
"""report: make a table summarizing output from one or more runs of `sample`."""
import collections
import csv
import datetime
import os
import re
import sys
import ifstats
import iperf
import isostream
import options
import wifipacket
# Command-line specification consumed by options.Options below.
# NOTE(review): the closing triple-quote (and the conventional `--`
# usage/flag separator) were missing in this copy of the file, leaving the
# module with an unterminated string literal; restored here — confirm the
# exact spec text against upstream history.
optspec = """
report [options...] <journal>
--
r,report_dir= path to a single report directory to be parsed
"""

# n-datarates.tsv: (MCS index, channel width, guard interval) -> Mbit/s,
# loaded lazily into `nrates` by LoadNRates().
NFILE = 'n-datarates.tsv'
nrates = {}

# channels.tsv: 802.11 channel number -> frequency, loaded lazily into
# `channels` by LoadChannels().
CHANNELFILE = 'channels.tsv'
channels = {}
def _Resource(name):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), name)
def LoadNRates():
  """Loads 802.11n coding and data rates into a global variable."""
  if nrates:
    return
  parsed = []
  with open(_Resource(NFILE), 'rb') as tsv:
    rows = csv.reader(tsv, delimiter='\t')
    next(rows)  # skip header row when reading by machine
    for mcs, width, gi, rate in rows:
      parsed.append((int(mcs), int(width), int(gi), float(rate)))
  # MCS 8-31 are 2-4 spatial streams: same coding as MCS 0-7, with the
  # data rate scaled by the stream count.
  for mcs, width, gi, rate in parsed:
    for streams in (1, 2, 3, 4):
      nrates[(8 * (streams - 1) + mcs, width, gi)] = rate * streams
def LoadChannels():
  """Load 802.11n channels and frequencies into a global variable."""
  if channels:
    return
  with open(_Resource(CHANNELFILE), 'rb') as tsv:
    for number, mhz in csv.reader(tsv, delimiter='\t'):
      channels[int(number)] = int(mhz)
def ParseMCSFile(outfile, width=20):
  """Extract MCS and PHY rate statistics from an MCS report file.

  Returns a (modal MCS index, mean PHY rate in Mbit/s) pair for the
  MCS indices tallied from `outfile`.
  """
  guard = 800  # assume long guard interval
  tally = collections.Counter(
      int(tok)
      for report_line in outfile
      for tok in report_line.split()
      if tok != '.')  # '.' tokens are filler, not MCS indices
  seen_total = sum(tally.values())
  weighted_phy = sum(nrates[(idx, width, guard)] * count
                     for idx, count in tally.items())
  return tally.most_common()[0][0], weighted_phy / seen_total
def ParsePcap(stream, known_ssids):
  """ParsePcap computes PHY performance for a packet capture in `stream`.

  Args:
    stream: an open `file` object pointing to a file with pcap data.
    known_ssids: a dict mapping known BSSIDs to SSIDs.

  Returns:
    a dict containing PHY performance information. keys are descriptive strings,
    values are strings or numbers in Mbit/s.
  """
  rates = collections.defaultdict(list)
  times_seen = collections.Counter()
  start_secs = None
  for opt, frame in wifipacket.Packetize(stream):
    # Timestamps below are made relative to the first frame in the capture.
    if start_secs is None:
      start_secs = opt.pcap_secs
    # Fill in known SSIDs array if our scan didn't see anything.
    ssid = wifipacket.IdentifySSID(opt, frame)
    if ssid is not None:
      known_ssids[opt.ta] = ssid
    for sta, direction in [('ra', 'up'), ('ta', 'down'), ('xa', 'across')]:
      bssid = opt.get(sta)
      ssid = known_ssids.get(bssid)
      if ssid:
        # NOTE(review): this statement is truncated in this copy of the file
        # (unclosed paren). Judging from the 4-tuple unpacking below, the
        # appended tuple was presumably
        # (elapsed secs, direction, rate, size) — restore from history.
        rates[ssid].append((opt.pcap_secs - start_secs,
        times_seen[ssid] += 1
  if not times_seen:
    return {}
  # Summarize only the SSID observed most often in the capture.
  modal_ssid, _ = times_seen.most_common(1)[0]
  summary = {}
  for _, direction, rate, size in rates[modal_ssid]:
    size_weighted_rate = rate * float(size)
    # NOTE(review): as written the first sample per direction is counted
    # twice (initialized with the sample, then immediately added again);
    # an `else:` line appears to have been dropped here — confirm against
    # upstream history.
    if direction not in summary:
      summary[direction] = [size_weighted_rate, size]
    summary[direction][0] += size_weighted_rate
    summary[direction][1] += size
  line = {'PHY ssid': modal_ssid}
  for direction, accum in summary.items():
    size_weighted_rate, size = accum
    # Size-weighted mean PHY rate for this direction; 0.0 when no bytes seen.
    line['PHY {}'.format(direction)] = ((size_weighted_rate / size) if size
                                        else 0.0)
  return line
def Channel(text_channel):
  """Given a text channel spec like 149,+1 return the central freq and width."""
  if ',' not in text_channel:
    # A plain channel number is a 20 MHz channel centered on that channel.
    return channels[int(text_channel)], 20
  # "<base>,<offset>" is a 40 MHz channel: the center sits halfway between
  # the base channel and the 20 MHz-offset secondary channel.
  base, offset = text_channel.split(',')
  base_freq = channels[int(base)]
  shift = int(offset)
  return (2 * base_freq + shift * 20) / 2, 40
def Overlap(c1, w1, c2, w2):
  """Return True if two WiFi channels overlap, or False otherwise."""
  # TODO(willangley): replace with code from Waveguide
  lo1, hi1 = c1 - w1 / 2, c1 + w1 / 2
  lo2, hi2 = c2 - w2 / 2, c2 + w2 / 2
  # The channels overlap when either one contains an edge of the other
  # (edges touching counts as overlap).
  return any(lo <= edge <= hi
             for lo, hi, edge in ((lo1, hi1, lo2), (lo2, hi2, lo1),
                                  (lo1, hi1, hi2), (lo2, hi2, hi1)))
def ReportLine(report_dir, series=None):
  """Condense the output of a run into a one-line summary report.

  NOTE(review): several statements in this copy of the function are
  truncated (orphaned dict literals, `except` without `try`, cut-off
  calls). They are flagged inline below and must be restored from
  upstream history before this function can run.

  Args:
    report_dir: path to one `sample` output directory, whose basename is
      <name>-<unix timestamp>-<step count>.
    series: optional series name to record in the line.

  Returns:
    a collections.OrderedDict mapping column names to values.
  """
  line = collections.OrderedDict()
  if series:
    line['Series'] = series
  # Recover the run timestamp and step count from the directory name.
  _, stamp, steps = os.path.basename(report_dir).split('-')
  line['Time'] = datetime.datetime.fromtimestamp(float(stamp))
  line['Steps'] = int(steps)
  system, cache = ifstats.Restore(report_dir)
  # known_ssids is a map from BSSID(string) => SSID(string)
  known_ssids = {}
  if 'Darwin' in system:
    result = ifstats.Parse('Darwin', cache)
    airport = result.get('link')
    channel, width = Channel(airport['channel'])
    shared = 0
    overlap = 0
    scan = result.get('scan')
    if len(scan) > 1:
      for row in scan:
        oc, ow = Channel(row['CHANNEL'])
        # Same channel and width => sharing; any spectral overlap counts
        # here, with `shared` subtracted back out for 'Interfering' below.
        if channel == oc and width == ow:
          shared += 1
        if Overlap(channel, width, oc, ow):
          overlap += 1
        known_ssids[row['BSSID']] = row['SSID']
    # NOTE(review): truncated — these orphaned entries were presumably the
    # argument of a line.update({...}) call.
    'Channel': channel,
    'Width': width,
    'RSSI': airport['agrCtlRSSI'],
    'Noise': airport['agrCtlNoise'],
    'Shared': shared,
    'Interfering': overlap - shared
  if 'Linux' in system:
    result = ifstats.Parse('Linux', cache)
    iwlink = result.get('link')
    # `signal` and `freq` come from the parsed link info; default 0 if absent.
    signal = int(iwlink.get('signal', '0 dBm').split()[0])
    channel = int(iwlink.get('freq', '0'))
    width = 20
    # NOTE(review): truncated — presumably
    # ``.
    m ='(\d+)MHz', iwlink.get('tx bitrate', ''), flags=re.I)
    if m:
      # NOTE(review): truncated — presumably ``.
      width = int(
    # Noise and contention not yet gathered in samples run on Linux systems.
    # NOTE(review): truncated — orphaned line.update({...}) entries again.
    'Channel': channel,
    'Width': width,
    'RSSI': signal,
  # TODO(willangley): integrate skid statistics with the rest of the benchmark
  # framework system detection.
  # NOTE(review): a `try:` line appears to be missing before this `with`
  # (there is a bare `except IOError:` below), and the call on the next
  # line is cut off mid-arguments.
  with open(os.path.join(report_dir, 'status_wireless')) as status_wireless:
    result = ifstats.skids.ParseStatusWireless(
    width = result['Bandwidth'].split()[0]
    'Channel': result['Channel'],
    'Width': width,
    'RSSI': result['RSSI'],
  except IOError:
  ppath = os.path.join(report_dir, 'testnetwork.pcap')
  # NOTE(review): missing `try:` before this `with`, and the IOError
  # handler body (probably `pass`) is gone.
  with open(ppath) as stream:
    line.update(ParsePcap(stream, known_ssids))
  except IOError:
  mpath = os.path.join(report_dir, 'mcs')
  # NOTE(review): same truncation pattern — missing `try:`/handler body.
  with open(mpath) as mf:
    mcs, phy = ParseMCSFile(mf, width)
    line['MCS'] = mcs
    line['PHY'] = phy
  except IOError:
  # If the initial ping test fails, we won't collect performance information.
  # deal with this gracefully.
  ips = iperf.Restore(report_dir)
  if 'iperf' in ips:
    # pylint:disable=line-too-long
    for key, perf in [('TCP BW up', iperf.ParseIperfTCP(ips.get('iperf', ''))),
                      ('UDP BW up', iperf.ParseIperfUDP(ips.get('iperfu', '')))]:
      line[key] = perf.get('bandwidth')
      line['{} units'.format(key)] = perf.get('bandwidth_unit')
  elif 'iperf3' in ips:
    for name in (key for key in ips
                 if key.startswith('iperf3')):
      perf = iperf.ParseIperf3(ips[name])
      # NOTE(review): truncated — this guard has no body (probably
      # `continue` to skip failed runs).
      if not perf or 'error' in perf:
      test_start = perf['start']['test_start']
      protocol = test_start['protocol']
      direction = 'down' if test_start['reverse'] else 'up'
      # NOTE(review): truncated — the call is cut off, presumably ending
      # `direction=direction)`.
      key = '{protocol} BW {direction}'.format(protocol=protocol,
      if protocol == 'TCP':
        line[key] = perf['end']['sum_received']['bits_per_second']
      elif protocol == 'UDP':
        line[key] = perf['end']['sum']['bits_per_second']
      line['{} units'.format(key)] = 'bit/s'
  # NOTE(review): missing `try:` before this `with`; `text =` is cut off
  # (probably ``) and the IOError handler body is gone.
  with open(os.path.join(report_dir, 'isostream')) as istm:
    text =
    line['isostream'] = isostream.ParseIsostream(text)
  except IOError:
  return line
def ReadJournal(jname):
  """Read a journal, returning a series name and its data folders.

  Args:
    jname: path to a journal file listing one report-dir name per line;
      lines beginning with '#' are comments.

  Returns:
    (series, folders): the series name and a list of report-dir paths
    resolved relative to the journal's own directory.
  """
  jname = os.path.realpath(jname)
  series = os.path.basename(jname)
  if series == 'journal':
    # A journal named literally `journal` takes its series name from the
    # directory that contains it.
    series = os.path.basename(os.path.dirname(jname))
  folders = []
  with open(jname) as journal:
    for entry in journal:
      entry = entry.strip()
      # Skip comments and blank lines; as previously written the comment
      # test was inverted (a dropped `continue`), so ONLY '#' lines were
      # collected and every real folder name was discarded.
      if not entry or entry.startswith('#'):
        continue
      folders.append(os.path.join(os.path.dirname(jname), entry))
  return series, folders
def Report(journals):
  """Given the name of a journal file, return a list of ReportLines.

  Args:
    journals: iterable of journal file paths.

  Returns:
    (report, bad): successfully parsed ReportLines, plus one OrderedDict
    per folder that failed, recording the folder and the repr of the error.
  """
  report = []
  bad = []
  for jname in journals:
    series, folders = ReadJournal(jname)
    for folder in folders:
      # A missing or malformed report dir shouldn't abort the whole report;
      # record the failure and carry on.  (The `try:` line was missing in
      # this copy, leaving a bare `except` — restored.)
      try:
        report += [ReportLine(folder, series=series)]
      except (TypeError, IOError) as e:
        bad += [collections.OrderedDict(folder=folder, error=repr(e))]
  return report, bad
def WriteReport(lines):
  """Write a network testing report in .tsv format to stdout.

  Args:
    lines: iterable of dicts (e.g. from ReportLine) whose keys are a
      subset of the header columns below; missing columns are left blank.
  """
  # include every field we can write in the header row
  header = ['Series', 'Time', 'Steps', 'Channel', 'Width', 'RSSI', 'Noise',
            'Shared', 'Interfering',
            'MCS', 'PHY', 'PHY ssid', 'PHY up', 'PHY down', 'PHY across',
            'TCP BW up', 'TCP BW up units', 'UDP BW up', 'UDP BW up units',
            'TCP BW down', 'TCP BW down units', 'UDP BW down',
            'UDP BW down units', 'isostream']
  writer = csv.DictWriter(sys.stdout, header, dialect=csv.excel_tab)
  # Restored: previously the writer was constructed but nothing was ever
  # written and `lines` went unused (truncation).
  writer.writeheader()
  for line in lines:
    writer.writerow(line)
def main():
  """Entry point: parse arguments, build report lines, emit the report."""
  o = options.Options(optspec)
  (opt, _, extra) = o.parse(sys.argv[1:])
  if opt.report_dir:
    report = [ReportLine(opt.report_dir)]
  elif extra:
    report, bad = Report(extra)
  # NOTE(review): `bad` is unbound on the --report_dir path, and both
  # `report` and `bad` are unbound when no arguments are given — likely
  # lost initializations; confirm against upstream history.
  if bad:
    # NOTE(review): truncated — the writer is created but never used;
    # writeheader()/writerows(bad) lines are presumably missing.
    writer = csv.DictWriter(sys.stdout, bad[0].keys(), dialect=csv.excel_tab)
  if len(report) < 1:
    o.fatal("Didn't find any samples. Did you supply at least one report dir"
            ' or journal?')
  # NOTE(review): no WriteReport(report) call is visible here — presumably
  # truncated from this copy.
if __name__ == '__main__':