| #!/usr/bin/python2.7 |
| |
| """report: make a table summarizing output from one or more runs of `sample`.""" |
| |
| import collections |
| import csv |
| import datetime |
| import os |
| import re |
| import sys |
| |
| import ifstats |
| import iperf |
| import isostream |
| import options |
| import wifipacket |
| |
# Command-line spec consumed by options.Options(): usage line, separator,
# then one flag definition per line.
optspec = """
report [options...] <journal>
--
r,report_dir= path to a single report directory to be parsed
"""

# Resource file (TSV) of 802.11n MCS index, channel width, guard interval
# and data rate; loaded lazily by LoadNRates().
NFILE = 'n-datarates.tsv'
# (mcs, width, guard interval) -> data rate; filled in by LoadNRates().
nrates = {}

# Resource file (TSV) mapping 802.11 channel numbers to frequencies;
# loaded lazily by LoadChannels().
CHANNELFILE = 'channels.tsv'
# channel number -> frequency; filled in by LoadChannels().
channels = {}
| |
| |
| def _Resource(name): |
| return os.path.join(os.path.dirname(os.path.abspath(__file__)), name) |
| |
| |
def LoadNRates():
  """Loads 802.11n coding and data rates into a global variable."""
  if nrates: return  # already loaded

  parsed = []

  with open(_Resource(NFILE), 'rb') as tsv:
    rows = csv.reader(tsv, delimiter='\t')
    next(rows)  # drop the human-readable header row
    for mcs, width, gi, rate in rows:
      parsed.append([int(mcs), int(width), int(gi), float(rate)])

  # MCS 8-31 reuse the MCS 0-7 codings over multiple spatial streams, so
  # their data rates are integer multiples of the single-stream rates.
  for mcs, width, gi, rate in parsed:
    for streams in range(1, 5):
      nrates[((streams - 1) * 8 + mcs, width, gi)] = rate * streams
| |
| |
def LoadChannels():
  """Load 802.11n channels and frequencies into a global variable."""
  if channels: return  # already loaded

  with open(_Resource(CHANNELFILE), 'rb') as tsv:
    rows = csv.reader(tsv, delimiter='\t')
    next(rows)  # skip the header row
    for channel, freq in rows:
      channels[int(channel)] = int(freq)
| |
| |
def ParseMCSFile(outfile, width=20):
  """Extract MCS and PHY rate statistics from an MCS report file.

  Args:
    outfile: an iterable of text lines; each line holds whitespace-separated
      MCS indices, with a bare '.' marking a sample to skip.
    width: channel width in MHz, used with the MCS index to look up rates.

  Returns:
    a (most common MCS index, mean PHY rate) tuple.

  Raises:
    ValueError: if a token is not an integer, or if the file contains no
      MCS samples at all. Report() already treats ValueError as a bad folder.
  """
  LoadNRates()

  # assume long guard interval
  guard = 800

  counter = collections.Counter()
  for line in outfile:
    for tok in line.split():
      if tok == '.': continue

      mcs = int(tok)
      counter[mcs] += 1

  if not counter:
    # Previously an empty report crashed with IndexError (most_common()[0])
    # or ZeroDivisionError (phy / alltimes); raise something callers handle.
    raise ValueError('no MCS samples found in report')

  phy = 0.0
  alltimes = 0
  # .items() behaves identically to the py2-only iteritems() here and keeps
  # the function portable.
  for mcs, times in counter.items():
    phy += nrates[(mcs, width, guard)] * times
    alltimes += times

  return counter.most_common(1)[0][0], phy / alltimes
| |
| |
def ParsePcap(stream, known_ssids):
  """ParsePcap computes PHY performance for a packet capture in `stream`.

  Args:
    stream: an open `file` object pointing to a file with pcap data.
    known_ssids: a dict mapping known BSSIDs to SSIDs; it is updated in
      place with SSIDs identified while walking the capture.

  Returns:
    a dict containing PHY performance information. keys are descriptive strings,
    values are strings or numbers in Mbit/s. Empty if no frame could be
    attributed to a known SSID.
  """

  # Per-SSID data samples: (secs since start, direction, RSSI, rate, size).
  rates = collections.defaultdict(list)
  # Per-SSID RSSI readings taken from beacon frames only.
  beacon_powers = collections.defaultdict(list)
  times_seen = collections.Counter()
  start_secs = None

  for opt, frame in wifipacket.Packetize(stream):
    if start_secs is None:
      start_secs = opt.pcap_secs

    # Fill in known SSIDs array if our scan didn't see anything.
    ssid = wifipacket.IdentifySSID(opt, frame)
    if ssid is not None:
      known_ssids[opt.ta] = ssid

    # Attribute each frame to the first known BSSID among its addresses.
    # NOTE(review): assumes 'ra'/'ta'/'xa' are receiver/transmitter/other
    # as reported by wifipacket — confirm against that module.
    for sta, direction in [('ra', 'up'), ('ta', 'down'), ('xa', 'across')]:
      bssid = opt.get(sta)
      ssid = known_ssids.get(bssid)
      if ssid:
        if opt.type == 0x08:  # Beacon
          rssi = opt.get('dbm_antsignal')
          if rssi is not None: beacon_powers[ssid].append(rssi)
        else:
          rates[ssid].append((opt.pcap_secs - start_secs,
                              direction,
                              opt.get('dbm_antsignal'),
                              opt.rate,
                              len(frame)))
        times_seen[ssid] += 1
        break

  if not times_seen:
    return {}

  # Summarize only the SSID we saw most often.
  modal_ssid, _ = times_seen.most_common(1)[0]
  # accum layout: [size-weighted rate sum, total bytes, RSSI sum, RSSI count]
  summary = {}
  for _, direction, rssi, rate, size in rates[modal_ssid]:
    size_weighted_rate = rate * float(size)
    if direction not in summary:
      summary[direction] = [size_weighted_rate, size, 0, 0]
    else:
      summary[direction][0] += size_weighted_rate
      summary[direction][1] += size
    # Bug fix: accumulate RSSI for every sample that has one. Previously
    # this sat in the else branch, so the first sample per direction was
    # silently dropped from the RSSI average.
    if rssi is not None:
      summary[direction][2] += rssi
      summary[direction][3] += 1

  line = {
      'PHY ssid': modal_ssid,
      'Beacon RSSI': (sum(beacon_powers[modal_ssid]) /
                      float(len(beacon_powers[modal_ssid]))
                      if beacon_powers.get(modal_ssid) else 0.0)
  }

  # Emit size-weighted mean PHY rate and mean RSSI for each direction seen.
  for direction, accum in summary.items():
    size_weighted_rate, size = accum[:2]
    line['PHY {}'.format(direction)] = ((size_weighted_rate / size) if size
                                        else 0.0)

    rssi, count = accum[2:]
    line['PHY RSSI {}'.format(direction)] = ((rssi / float(count)) if count
                                             else 0.0)

  return line
| |
| |
def Channel(text_channel):
  """Given a text channel spec like 149,+1 return the central freq and width."""
  LoadChannels()

  if ',' not in text_channel:
    # Plain channel number: a 20 MHz channel centered on its own frequency.
    control = int(text_channel)
    return control, channels[control], 20

  first, second = text_channel.split(',')
  control = int(first)
  modifier = int(second)

  if modifier == 80:
    # 80 MHz channels occupy blocks of four consecutive 5 GHz channels;
    # find the base of the block and offset to its center.
    five_ghz = sorted(ch for ch in channels if ch >= 36)
    pos = five_ghz.index(control)
    base = five_ghz[pos - pos % 4]
    return control, channels[base] + 30, 80

  if modifier in (-1, 1):
    # 40 MHz channel: the secondary sits above (+1) or below (-1) the
    # control channel, shifting the center by 10 MHz.
    return control, channels[control] + modifier * 10, 40

  raise AssertionError('text channel "{}" does not match any known format'
                       .format(text_channel))
| |
| |
def Overlap(c1, w1, c2, w2):
  """Return True if two WiFi channels overlap, or False otherwise."""
  # TODO(willangley): replace with code from Waveguide
  lo1, hi1 = c1 - w1 / 2, c1 + w1 / 2
  lo2, hi2 = c2 - w2 / 2, c2 + w2 / 2

  # The bands overlap when either interval contains an endpoint of the other
  # (touching edges count as overlap).
  return (lo2 <= lo1 <= hi2 or lo2 <= hi1 <= hi2
          or lo1 <= lo2 <= hi1 or lo1 <= hi2 <= hi1)
| |
| |
def ReportLine(report_dir, series=None):
  """Condense the output of a sample.py run into a one-line summary report.

  Args:
    report_dir: path to one report directory; its basename must look like
      <name>-<unix timestamp>-<steps>.
    series: optional series label copied into the output line.

  Returns:
    a collections.OrderedDict of column name -> value, consumable by
    WriteReport().
  """
  line = collections.OrderedDict()
  if series:
    line['Series'] = series

  # Extra dashes in the basename make this raise ValueError, which Report()
  # records as a bad folder.
  _, stamp, steps = os.path.basename(report_dir).split('-')
  line['Time'] = datetime.datetime.fromtimestamp(float(stamp))
  line['Steps'] = int(steps)

  system, cache = ifstats.Restore(report_dir)

  # known_ssids is a map from BSSID(string) => SSID(string)
  known_ssids = {}

  if 'Darwin' in system:
    # macOS: channel/RSSI/noise come from the airport link info; the scan is
    # used to count co-channel (shared) and overlapping (interfering) APs.
    result = ifstats.Parse('Darwin', cache)
    airport = result.get('link')
    control, freq, width = Channel(airport['channel'])
    shared = 0
    overlap = 0

    scan = result.get('scan')
    if len(scan) > 1:
      for row in scan:
        oc, of, ow = Channel(row['CHANNEL'])
        if control == oc:
          shared += 1
        elif Overlap(freq, width, of, ow):
          overlap += 1

        known_ssids[row['BSSID']] = row['SSID']

    # NOTE(review): 'Channel' holds the center frequency, not the channel
    # number — consistent with the Linux branch below.
    line.update({
        'Channel': freq,
        'Width': width,
        'RSSI': airport['agrCtlRSSI'],
        'Noise': airport['agrCtlNoise'],
        'Shared': shared,
        'Interfering': overlap
    })

  if 'Linux' in system:
    result = ifstats.Parse('Linux', cache)
    iwlink = result.get('link')
    # e.g. 'signal' is '-42 dBm'; keep only the numeric part.
    signal = int(iwlink.get('signal', '0 dBm').split()[0])
    channel = int(iwlink.get('freq', '0'))
    width = 20
    # Width is embedded in the tx bitrate string, e.g. '... 40MHz ...'.
    m = re.search(r'(\d+)MHz', iwlink.get('tx bitrate', ''), flags=re.I)
    if m:
      width = int(m.group(1))

    # Noise and contention not yet gathered in samples run on Linux systems.
    line.update({
        'Channel': channel,
        'Width': width,
        'RSSI': signal,
    })

  # TODO(willangley): integrate skid statistics with the rest of the benchmark
  # framework system detection.
  try:
    with open(os.path.join(report_dir, 'status_wireless')) as status_wireless:
      result = ifstats.skids.ParseStatusWireless(status_wireless.read())
      # NOTE(review): this `width` is a string (split of 'Bandwidth'), unlike
      # the ints set above; if it reaches ParseMCSFile() below it would index
      # nrates with a string — confirm this path is exercised.
      width = result['Bandwidth'].split()[0]
      line.update({
          'Channel': result['Channel'],
          'Width': width,
          'RSSI': result['RSSI'],
      })
  except IOError:
    pass

  try:
    ppath = os.path.join(report_dir, 'testnetwork.pcap')
    with open(ppath) as stream:
      line.update(ParsePcap(stream, known_ssids))
  except IOError:
    # No pcap available; fall back to the MCS report if one exists.
    try:
      mpath = os.path.join(report_dir, 'mcs')
      with open(mpath) as mf:
        # NOTE(review): if none of the platform branches above ran, `width`
        # is unbound here and this raises NameError, which Report() does NOT
        # catch — confirm whether that can happen in practice.
        mcs, phy = ParseMCSFile(mf, width)
        line['MCS'] = mcs
        line['PHY'] = phy
    except IOError:
      pass

  # If the initial ping test fails, we won't collect performance information.
  # deal with this gracefully.
  ips = iperf.Restore(report_dir)
  if 'iperf' in ips:
    # pylint:disable=line-too-long
    for key, perf in [('TCP BW up', iperf.ParseIperfTCP(ips.get('iperf', ''))),
                      ('UDP BW up', iperf.ParseIperfUDP(ips.get('iperfu', '')))]:
      line[key] = perf.get('bandwidth')
      line['{} units'.format(key)] = perf.get('bandwidth_unit')
  elif 'iperf3' in ips:
    for name in (key for key in ips
                 if key.startswith('iperf3')):
      perf = iperf.ParseIperf3(ips[name])
      if not perf or 'error' in perf:
        continue

      test_start = perf['start']['test_start']
      protocol = test_start['protocol']
      direction = 'down' if test_start['reverse'] else 'up'
      key = '{protocol} BW {direction}'.format(protocol=protocol,
                                               direction=direction)

      if protocol == 'TCP':
        line[key] = perf['end']['sum_received']['bits_per_second']
      elif protocol == 'UDP':
        # Discount UDP bandwidth by the measured loss percentage.
        line[key] = (perf['end']['sum']['bits_per_second'] *
                     (100 - float(perf['end']['sum']['lost_percent'])) / 100.0)
      else:
        continue

      line['{} units'.format(key)] = 'bit/s'

  try:
    with open(os.path.join(report_dir, 'isostream')) as istm:
      text = istm.read()
      line['isostream'] = isostream.ParseIsostream(text)
  except IOError:
    pass

  return line
| |
| |
def ReadJournal(jname):
  """Read a journal, returning a series name and its data folders."""
  jname = os.path.realpath(jname)
  series = os.path.basename(jname)
  if series == 'journal':
    # A journal named just 'journal' takes its series name from the
    # directory that contains it.
    series = os.path.basename(os.path.dirname(jname))

  base = os.path.dirname(jname)
  folders = []
  with open(jname) as journal:
    for entry in journal:
      entry = entry.strip()
      if entry.startswith('#'):  # comment line
        continue
      folders.append(os.path.join(base, entry))

  return series, folders
| |
| |
def Report(journals):
  """Given the name of a journal file, return a list of ReportLines."""
  good = []
  bad = []

  for jname in journals:
    series, folders = ReadJournal(jname)
    for folder in folders:
      try:
        good.append(ReportLine(folder, series=series))
      except (TypeError, IOError, ValueError) as e:
        # Record the unparseable folder instead of aborting the whole run.
        bad.append(collections.OrderedDict(folder=folder, error=repr(e)))

  return good, bad
| |
| |
def WriteReport(lines):
  """Write a network testing report in .tsv format to stdout."""
  # include every field we can write in the header row
  columns = ['Series', 'Time', 'Steps', 'Channel', 'Width', 'RSSI',
             'Beacon RSSI', 'Noise', 'Shared', 'Interfering',
             'MCS', 'PHY', 'PHY ssid', 'PHY up', 'PHY down', 'PHY across',
             'PHY RSSI up', 'PHY RSSI down', 'PHY RSSI across',
             'TCP BW up', 'TCP BW up units', 'UDP BW up', 'UDP BW up units',
             'TCP BW down', 'TCP BW down units', 'UDP BW down',
             'UDP BW down units', 'isostream']

  out = csv.DictWriter(sys.stdout, columns, dialect=csv.excel_tab)
  out.writeheader()
  for line in lines:
    out.writerow(line)
| |
| |
def main():
  o = options.Options(optspec)
  opt, _, extra = o.parse(sys.argv[1:])

  if opt.report_dir:
    report = [ReportLine(opt.report_dir)]
  elif extra:
    report, bad = Report(extra)
    if bad:
      # Surface the folders we couldn't parse ahead of the real report,
      # separated by a blank line.
      bw = csv.DictWriter(sys.stdout, bad[0].keys(), dialect=csv.excel_tab)
      bw.writeheader()
      bw.writerows(bad)
      print('')

  if not report:
    o.fatal("Didn't find any samples. Did you supply at least one report dir"
            ' or journal?')

  WriteReport(report)
| |
| |
| if __name__ == '__main__': |
| main() |