Merge "platform: Added ipv6 field to onu device_stats"
diff --git a/cache_warming/cache_warming.py b/cache_warming/cache_warming.py
index 3416b56..986bcd7 100644
--- a/cache_warming/cache_warming.py
+++ b/cache_warming/cache_warming.py
@@ -56,7 +56,14 @@
   if os.path.isfile(HOSTS_JSON_PATH):
     with open(HOSTS_JSON_PATH, 'r') as hosts_json:
       global hit_log
-      hit_log = json.load(hosts_json)
+      try:
+        hit_log = json.load(hosts_json)
+      except ValueError as e:
+        if verbose:
+          print 'Failed to parse %s: %s.' % (HOSTS_JSON_PATH, e)
+      finally:
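+        # Guard against a failed load above, and against valid JSON that is
+        # not a dict (e.g. a file containing '[]').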
+        if not isinstance(hit_log, dict):
+          hit_log = {}
 
 
 def process_query(qry):
@@ -172,7 +179,6 @@
   args = set_args()
   verbose = args.verbose
   load_hosts()
-
   server_address = UDP_SERVER_PATH
   try:
     os.remove(server_address)
@@ -188,7 +194,7 @@
 
   while 1:
     diff = datetime.now() - last_fetch
-    if diff.total_seconds() > 60:
+    if diff.total_seconds() > FETCH_INTERVAL:
       warm_cache(args.port, args.server)
     data = sock.recv(128)
     process_query(data)
diff --git a/cache_warming/cache_warming_test.py b/cache_warming/cache_warming_test.py
index 220edc1..ceed91b 100644
--- a/cache_warming/cache_warming_test.py
+++ b/cache_warming/cache_warming_test.py
@@ -1,6 +1,7 @@
 #!/usr/bin/python
 """Tests for cache_warming.py."""
 
+import os
 import cache_warming
 from wvtest import wvtest
 
@@ -72,5 +73,55 @@
   wvtest.WVPASSEQ(actual, expected)
 
 
+@wvtest.wvtest
+def test_no_cache_warming_hosts():
+  crash = False
+  cache_warming.HOSTS_JSON_PATH = '/tmp/cache_warming_hosts.json'
+  if os.path.exists('/tmp/cache_warming_hosts.json'):
+    os.remove('/tmp/cache_warming_hosts.json')
+  try:
+    cache_warming.load_hosts()
+    cache_warming.warm_cache(53, None)
+  except ValueError:
+    crash = True
+  wvtest.WVFAIL(crash)
+
+
+@wvtest.wvtest
+def test_empty_cache_warming_hosts():
+  crash = False
+  cache_warming.HOSTS_JSON_PATH = '/tmp/cache_warming_hosts.json'
+  if os.path.exists('/tmp/cache_warming_hosts.json'):
+    os.remove('/tmp/cache_warming_hosts.json')
+  open('/tmp/cache_warming_hosts.json', 'w').close()
+  try:
+    cache_warming.load_hosts()
+    cache_warming.warm_cache(53, None)
+  except ValueError:
+    crash = True
+  finally:
+    os.remove('/tmp/cache_warming_hosts.json')
+  wvtest.WVFAIL(crash)
+
+
+@wvtest.wvtest
+def test_wrong_cache_warming_hosts():
+  crash = False
+  cache_warming.HOSTS_JSON_PATH = '/tmp/cache_warming_hosts.json'
+  if os.path.exists('/tmp/cache_warming_hosts.json'):
+    os.remove('/tmp/cache_warming_hosts.json')
+  f = open('/tmp/cache_warming_hosts.json', 'w')
+  f.write('[]')
+  f.close()
+  try:
+    cache_warming.load_hosts()
+    cache_warming.warm_cache(53, None)
+  except ValueError:
+    crash = True
+  finally:
+    os.remove('/tmp/cache_warming_hosts.json')
+  wvtest.WVFAIL(crash)
+
+
 if __name__ == '__main__':
   wvtest.wvtest_main()
diff --git a/cmds/Makefile b/cmds/Makefile
index 60c1b5e..24bc8a1 100644
--- a/cmds/Makefile
+++ b/cmds/Makefile
@@ -19,6 +19,7 @@
 TARGETS=\
 	$(PORTABLE_TARGETS) \
 	alivemonitor \
+	anonid \
 	bsa2bluez \
 	burnin-flash \
 	buttonmon \
@@ -31,6 +32,7 @@
 	diskbench \
 	dnsck \
 	freemegs \
+	gfhd254_reboot \
 	gstatic \
 	http_bouncer \
 	ionice \
@@ -63,13 +65,15 @@
 endif
 
 ifeq ($(BUILD_SSDP),y)
-TARGETS += ssdptax
+TARGETS += ssdptax dialcheck
 HOST_TEST_TARGETS += host-test-ssdptax.sh
+HOST_TEST_TARGETS += host-test-dialcheck.sh
 endif
 
 ifeq ($(BUILD_DNSSD),y)
 # Don't bother building for host
 ARCH_TARGETS += dnssd_hosts
+SCRIPT_TARGETS += castcheck
 endif
 
 ifeq ($(BUILD_IBEACON),y)
@@ -116,6 +120,7 @@
 	for n in $(SCRIPT_TARGETS); do \
 		test ! -f $$n.$(BR2_TARGET_GENERIC_PLATFORM_NAME) || \
 			cp -f $$n.$(BR2_TARGET_GENERIC_PLATFORM_NAME) $(BINDIR)/$$n; \
+		test ! -f $$n || cp -f $$n $(BINDIR)/$$n; \
 	done
 
 install-libs:
@@ -202,6 +207,10 @@
 ssdptax: LIBS += -lcurl -lnl-3 -lstdc++ -lm
 host-ssdptax: host-ssdptax.o host-l2utils.o
 host-ssdptax: LIBS += $(HOST_LIBS) -lcurl -lnl-3 -lstdc++ -lm
+dialcheck: dialcheck.o
+dialcheck: LIBS += -lstdc++ -lm
+host-dialcheck: host-dialcheck.o
+host-dialcheck: LIBS += $(HOST_LIBS) -lstdc++ -lm
 statpitcher.o: device_stats.pb.o
 statpitcher: LIBS+=-L$(DESTDIR)$(PREFIX)/usr/lib -lprotobuf-lite -lpthread -lstdc++
 statpitcher: device_stats.pb.o statpitcher.o
@@ -248,6 +257,9 @@
 		--includes --output-file=$@ $<
 hostnamelookup.tmp.o: CFLAGS += -Wno-missing-field-initializers
 host-hostnamelookup.tmp.o: CFLAGS += -Wno-missing-field-initializers
+anonid: anonid.o
+host-anonid: host-anonid.o
+anonid host-anonid: LIBS += -lcrypto
 
 
 TESTS = $(wildcard test-*.sh) $(wildcard test-*.py) $(wildcard *_test.py) $(TEST_TARGETS)
diff --git a/cmds/anonid.c b/cmds/anonid.c
new file mode 100644
index 0000000..e7854ad
--- /dev/null
+++ b/cmds/anonid.c
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2015 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fcntl.h>
+#include <getopt.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+
+#include <openssl/md5.h>
+#include <openssl/hmac.h>
+
+
+const char SOFT[] = "AEIOUY" "V";
+const char HARD[] = "BCDFGHJKLMNPQRSTVWXYZ" "AEIOU";
+const char *consensus_key_file = "/tmp/waveguide/consensus_key";
+#define CONSENSUS_KEY_LEN 16
+uint8_t consensus_key[CONSENSUS_KEY_LEN] = {0};
+#define MAC_ADDR_LEN 17
+
+void default_consensus_key()
+{
+  int fd;
+
+  if ((fd = open("/dev/urandom", O_RDONLY)) >= 0) {
+    ssize_t siz = sizeof(consensus_key);
+    if (read(fd, consensus_key, siz) != siz) {
+      /* https://xkcd.com/221/ */
+      memset(consensus_key, time(NULL), siz);
+    }
+    close(fd);
+  }
+}
+
+/* Read the waveguide consensus_key, if any. Returns 0 if
+ * a key was present, 1 if not or something fails. */
+int get_consensus_key()
+{
+  int fd, rc = 1;
+  uint8_t new_key[sizeof(consensus_key)];
+
+  if ((fd = open(consensus_key_file, O_RDONLY)) < 0) {
+    return 1;
+  }
+
+  if (read(fd, new_key, sizeof(new_key)) == sizeof(new_key)) {
+    memcpy(consensus_key, new_key, sizeof(consensus_key));
+    rc = 0;
+  }
+  close(fd);
+
+  return rc;
+}
+
+/* Given a value from 0..4095, encode it as a cons+vowel+cons sequence. */
+void trigraph(int num, char *out)
+{
+  int ns = sizeof(SOFT) - 1;
+  int nh = sizeof(HARD) - 1;
+  int c1, c2, c3;
+
+  c3 = num % nh;
+  c2 = (num / nh) % ns;
+  c1 = num / nh / ns;
+  out[0] = HARD[c1];
+  out[1] = SOFT[c2];
+  out[2] = HARD[c3];
+}
+
+int hex_chr_to_int(char hex) {
+  switch(hex) {
+    case '0' ... '9':
+      return hex - '0';
+    case 'a' ... 'f':
+      return hex - 'a' + 10;
+    case 'A' ... 'F':
+      return hex - 'A' + 10;
+  }
+
+  return 0;
+}
+
+/*
+ * Convert a string of the form "00:11:22:33:44:55" to
+ * a binary array 001122334455.
+ */
+void get_binary_mac(const char *mac, uint8_t *out) {
+  int i;
+  for (i = 0; i < MAC_ADDR_LEN; i += 3) {
+    *out = (hex_chr_to_int(mac[i]) << 4) | hex_chr_to_int(mac[i+1]);
+    out++;
+  }
+}
+
+
+void get_anonid_for_mac(const char *mac, char *out) {
+  unsigned char digest[EVP_MAX_MD_SIZE];
+  unsigned int digest_len = sizeof(digest);
+  uint8_t macbin[6];
+  uint32_t num;
+
+  get_binary_mac(mac, macbin);
+  HMAC(EVP_md5(), consensus_key, CONSENSUS_KEY_LEN, macbin, sizeof(macbin),
+      digest, &digest_len);
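+  /* Use the top 24 bits of the HMAC-MD5 digest as two 12-bit values,
+   * each encoded as a three-letter trigraph: a six-character anonid. */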
+  num = (digest[0] << 16) | (digest[1] << 8) | digest[2];
+  trigraph((num >> 12) & 0x0fff, out);
+  trigraph((num      ) & 0x0fff, out + 3);
+}
+
+
+void usage(const char *progname)
+{
+  fprintf(stderr, "usage: %s: -a ##:##:##:##:##:## [-k consensus_key]\n",
+      progname);
+  fprintf(stderr, "\t-a addr: MAC address to generate an anonid for\n");
+  fprintf(stderr, "\t-k key: Use a specific consensus_key. "
+      "Default is to read it from %s\n", consensus_key_file);
+  exit(1);
+}
+
+
+int main(int argc, char **argv)
+{
+  struct option long_options[] = {
+    {"addr",          required_argument, 0, 'a'},
+    {"consensus_key", required_argument, 0, 'k'},
+    {0,          0,                 0, 0},
+  };
+  const char *addr = NULL;
+  char anonid[7] = {0};  /* two trigraphs plus a NUL terminator */
+  size_t lim;
+  int c;
+
+  setlinebuf(stdout);
+  alarm(30);
+
+  if (get_consensus_key()) {
+    default_consensus_key();
+  }
+
+  while ((c = getopt_long(argc, argv, "a:k:", long_options, NULL)) != -1) {
+    switch (c) {
+    case 'a':
+      addr = optarg;
+      break;
+    case 'k':
+      lim = (sizeof(consensus_key) > strlen(optarg)) ? strlen(optarg) :
+        sizeof(consensus_key);
+      memset(consensus_key, 0, sizeof(consensus_key));
+      memcpy(consensus_key, optarg, lim);
+      break;
+    default:
+      usage(argv[0]);
+      break;
+    }
+  }
+
+  if (addr == NULL) {
+    usage(argv[0]);
+  }
+
+  get_anonid_for_mac(addr, anonid);
+  printf("%s\n", anonid);
+
+  exit(0);
+}
diff --git a/cmds/avahi-browse-fake.sh b/cmds/avahi-browse-fake.sh
new file mode 100755
index 0000000..c7ed603
--- /dev/null
+++ b/cmds/avahi-browse-fake.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+echo '+;br0;IPv4;GFiber\032TV\032Box1001;_googlecast._tcp;local'
+echo '=;br0;IPv4;GFiber\032TV\032Box1001;_googlecast._tcp;local;GFiber\032TV\032Box1001.local;1.1.1.1;8009;"rs=" "bs=FFFFFFFFFFFF" "st=2" "ca=4101" "fn=GFiber TV Box1001" "ic=/setup/icon.png" "md=GFiber TV Box" "ve=05" "rm=RMRMRMRMRMRMRMRMRM" "id=0123456789abcdef0123456789abcdef"'
+echo '+;br0;IPv4;GFiber\032TV\032Box1002;_googlecast._tcp;local'
+echo '=;br0;IPv4;GFiber\032TV\032Box1002;_googlecast._tcp;local;GFiber\032TV\032Box1002.local;3.3.3.3;8009;"rs=" "bs=FFFFFFFFFFFF" "st=2" "ca=4101" "fn=GFiber TV Box1002" "ic=/setup/icon.png" "md=GFiber TV Box" "ve=05" "rm=RMRMRMRMRMRMRMRMRM" "id=0123456789abcdef0123456789abcdef"'
+echo '+;br0;IPv4;GFiber\032TV\032Box1003;_googlecast._tcp;local'
+echo '=;br0;IPv4;GFiber\032TV\032Box1003;_googlecast._tcp;local;GFiber\032TV\032Box1003.local;2.2.2.2;8009;"rs=" "bs=FFFFFFFFFFFF" "st=2" "ca=4101" "fn=GFiber TV Box1003" "ic=/setup/icon.png" "md=GFiber TV Box" "ve=05" "rm=RMRMRMRMRMRMRMRMRM" "id=0123456789abcdef0123456789abcdef"'
diff --git a/cmds/buttonmon.c b/cmds/buttonmon.c
index e8bab1f..31148cc 100644
--- a/cmds/buttonmon.c
+++ b/cmds/buttonmon.c
@@ -13,18 +13,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// GFLT110 the "reset" button is connected to MPP[18]
-//
-// This will periodically scan MPP[18].
-// If held < 1s &&  sysvar PRODUCTION_UNIT is NOT set
-//     start dropbear.
-// If held > 2s
-//   generate a reset.
-// if head > 10s
-//   remove sysvar PRODUCTION_UNIT AND
-//   generate a reset.
-//
-
 
 #include <fcntl.h>
 #include <stdint.h>
@@ -38,17 +26,26 @@
 #include <time.h>
 #include <unistd.h>
 
-
-// TODO(jnewlin): Export this LED register via the gpio sysfs.
 #define GPIO_INPUT_REG_ADDR 0xf1018110
-#define RESET_BIT 18
-#define RESET_BIT_MASK (1 << RESET_BIT)
 #define TRUE 1
 #define FALSE 0
 
+/* GFLT110: The reset button is connected to MPP18 */
+#define RESET_BIT_GFLT110 18
+#define RESET_BIT_MASK_GFLT110 (1 << RESET_BIT_GFLT110)
 
-// Only run on gflt110s.
-int IsGflt110() {
+/* GFLT300: The reset button is connected to MPP17 */
+#define RESET_BIT_GFLT300 17
+#define RESET_BIT_MASK_GFLT300 (1 << RESET_BIT_GFLT300)
+
+typedef enum {
+  PlatType_GFLT110 = 1,
+  PlatType_GFLT300,
+  PlatType_Unknown
+} PlatType;
+PlatType plat_type = PlatType_Unknown;
+
+int IsSupportedPlatform() {
   int bytes_read;
   char buf[64];
   memset(buf, 0, sizeof(buf));
@@ -61,9 +58,17 @@
   fclose(f);
   if (bytes_read <= 0) {
     printf("fread of /proc/board_type returned 0 data.\n");
-  }
-  if (strncmp(buf, "GFLT110", strlen("GFLT110")))
     return FALSE;
+  }
+
+  if (strncmp(buf, "GFLT110", strlen("GFLT110")) == 0) {
+    plat_type = PlatType_GFLT110;
+  } else if (strncmp(buf, "GFLT300", strlen("GFLT300")) == 0) {
+    plat_type = PlatType_GFLT300;
+  } else {
+    /* This platform is not supported. */
+    return FALSE;
+  }
   return TRUE;
 }
 
@@ -95,12 +100,20 @@
     exit(1);
   }
 
-  volatile uint32_t* reg_addr = base + ((GPIO_INPUT_REG_ADDR & page_mask) / sizeof(*base));
+  volatile uint32_t* reg_addr = base + (
+      (GPIO_INPUT_REG_ADDR & page_mask) / sizeof(*base));
   int button_down = FALSE;
   int button_down_sent = -1;
   uint64_t button_down_start_tick = 0;
   for(;;) {
-    int button_down_now = (*reg_addr & RESET_BIT_MASK) == 0;
+    int button_down_now;
+    if (plat_type == PlatType_GFLT110)
+      button_down_now = (*reg_addr & RESET_BIT_MASK_GFLT110) == 0;
+    else if (plat_type == PlatType_GFLT300)
+      button_down_now = (*reg_addr & RESET_BIT_MASK_GFLT300) == 0;
+    else
+      button_down_now = FALSE;
+
     if (!button_down && button_down_now) {
       // Handle button down toggle.
       button_down_start_tick = GetTick();
@@ -131,8 +144,8 @@
 
 
 int main() {
-  if (!IsGflt110()) {
-    printf("resetmonitor only works on gflt110.\n");
+  if (!IsSupportedPlatform()) {
+    printf("resetmonitor only works on GFLT platforms.\n");
     return 1;
   }
   setlinebuf(stdout);
diff --git a/cmds/castcheck b/cmds/castcheck
new file mode 100755
index 0000000..70080ec
--- /dev/null
+++ b/cmds/castcheck
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+AVAHI=avahi-browse
+
+while getopts "a:" option
+do
+  case $option in
+  a) AVAHI="$OPTARG" ;;
+  esac
+done
+
+cast_devices=
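+# avahi-browse -p output is ';'-separated; '=' records are resolved
+# services and field 8 is the device address.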
+while IFS=";" read ip; do
+  cast_devices="$cast_devices $ip"
+done<<EOT
+$($AVAHI -tpvlr _googlecast._tcp | grep "^=" | cut -d";" -f8 | sort)
+EOT
+
+echo "Cast responses from:$cast_devices"
diff --git a/cmds/dialcheck-test-server.py b/cmds/dialcheck-test-server.py
new file mode 100644
index 0000000..2637384
--- /dev/null
+++ b/cmds/dialcheck-test-server.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python
+"""Fake SSDP server for unit tests.
+
+"""
+
+import errno
+import os
+import signal
+import socket
+import SocketServer
+import struct
+import sys
+
+
+notify = """LOCATION: http://1.1.1.1:1/test.xml\r\n
+CACHE-CONTROL: max-age=1800\r\n
+EXT:\r\n
+SERVER: test_ssdp/1.0\r\n
+ST: urn:dial-multiscreen-org:service:dial:1\r\n
+USN: uuid:number::urn:dial-multiscreen-org:service:dial:1\r\n"""
+
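+# dialcheck only records the source address of each response, so the
+# headers above just need to look like a DIAL search response.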
+
+class SSDPHandler(SocketServer.BaseRequestHandler):
+  def handle(self):
+    self.request[1].sendto(notify, self.client_address)
+
+
+def check_pid(pid):
+  try:
+    os.kill(pid, 0)
+  except OSError as e:
+    if e.errno == errno.ESRCH:
+      return False
+  return True
+
+
+def timeout(unused_signum, unused_frame):
+  ppid = os.getppid()
+  if ppid == 1 or not check_pid(ppid):
+    print 'timed out!'
+    sys.exit(2)
+  else:
+    signal.alarm(1)
+
+
+def main():
+  signal.signal(signal.SIGALRM, timeout)
+  signal.alarm(1)
+  SocketServer.UDPServer.allow_reuse_address = True
+  s = SocketServer.UDPServer(('', 0), SSDPHandler)
+  s.socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP,
+      socket.inet_aton('239.255.255.250') + socket.inet_aton('0.0.0.0'))
+  sn = s.socket.getsockname()
+  port = sn[1]
+  open(sys.argv[1], "w").write(str(port))
+  s.handle_request()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/cmds/dialcheck.cc b/cmds/dialcheck.cc
new file mode 100644
index 0000000..17f8fbd
--- /dev/null
+++ b/cmds/dialcheck.cc
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2016 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * dialcheck
+ *
+ * Check for nearby devices supporting the DIAL protocol.
+ */
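+
+/*
+ * The check sends an SSDP M-SEARCH for the DIAL service type to the
+ * IPv4 (239.255.255.250) and IPv6 (FF02::C) multicast groups on br0,
+ * collects the source addresses of any responses for timeout_secs,
+ * and prints the unique responders on one line.
+ */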
+
+#include <arpa/inet.h>
+#include <asm/types.h>
+#include <ctype.h>
+#include <getopt.h>
+#include <net/if.h>
+#include <netinet/in.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/un.h>
+#include <unistd.h>
+
+#include <iostream>
+#include <set>
+#include <tr1/unordered_map>
+
+
+typedef std::set<std::string> ResultsSet;
+int timeout_secs = 10;
+
+
+/* SSDP Discover packet */
+int ssdp_port = 1900;
+int ssdp_loop = 0;
+#define SSDP_IP4 "239.255.255.250"
+#define SSDP_IP6 "FF02::C"
+const char discover_template[] = "M-SEARCH * HTTP/1.1\r\n"
+    "HOST: %s:%d\r\n"
+    "MAN: \"ssdp:discover\"\r\n"
+    "MX: 2\r\n"
+    "USER-AGENT: dialcheck/1.0\r\n"
+    "ST: urn:dial-multiscreen-org:service:dial:1\r\n\r\n";
+
+
+int get_ipv4_ssdp_socket()
+{
+  int s;
+  int reuse = 1;
+  struct sockaddr_in sin;
+  struct ip_mreq mreq;
+  struct ip_mreqn mreqn;
+
+  if ((s = socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
+    perror("socket SOCK_DGRAM");
+    exit(1);
+  }
+
+  if (setsockopt(s, SOL_SOCKET, SO_REUSEADDR, (char *)&reuse, sizeof(reuse))) {
+    perror("setsockopt SO_REUSEADDR");
+    exit(1);
+  }
+
+  if (setsockopt(s, IPPROTO_IP, IP_MULTICAST_LOOP,
+        &ssdp_loop, sizeof(ssdp_loop))) {
+    perror("setsockopt IP_MULTICAST_LOOP");
+    exit(1);
+  }
+
+  memset(&sin, 0, sizeof(sin));
+  sin.sin_family = AF_INET;
+  sin.sin_port = htons(ssdp_port);
+  sin.sin_addr.s_addr = INADDR_ANY;
+  if (bind(s, (struct sockaddr*)&sin, sizeof(sin))) {
+    perror("bind");
+    exit(1);
+  }
+
+  memset(&mreqn, 0, sizeof(mreqn));
+  mreqn.imr_ifindex = if_nametoindex("br0");
+  if (setsockopt(s, IPPROTO_IP, IP_MULTICAST_IF, &mreqn, sizeof(mreqn))) {
+    perror("IP_MULTICAST_IF");
+    exit(1);
+  }
+
+  memset(&mreq, 0, sizeof(mreq));
+  mreq.imr_multiaddr.s_addr = inet_addr(SSDP_IP4);
+  if (setsockopt(s, IPPROTO_IP, IP_ADD_MEMBERSHIP,
+        (char *)&mreq, sizeof(mreq))) {
+    perror("IP_ADD_MEMBERSHIP");
+    exit(1);
+  }
+
+  return s;
+}
+
+
+void send_ssdp_ip4_request(int s)
+{
+  struct sockaddr_in sin;
+  char buf[1024];
+  ssize_t len;
+
+  snprintf(buf, sizeof(buf), discover_template, SSDP_IP4, ssdp_port);
+  memset(&sin, 0, sizeof(sin));
+  sin.sin_family = AF_INET;
+  sin.sin_port = htons(ssdp_port);
+  sin.sin_addr.s_addr = inet_addr(SSDP_IP4);
+  len = strlen(buf);
+  if (sendto(s, buf, len, 0, (struct sockaddr*)&sin, sizeof(sin)) != len) {
+    perror("sendto multicast IPv4");
+    exit(1);
+  }
+}
+
+
+int get_ipv6_ssdp_socket()
+{
+  int s;
+  int reuse = 1;
+  int loop = 0;
+  struct sockaddr_in6 sin6;
+  struct ipv6_mreq mreq;
+  int idx;
+  int hops;
+
+  if ((s = socket(AF_INET6, SOCK_DGRAM, 0)) < 0) {
+    perror("socket SOCK_DGRAM");
+    exit(1);
+  }
+
+  if (setsockopt(s, SOL_SOCKET, SO_REUSEADDR, (char *)&reuse, sizeof(reuse))) {
+    perror("setsockopt SO_REUSEADDR");
+    exit(1);
+  }
+
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_LOOP, &loop, sizeof(loop))) {
+    perror("setsockopt IPV6_MULTICAST_LOOP");
+    exit(1);
+  }
+
+  memset(&sin6, 0, sizeof(sin6));
+  sin6.sin6_family = AF_INET6;
+  sin6.sin6_port = htons(ssdp_port);
+  sin6.sin6_addr = in6addr_any;
+  if (bind(s, (struct sockaddr*)&sin6, sizeof(sin6))) {
+    perror("bind");
+    exit(1);
+  }
+
+  idx = if_nametoindex("br0");
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_IF, &idx, sizeof(idx))) {
+    perror("IP_MULTICAST_IF");
+    exit(1);
+  }
+
+  hops = 2;
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_HOPS, &hops, sizeof(hops))) {
+    perror("IPV6_MULTICAST_HOPS");
+    exit(1);
+  }
+
+  memset(&mreq, 0, sizeof(mreq));
+  mreq.ipv6mr_interface = idx;
+  if (inet_pton(AF_INET6, SSDP_IP6, &mreq.ipv6mr_multiaddr) != 1) {
+    fprintf(stderr, "ERR: inet_pton(%s) failed", SSDP_IP6);
+    exit(1);
+  }
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_JOIN_GROUP, &mreq, sizeof(mreq)) < 0) {
+    perror("ERR: setsockopt(IPV6_JOIN_GROUP)");
+    exit(1);
+  }
+
+  return s;
+}
+
+
+void send_ssdp_ip6_request(int s)
+{
+  struct sockaddr_in6 sin6;
+  char buf[1024];
+  ssize_t len;
+
+  snprintf(buf, sizeof(buf), discover_template, SSDP_IP6, ssdp_port);
+  memset(&sin6, 0, sizeof(sin6));
+  sin6.sin6_family = AF_INET6;
+  sin6.sin6_port = htons(ssdp_port);
+  if (inet_pton(AF_INET6, SSDP_IP6, &sin6.sin6_addr) != 1) {
+    fprintf(stderr, "ERR: inet_pton(%s) failed", SSDP_IP6);
+    exit(1);
+  }
+  len = strlen(buf);
+  if (sendto(s, buf, len, 0, (struct sockaddr*)&sin6, sizeof(sin6)) != len) {
+    perror("sendto multicast IPv6");
+    exit(1);
+  }
+}
+
+
+std::string handle_ssdp_response(int s, int family)
+{
+  char buffer[4096];
+  char ipbuf[INET6_ADDRSTRLEN];
+  ssize_t pktlen;
+  struct sockaddr from;
+  socklen_t len = sizeof(from);
+
+  pktlen = recvfrom(s, buffer, sizeof(buffer), 0, &from, &len);
+  if (pktlen <= 0) {
+    return std::string("");
+  }
+
+  if (family == AF_INET) {
+    struct sockaddr_in *sin = (struct sockaddr_in *)&from;
+    inet_ntop(AF_INET, &sin->sin_addr, ipbuf, sizeof(ipbuf));
+    return std::string(ipbuf);
+  } else if (family == AF_INET6) {
+    struct sockaddr_in6 *sin6 = (struct sockaddr_in6 *)&from;
+    inet_ntop(AF_INET6, &sin6->sin6_addr, ipbuf, sizeof(ipbuf));
+    return std::string(ipbuf);
+  }
+
+  return std::string("");
+}
+
+
+/* Wait for SSDP NOTIFY messages to arrive. */
+ResultsSet listen_for_responses(int s4, int s6)
+{
+  ResultsSet results;
+  struct timeval tv;
+  fd_set rfds;
+  int maxfd = (s4 > s6) ? s4 : s6;
+
+  memset(&tv, 0, sizeof(tv));
+  tv.tv_sec = timeout_secs;
+  tv.tv_usec = 0;
+
+  FD_ZERO(&rfds);
+  FD_SET(s4, &rfds);
+  FD_SET(s6, &rfds);
+
+  while (select(maxfd + 1, &rfds, NULL, NULL, &tv) > 0) {
+    if (FD_ISSET(s4, &rfds)) {
+      std::string ip = handle_ssdp_response(s4, AF_INET);
+      if (!ip.empty()) {
+        results.insert(ip);
+      }
+    }
+    if (FD_ISSET(s6, &rfds)) {
+      std::string ip = handle_ssdp_response(s6, AF_INET6);
+      if (!ip.empty()) {
+        results.insert(ip);
+      }
+    }
+
+    FD_ZERO(&rfds);
+    FD_SET(s4, &rfds);
+    FD_SET(s6, &rfds);
+  }
+
+  return results;
+}
+
+
+void usage(char *progname) {
+  fprintf(stderr, "usage: %s [-t port]\nwhere:\n", progname);
+  fprintf(stderr, "\t-t port:  test mode, send to localhost port\n");
+  exit(1);
+}
+
+
+int main(int argc, char **argv)
+{
+  int c;
+  int s4, s6;
+
+  setlinebuf(stdout);
+  alarm(30);
+
+  while ((c = getopt(argc, argv, "t:")) != -1) {
+    switch(c) {
+      case 't':
+        timeout_secs = 1;
+        ssdp_port = atoi(optarg);
+        ssdp_loop = 1;
+        break;
+      default: usage(argv[0]); break;
+    }
+  }
+
+  s4 = get_ipv4_ssdp_socket();
+  send_ssdp_ip4_request(s4);
+  s6 = get_ipv6_ssdp_socket();
+  send_ssdp_ip6_request(s6);
+  ResultsSet IPs = listen_for_responses(s4, s6);
+
+  std::string output("DIAL responses from: ");
+  for (ResultsSet::const_iterator ii = IPs.begin(); ii != IPs.end(); ++ii) {
+    output.append(*ii);
+    output.append(" ");
+  }
+  std::cout << output << std::endl;
+
+  exit(0);
+}
diff --git a/cmds/gfhd254_reboot.c b/cmds/gfhd254_reboot.c
new file mode 100644
index 0000000..fdc7e32
--- /dev/null
+++ b/cmds/gfhd254_reboot.c
@@ -0,0 +1,65 @@
+// GFHD254 has a bug where software reset doesn't reset the entire
+// chip, some state in the SAGE engine isn't getting reset.  This
+// drives a gpio that connects back to the chips own external reset
+// pin, resetting the chip with this pin works around the issue as
+// the SAGE engine is completely reset in this path.
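+//
+// aon_gio 2, 3 and 9 are the candidate reset lines; the code below muxes
+// all three to GPIO mode and configures them as outputs driven low.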
+
+#include <fcntl.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#define REG_BASE 0xf0410000
+#define REG_SIZE 0x8000
+
+
+#define GPIO_DATA (0x7404 / 4)
+#define GPIO_IODIR (0x7408 / 4)
+#define CTRL_MUX_0 (0x0700 / 4)
+#define CTRL_MUX_1 (0x0704 / 4)
+
+static void *mmap_(
+    void* addr, size_t size, int prot, int flags, int fd,
+    off_t offset) {
+#ifdef __ANDROID__
+  return mmap64(addr, size, prot, flags, fd,
+                (off64_t)(uint64_t)(uint32_t)offset);
+#else
+  return mmap(addr, size, prot, flags, fd, offset);
+#endif
+}
+
+// TODO(jnewlin):  Revist this after the exact gpio being used
+// is settled on.
+
+int main() {
+  int fd = open("/dev/mem", O_RDWR);
+  volatile uint32_t* reg;
+
+  if (fd < 0) {
+    perror("mmap");
+    return 1;
+  }
+
+  reg = mmap_(NULL, REG_SIZE, PROT_READ | PROT_WRITE, MAP_SHARED,
+              fd, REG_BASE);
+  if (reg == MAP_FAILED) {
+    perror("mmap");
+    return 1;
+  }
+
+  // Set the pin mux to gpio, value of zero selects gpio mode, this
+  // is the reset value so this is probably not required, but just
+  // in case.
+  reg[CTRL_MUX_0] &= ~((0xf << 8) | (0xf << 12)); // aon_gio2 and 3
+  reg[CTRL_MUX_1] &= ~(0xf << 4); // aon_gio9
+
+
+  // Set the direction to be an output and drive it low.
+  reg[GPIO_IODIR] &= ~((1 << 2) | (1 << 3) | (1 << 9));
+  reg[GPIO_DATA] &= ~((1 << 2) | (1 << 3) | (1 << 9));
+
+  return 0;
+}
diff --git a/cmds/host-test-dialcheck.sh b/cmds/host-test-dialcheck.sh
new file mode 100755
index 0000000..2eb8a8a
--- /dev/null
+++ b/cmds/host-test-dialcheck.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright 2016 Google Inc. All Rights Reserved.
+
+. ./wvtest/wvtest.sh
+
+PORTFILE="/tmp/dialcheck.test.$$.port"
+OUTFILE="/tmp/dialcheck.test.$$.output"
+
+WVSTART "dialcheck test"
+
+rm -f "$PORTFILE" "$OUTFILE"
+python ./dialcheck-test-server.py "$PORTFILE" &
+for i in $(seq 50); do if [ -f "$PORTFILE" ]; then break; fi; sleep 0.1; done
+
+port=$(cat "$PORTFILE")
+# Dial response will come from the IP address of the builder.
+WVPASS ./host-dialcheck -t "$port" >"$OUTFILE"
+WVPASS grep "DIAL responses from: " "$OUTFILE"
+rm -f "$PORTFILE" "$OUTFILE"
diff --git a/cmds/host-test-ssdptax.sh b/cmds/host-test-ssdptax.sh
index ebb3ae7..584401e 100755
--- a/cmds/host-test-ssdptax.sh
+++ b/cmds/host-test-ssdptax.sh
@@ -6,20 +6,34 @@
 
 SSDP=./host-ssdptax
 FIFO="/tmp/ssdptax.test.$$"
+OUTFILE="/tmp/ssdptax.test.$$.output"
 
 WVSTART "ssdptax test"
 
 python ./ssdptax-test-server.py "$FIFO" 1 &
 sleep 0.5
-WVPASSEQ "$($SSDP -t $FIFO)" "ssdp 00:00:00:00:00:00 Test Device;Google Fiber ssdptax"
-rm "$FIFO"
+WVPASS $SSDP -t "$FIFO" >"$OUTFILE"
+WVPASS grep -q "ssdp 00:00:00:00:00:00 Test Device;Google Fiber ssdptax" "$OUTFILE"
+echo quitquitquit | nc -U "$FIFO"
+rm -f "$FIFO" "$OUTFILE"
 
 python ./ssdptax-test-server.py "$FIFO" 2 &
 sleep 0.5
-WVPASSEQ "$($SSDP -t $FIFO)" "ssdp 00:00:00:00:00:00 REDACTED;server type"
-rm "$FIFO"
+WVPASS $SSDP -t "$FIFO" >"$OUTFILE"
+WVPASS grep -q "ssdp 00:00:00:00:00:00 REDACTED;server type" "$OUTFILE"
+echo quitquitquit | nc -U "$FIFO"
+rm -f "$FIFO" "$OUTFILE"
 
 python ./ssdptax-test-server.py "$FIFO" 3 &
 sleep 0.5
-WVPASSEQ "$($SSDP -t $FIFO)" "ssdp 00:00:00:00:00:00 Unknown;server type"
-rm "$FIFO"
+WVPASS $SSDP -t "$FIFO" >"$OUTFILE"
+WVPASS grep -q "ssdp 00:00:00:00:00:00 Unknown;server type" "$OUTFILE"
+echo quitquitquit | nc -U "$FIFO"
+rm -f "$FIFO" "$OUTFILE"
+
+python ./ssdptax-test-server.py "$FIFO" 4 &
+sleep 0.5
+WVPASS $SSDP -t "$FIFO" >"$OUTFILE"
+WVPASS grep -q "ssdp 00:00:00:00:00:00 Test Device;Google Fiber ssdptax multicast" "$OUTFILE"
+echo quitquitquit | nc -U "$FIFO"
+rm -f "$FIFO" "$OUTFILE"
diff --git a/cmds/logos.c b/cmds/logos.c
index 93577ce..57a6d02 100644
--- a/cmds/logos.c
+++ b/cmds/logos.c
@@ -25,7 +25,6 @@
  *  - cleans up control characters (ie. chars < 32).
  *  - makes sure output lines are in "facility: message" format.
  *  - doesn't rely on syslogd.
- *  - suppresses logging of filenames of personal media.
  */
 #include <assert.h>
 #include <ctype.h>
@@ -460,48 +459,6 @@
 }
 
 
-/*
- * Return true for a character which we expect to terminate a
- * media filename.
- */
-static int is_filename_terminator(char c) {
-  switch(c) {
-    case ' ':
-    case '\'':
-    case '"':
-      return 1;
-  }
-
-  return 0;
-}
-
-/*
- * search for text patterns which look like filenames of
- * personal media, and cross out the filename portion with
- * 'X' characters.
- */
-static void suppress_media_filenames(uint8_t *line, ssize_t len,
-                                     const char *path) {
-  uint8_t *s = line;
-  ssize_t pathlen = strlen(path);
-
-  while (len > pathlen) {
-    if (strncmp((char *)s, path, pathlen) == 0) {
-      /* Found a filename, blot it out. */
-      s += pathlen;
-      len -= pathlen;
-      while (len > 0 && !is_filename_terminator(*s)) {
-        *s++ = 'X';
-        len--;
-      }
-    } else {
-      s += 1;
-      len -= 1;
-    }
-  }
-}
-
-
 static void usage(void) {
   fprintf(stderr,
       "Usage: [LOGOS_DEBUG=1] logos <facilityname> [bytes/burst] [bytes/day]\n"
@@ -657,8 +614,6 @@
       uint8_t *start = buf, *next = buf + used, *end = buf + used + got, *p;
       while ((p = memchr(next, '\n', end - next)) != NULL) {
         ssize_t linelen = p - start;
-        suppress_media_filenames(start, linelen, "/var/media/pictures/");
-        suppress_media_filenames(start, linelen, "/var/media/videos/");
         flush(header, headerlen, start, linelen);
         if (overlong) {
           // that flush() was the first newline after buffer length
diff --git a/cmds/ssdptax-test-server.py b/cmds/ssdptax-test-server.py
index 54831d4..c86283a 100644
--- a/cmds/ssdptax-test-server.py
+++ b/cmds/ssdptax-test-server.py
@@ -5,7 +5,9 @@
 
 import BaseHTTPServer
 import socket
+import SocketServer
 import sys
+import threading
 
 
 text_device_xml = """<root>
@@ -31,45 +33,107 @@
   <device></device></root>"""
 
 
-xml = ['']
+ssdp_device_xml = """<root>
+  <specVersion><major>1</major><minor>0</minor></specVersion>
+  <device><friendlyName>Test Device</friendlyName>
+  <manufacturer>Google Fiber</manufacturer>
+  <modelDescription>Unit Test</modelDescription>
+  <modelName>ssdptax multicast</modelName>
+</device></root>"""
 
 
-class XmlHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+notify_template = 'NOTIFY\r\nHOST:239.255.255.250:1900\r\nLOCATION:%s\r\n'
+notify_text = ['']
+
+
+minissdpd_response = ['']
+keep_running = [True]
+
+
+class HttpHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """Respond to an HHTP GET for SSDP DeviceInfo."""
+
   def do_GET(self):
     self.send_response(200)
     self.send_header('Content-type','text/xml')
     self.end_headers()
-    self.wfile.write(xml[0])
+    if self.path.endswith('text_device_xml'):
+      self.wfile.write(text_device_xml)
+    if self.path.endswith('email_address_xml'):
+      self.wfile.write(email_address_xml)
+    if self.path.endswith('no_friendlyname_xml'):
+      self.wfile.write(no_friendlyname_xml)
+    if self.path.endswith('ssdp_device_xml'):
+      self.wfile.write(ssdp_device_xml)
+
+
+class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
+  pass
+
+
+class UnixHandler(SocketServer.StreamRequestHandler):
+  """Respond to a command on MiniSSDPd's Unix socket."""
+
+  def handle(self):
+    data = self.request.recv(8192)
+    if 'quitquitquit' in data:
+      print 'Received quitquitquit, exiting...'
+      keep_running[0] = False
+      return
+    else:
+      self.request.sendall(bytearray(minissdpd_response[0]))
+
+
+class UdpHandler(SocketServer.DatagramRequestHandler):
+  def handle(self):
+    self.request[1].sendto(bytearray(notify_text[0]), self.client_address)
+
+
+class ThreadingUdpServer(SocketServer.ThreadingUDPServer):
+  allow_reuse_address = True
 
 
 def main():
-  un = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-  un.bind(sys.argv[1])
-  un.listen(1)
-  conn, _ = un.accept()
-
+  socketpath = sys.argv[1]
   testnum = int(sys.argv[2])
   if testnum == 1:
-    xml[0] = text_device_xml
+    pathend = 'text_device_xml'
   if testnum == 2:
-    xml[0] = email_address_xml
+    pathend = 'email_address_xml'
   if testnum == 3:
-    xml[0] = no_friendlyname_xml
+    pathend = 'no_friendlyname_xml'
+  if testnum == 4:
+    pathend = 'ssdp_device_xml'
 
-  s = BaseHTTPServer.HTTPServer(("", 0), XmlHandler)
-  sn = s.socket.getsockname()
+  h = ThreadingHTTPServer(("", 0), HttpHandler)
+  sn = h.socket.getsockname()
   port = sn[1]
-  url = 'http://127.0.0.1:%d/foo.xml' % port
+  url = 'http://127.0.0.1:%d/%s' % (port, pathend)
   st = 'server type'
   uuid = 'uuid goes here'
-  data = [1]
-  data.extend([len(url)] + list(url))
-  data.extend([len(st)] + list(st))
-  data.extend([len(uuid)] + list(uuid))
+  if testnum == 4:
+    minissdpd_response[0] = [0]
+  else:
+    minissdpd_response[0] = [1]
+    minissdpd_response[0].extend([len(url)] + list(url))
+    minissdpd_response[0].extend([len(st)] + list(st))
+    minissdpd_response[0].extend([len(uuid)] + list(uuid))
+  notify_text[0] = notify_template % url
 
-  _ = conn.recv(8192)
-  conn.sendall(bytearray(data))
-  s.handle_request()
+  h_thread = threading.Thread(target=h.serve_forever)
+  h_thread.daemon = True
+  h_thread.start()
+
+  d = ThreadingUdpServer(('', 1900), UdpHandler)
+  d.socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP,
+      socket.inet_aton('239.255.255.250') + socket.inet_aton('0.0.0.0'))
+  d_thread = threading.Thread(target=d.serve_forever)
+  d_thread.daemon = True
+  d_thread.start()
+
+  u = SocketServer.UnixStreamServer(socketpath, UnixHandler)
+  while keep_running[0]:
+    u.handle_request()
 
 
 if __name__ == '__main__':
diff --git a/cmds/ssdptax.cc b/cmds/ssdptax.cc
index 2a06c7a..d2663fc 100644
--- a/cmds/ssdptax.cc
+++ b/cmds/ssdptax.cc
@@ -30,6 +30,7 @@
 #include <ctype.h>
 #include <curl/curl.h>
 #include <getopt.h>
+#include <net/if.h>
 #include <netinet/in.h>
 #include <regex.h>
 #include <stdio.h>
@@ -43,6 +44,7 @@
 
 #include <iostream>
 #include <set>
+#include <tr1/unordered_map>
 
 #include "l2utils.h"
 
@@ -68,10 +70,11 @@
 
 typedef struct ssdp_info {
   ssdp_info(): srv_type(), url(), friendlyName(), ipaddr(),
-    manufacturer(), model(), failed(0) {}
+    manufacturer(), model(), buffer(), failed(0) {}
   ssdp_info(const ssdp_info& s): srv_type(s.srv_type), url(s.url),
     friendlyName(s.friendlyName), ipaddr(s.ipaddr),
-    manufacturer(s.manufacturer), model(s.model), failed(s.failed) {}
+    manufacturer(s.manufacturer), model(s.model),
+    buffer(s.buffer), failed(s.failed) {}
   std::string srv_type;
   std::string url;
   std::string friendlyName;
@@ -84,6 +87,24 @@
 } ssdp_info_t;
 
 
+typedef std::tr1::unordered_map<std::string, ssdp_info_t*> ResponsesMap;
+
+
+int ssdp_loop = 0;
+
+
+/* SSDP Discover packet */
+#define SSDP_PORT 1900
+#define SSDP_IP4  "239.255.255.250"
+#define SSDP_IP6  "ff02::c"
+const char discover_template[] = "M-SEARCH * HTTP/1.1\r\n"
+                                 "HOST: %s:%d\r\n"
+                                 "MAN: \"ssdp:discover\"\r\n"
+                                 "MX: 2\r\n"
+                                 "USER-AGENT: ssdptax/1.0\r\n"
+                                 "ST: %s\r\n\r\n";
+
+
 static void strncpy_limited(char *dst, size_t dstlen,
     const char *src, size_t srclen)
 {
@@ -104,6 +125,13 @@
 }
 
 
+static time_t monotime(void) {
+  struct timespec ts;
+  clock_gettime(CLOCK_MONOTONIC, &ts);
+  return ts.tv_sec;
+}
+
+
 /*
  * Send a request to minissdpd. Returns a std::string containing
  * minissdpd's response.
@@ -124,19 +152,19 @@
 
   if (s < 0) {
     perror("socket AF_UNIX failed");
-    exit(1);
+    return rc;
   }
   memset(&addr, 0, sizeof(addr));
   addr.sun_family = AF_UNIX;
   strncpy(addr.sun_path, sock_path, sizeof(addr.sun_path));
-  if(connect(s, (struct sockaddr *)&addr, sizeof(struct sockaddr_un)) < 0) {
+  if (connect(s, (struct sockaddr *)&addr, sizeof(struct sockaddr_un)) < 0) {
     perror("connect to minisspd failed");
-    exit(1);
+    return rc;
   }
 
   if ((buffer = (char *)malloc(siz)) == NULL) {
     fprintf(stderr, "malloc(%zu) failed\n", siz);
-    exit(1);
+    return rc;
   }
   memset(buffer, 0, siz);
 
@@ -147,7 +175,8 @@
   p += device_len;
   if (write(s, buffer, p - buffer) < 0) {
     perror("write to minissdpd failed");
-    exit(1);
+    free(buffer);
+    return rc;
   }
 
   FD_ZERO(&readfds);
@@ -157,18 +186,174 @@
 
   if (select(s + 1, &readfds, NULL, NULL, &tv) < 1) {
     fprintf(stderr, "select failed\n");
-    exit(1);
+    free(buffer);
+    return rc;
   }
 
   if ((len = read(s, buffer, siz)) < 0) {
     perror("read from minissdpd failed");
-    exit(1);
+    free(buffer);
+    return rc;
   }
 
   close(s);
   rc = std::string(buffer, len);
   free(buffer);
-  return(rc);
+  return rc;
+}
+
+
+int get_ipv4_ssdp_socket()
+{
+  int s;
+  int reuse = 1;
+  struct sockaddr_in sin;
+  struct ip_mreq mreq;
+  struct ip_mreqn mreqn;
+
+  if ((s = socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
+    perror("socket SOCK_DGRAM");
+    exit(1);
+  }
+
+  if (setsockopt(s, SOL_SOCKET, SO_REUSEADDR, (char *)&reuse, sizeof(reuse))) {
+    perror("setsockopt SO_REUSEADDR");
+    exit(1);
+  }
+
+  if (setsockopt(s, IPPROTO_IP, IP_MULTICAST_LOOP,
+        &ssdp_loop, sizeof(ssdp_loop))) {
+    perror("setsockopt IP_MULTICAST_LOOP");
+    exit(1);
+  }
+
+  memset(&sin, 0, sizeof(sin));
+  sin.sin_family = AF_INET;
+  sin.sin_port = htons(SSDP_PORT);
+  sin.sin_addr.s_addr = INADDR_ANY;
+  if (bind(s, (struct sockaddr*)&sin, sizeof(sin))) {
+    perror("bind");
+    exit(1);
+  }
+
+  memset(&mreqn, 0, sizeof(mreqn));
+  mreqn.imr_ifindex = if_nametoindex("br0");
+  if (setsockopt(s, IPPROTO_IP, IP_MULTICAST_IF, &mreqn, sizeof(mreqn))) {
+    perror("IP_MULTICAST_IF");
+    exit(1);
+  }
+
+  memset(&mreq, 0, sizeof(mreq));
+  mreq.imr_multiaddr.s_addr = inet_addr(SSDP_IP4);
+  if (setsockopt(s, IPPROTO_IP, IP_ADD_MEMBERSHIP,
+        (char *)&mreq, sizeof(mreq))) {
+    perror("IP_ADD_MEMBERSHIP");
+    exit(1);
+  }
+
+  return s;
+}
+
+
+void send_ssdp_ip4_request(int s, const char *search)
+{
+  struct sockaddr_in sin;
+  char buf[1024];
+  ssize_t len;
+
+  snprintf(buf, sizeof(buf), discover_template, SSDP_IP4, SSDP_PORT, search);
+  memset(&sin, 0, sizeof(sin));
+  sin.sin_family = AF_INET;
+  sin.sin_port = htons(SSDP_PORT);
+  sin.sin_addr.s_addr = inet_addr(SSDP_IP4);
+  len = strlen(buf);
+  if (sendto(s, buf, len, 0, (struct sockaddr*)&sin, sizeof(sin)) != len) {
+    perror("sendto multicast IPv4");
+    exit(1);
+  }
+}
+
+
+int get_ipv6_ssdp_socket()
+{
+  int s;
+  int reuse = 1;
+  struct sockaddr_in6 sin6;
+  struct ipv6_mreq mreq;
+  int idx;
+  int hops;
+
+  if ((s = socket(AF_INET6, SOCK_DGRAM, 0)) < 0) {
+    perror("socket SOCK_DGRAM");
+    exit(1);
+  }
+
+  if (setsockopt(s, SOL_SOCKET, SO_REUSEADDR, (char *)&reuse, sizeof(reuse))) {
+    perror("setsockopt SO_REUSEADDR");
+    exit(1);
+  }
+
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_LOOP,
+        &ssdp_loop, sizeof(ssdp_loop))) {
+    perror("setsockopt IPV6_MULTICAST_LOOP");
+    exit(1);
+  }
+
+  memset(&sin6, 0, sizeof(sin6));
+  sin6.sin6_family = AF_INET6;
+  sin6.sin6_port = htons(SSDP_PORT);
+  sin6.sin6_addr = in6addr_any;
+  if (bind(s, (struct sockaddr*)&sin6, sizeof(sin6))) {
+    perror("bind");
+    exit(1);
+  }
+
+  idx = if_nametoindex("br0");
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_IF, &idx, sizeof(idx))) {
+    perror("IP_MULTICAST_IF");
+    exit(1);
+  }
+
+  hops = 2;
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_MULTICAST_HOPS, &hops, sizeof(hops))) {
+    perror("IPV6_MULTICAST_HOPS");
+    exit(1);
+  }
+
+  memset(&mreq, 0, sizeof(mreq));
+  mreq.ipv6mr_interface = idx;
+  if (inet_pton(AF_INET6, SSDP_IP6, &mreq.ipv6mr_multiaddr) != 1) {
+    fprintf(stderr, "ERR: inet_pton(%s) failed", SSDP_IP6);
+    exit(1);
+  }
+  if (setsockopt(s, IPPROTO_IPV6, IPV6_JOIN_GROUP, &mreq, sizeof(mreq)) < 0) {
+    perror("ERR: setsockopt(IPV6_JOIN_GROUP)");
+    exit(1);
+  }
+
+  return s;
+}
+
+
+void send_ssdp_ip6_request(int s, const char *search)
+{
+  struct sockaddr_in6 sin6;
+  char buf[1024];
+  ssize_t len;
+
+  snprintf(buf, sizeof(buf), discover_template, SSDP_IP6, SSDP_PORT, search);
+  memset(&sin6, 0, sizeof(sin6));
+  sin6.sin6_family = AF_INET6;
+  sin6.sin6_port = htons(SSDP_PORT);
+  if (inet_pton(AF_INET6, SSDP_IP6, &sin6.sin6_addr) != 1) {
+    fprintf(stderr, "ERR: inet_pton(%s) failed", SSDP_IP6);
+    exit(1);
+  }
+  len = strlen(buf);
+  if (sendto(s, buf, len, 0, (struct sockaddr*)&sin6, sizeof(sin6)) != len) {
+    perror("sendto multicast IPv6");
+    exit(1);
+  }
 }
 
 
@@ -389,8 +574,102 @@
 }
 
 
+std::string trim(std::string s)
+{
+  size_t start = s.find_first_not_of(" \t\v\f\b\r\n");
+  if (std::string::npos != start && 0 != start) s = s.erase(0, start);
+
+  size_t end = s.find_last_not_of(" \t\v\f\b\r\n");
+  if (std::string::npos != end) s = s.substr(0, end + 1);
+
+  return s;
+}
+
+
+void parse_ssdp_response(int s, ResponsesMap &responses)
+{
+  ssdp_info_t *info = new ssdp_info_t;
+  char buffer[4096];
+  char *p, *saveptr, *strtok_pos;
+  ssize_t pktlen;
+
+  memset(buffer, 0, sizeof(buffer));
+  pktlen = recv(s, buffer, sizeof(buffer) - 1, 0);
+  if (pktlen < 0 || (size_t)pktlen >= sizeof(buffer)) {
+    fprintf(stderr, "error receiving SSDP response, pktlen=%zd\n", pktlen);
+    delete info;
+    /* not fatal, just return */
+    return;
+  }
+  buffer[pktlen] = '\0';
+  strtok_pos = buffer;
+
+  while ((p = strtok_r(strtok_pos, "\r\n", &saveptr)) != NULL) {
+    if (strlen(p) > 9 && strncasecmp(p, "location:", 9) == 0) {
+      char urlbuf[512];
+      p += 9;
+      strncpy_limited(urlbuf, sizeof(urlbuf), p, strlen(p));
+      info->url = trim(std::string(urlbuf, strlen(urlbuf)));
+    } else if (strlen(p) > 7 && strncasecmp(p, "server:", 7) == 0) {
+      char srv_type_buf[256];
+      p += 7;
+      strncpy_limited(srv_type_buf, sizeof(srv_type_buf), p, strlen(p));
+      info->srv_type = trim(std::string(srv_type_buf, strlen(srv_type_buf)));
+    }
+    strtok_pos = NULL;
+  }
+
+  if (info->url.length() && responses.find(info->url) == responses.end()) {
+    fetch_device_info(info->url, info);
+    responses[info->url] = info;
+  } else {
+    delete info;
+  }
+}
+
+
+/* Wait for SSDP NOTIFY messages to arrive. */
+#define TIMEOUT_SECS  5
+void listen_for_responses(int s4, int s6, ResponsesMap &responses)
+{
+  struct timeval tv;
+  fd_set rfds;
+  int maxfd = (s4 > s6) ? s4 : s6;
+  time_t start = monotime();
+
+  memset(&tv, 0, sizeof(tv));
+  tv.tv_sec = TIMEOUT_SECS;
+  tv.tv_usec = 0;
+
+  FD_ZERO(&rfds);
+  FD_SET(s4, &rfds);
+  FD_SET(s6, &rfds);
+
+  while (select(maxfd + 1, &rfds, NULL, NULL, &tv) > 0) {
+    time_t end = monotime();
+    if (FD_ISSET(s4, &rfds)) {
+      parse_ssdp_response(s4, responses);
+    }
+    if (FD_ISSET(s6, &rfds)) {
+      parse_ssdp_response(s6, responses);
+    }
+
+    FD_ZERO(&rfds);
+    FD_SET(s4, &rfds);
+    FD_SET(s6, &rfds);
+
+    if ((end - start) > TIMEOUT_SECS) {
+      /* even on a network filled with SSDP packets,
+       * return after TIMEOUT_SECS. */
+      break;
+    }
+  }
+}
+
+
 void usage(char *progname) {
-  printf("usage: %s [-t /path/to/fifo]\n", progname);
+  printf("usage: %s [-t /path/to/fifo] [-s search]\n", progname);
+  printf("\t-s\tserver type to search for (default ssdp:all)\n");
   printf("\t-t\ttest mode, use a fake path instead of minissdpd.\n");
   exit(1);
 }
@@ -399,11 +678,11 @@
 int main(int argc, char **argv)
 {
   std::string buffer;
-  typedef std::tr1::unordered_map<std::string, ssdp_info_t*> ResponsesMap;
   ResponsesMap responses;
   L2Map l2map;
-  int c, num;
+  int c, s4, s6;
   const char *sock_path = SOCK_PATH;
+  const char *search = "ssdp:all";
 
   setlinebuf(stdout);
   alarm(30);
@@ -413,28 +692,52 @@
     exit(1);
   }
 
-  while ((c = getopt(argc, argv, "t:")) != -1) {
+  while ((c = getopt(argc, argv, "s:t:")) != -1) {
     switch(c) {
-      case 't': sock_path = optarg; break;
+      case 's': search = optarg; break;
+      case 't':
+        sock_path = optarg;
+        ssdp_loop = 1;
+        break;
       default: usage(argv[0]); break;
     }
   }
 
-  buffer = request_from_ssdpd(sock_path, 3, "ssdp:all");
-  num = buffer.c_str()[0];
-  buffer.erase(0, 1);
-  while ((num-- > 0) && buffer.length() > 0) {
-    ssdp_info_t *info = new ssdp_info_t;
+  /* Request the list from MiniSSDPd */
+  buffer = request_from_ssdpd(sock_path, 3, search);
+  if (!buffer.empty()) {
+    int num = buffer.c_str()[0];
+    buffer.erase(0, 1);
+    while ((num-- > 0) && buffer.length() > 0) {
+      ssdp_info_t *info = new ssdp_info_t;
 
-    parse_minissdpd_response(buffer, info->url, info->srv_type);
-    if (info->url.length() && responses.find(info->url) == responses.end()) {
-      fetch_device_info(info->url, info);
-      responses[info->url] = info;
-    } else {
-      delete info;
+      parse_minissdpd_response(buffer, info->url, info->srv_type);
+      if (info->url.length() && responses.find(info->url) == responses.end()) {
+        fetch_device_info(info->url, info);
+        responses[info->url] = info;
+      } else {
+        delete info;
+      }
     }
+
+    /* Capture the ARP table in its current state. */
+    get_l2_map(&l2map);
   }
 
+  /* Supplement what we got from MiniSSDPd by sending
+   * our own M-SEARCH and listening for responses. */
+  s4 = get_ipv4_ssdp_socket();
+  send_ssdp_ip4_request(s4, search);
+  s6 = get_ipv6_ssdp_socket();
+  send_ssdp_ip6_request(s6, search);
+  listen_for_responses(s4, s6, responses);
+  close(s4);
+  s4 = -1;
+  close(s6);
+  s6 = -1;
+
+  /* Capture any new ARP table entries which appeared after sending
+   * our own M-SEARCH. */
   get_l2_map(&l2map);
 
   typedef std::set<std::string> ResultsSet;
diff --git a/cmds/test-anonid.sh b/cmds/test-anonid.sh
new file mode 100755
index 0000000..87f2b0f
--- /dev/null
+++ b/cmds/test-anonid.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+. ./wvtest/wvtest.sh
+
+WVSTART "anonid test"
+ANONID="./host-anonid"
+
+WVPASSEQ "$($ANONID -a 00:11:22:33:44:55 -k 0123456789)" "KEALAE"
+WVPASSEQ "$($ANONID -a 00:11:22:33:44:66 -k 6789abcdef)" "AAKLYK"
diff --git a/cmds/test-castcheck.sh b/cmds/test-castcheck.sh
new file mode 100755
index 0000000..7aa3be2
--- /dev/null
+++ b/cmds/test-castcheck.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+. ./wvtest/wvtest.sh
+
+WVSTART "castcheck test"
+CASTCHECK="./castcheck -a ./avahi-browse-fake.sh"
+
+WVPASSEQ "$($CASTCHECK)" "Cast responses from: 1.1.1.1 2.2.2.2 3.3.3.3"
diff --git a/cmds/test-http_bouncer.sh b/cmds/test-http_bouncer.sh
index 4129d52..9cc0d9e 100755
--- a/cmds/test-http_bouncer.sh
+++ b/cmds/test-http_bouncer.sh
@@ -40,10 +40,6 @@
 INPUTS[3]=$(printf "\n\n"; printf "$SENTINEL")
 OUTPUTS[3]=$(printf "HTTP/1.0 302 Found\r\nLocation: $URL\r\n\r\n"; printf "$SENTINEL")
 
-INPUTS[4]=$(printf "GET /GIAG2.crl HTTP/1.0\r\nHost: pki.google.com\r\n\r\n"; printf "$SENTINEL")
-OUTPUTS[4]=$(curl "http://pki.google.com/GIAG2.crl"; printf "$SENTINEL")
-STRIP_HEADER[4]=1
-
 WVSTART "http_bouncer test"
 
 # fail with no arguments
@@ -59,10 +55,13 @@
 i=0
 while [ $i -lt ${#INPUTS[@]} ]; do
   output=$(echo -n "${INPUTS[$i]}" | nc localhost $PORT; printf "$SENTINEL")
-  if [ ${STRIP_HEADER[$i]} ]; then
-    output=$(echo -n "$output" | sed '1,/^\r$/d')
-  fi
-
   WVPASSEQ "$output" "${OUTPUTS[$i]}"
   i=$(expr $i + 1)
 done
+
+# Make sure we can download a CRL even through the bouncer.
+# Some Internet Explorer versions will refuse to connect if we can't.
+WVPASS printf "GET /GIAG2.crl HTTP/1.0\r\nHost: pki.google.com\r\n\r\n" |\
+  nc localhost $PORT |\
+  sed '1,/^\r$/d' |\
+  openssl crl -inform DER
diff --git a/cmds/test-logos.py b/cmds/test-logos.py
index d930ccb..34f5693 100755
--- a/cmds/test-logos.py
+++ b/cmds/test-logos.py
@@ -86,23 +86,6 @@
   os.write(fd1, '\n')
   WVPASSEQ('<7>fac: booga!\n', _Read())
 
-  # Filenames
-  os.write(fd1, 'Accessing /var/media/pictures/MyPicture.jpg for decode\n')
-  WVPASSEQ('<7>fac: Accessing /var/media/pictures/XXXXXXXXXXXXX for decode\n',
-           _Read())
-  os.write(fd1, '/var/media/pictures/MyPicture.jpg\n')
-  WVPASSEQ('<7>fac: /var/media/pictures/XXXXXXXXXXXXX\n',
-           _Read())
-  os.write(fd1, 'Accessing /var/media/videos/MyMovie.mpg for decode\n')
-  WVPASSEQ('<7>fac: Accessing /var/media/videos/XXXXXXXXXXX for decode\n',
-           _Read())
-  os.write(fd1, 'Accessing /var/media/tv/MyTvShow.ts for decode\n')
-  WVPASSEQ('<7>fac: Accessing /var/media/tv/MyTvShow.ts for decode\n',
-           _Read())
-  os.write(fd1, 'check "/var/media/videos/MyTvShow.ts"len=1024\n')
-  WVPASSEQ('<7>fac: check "/var/media/videos/XXXXXXXXXXX"len=1024\n',
-           _Read())
-
   # rate limiting
   os.write(fd1, (('x'*80) + '\n') * 500)
   result = ''
diff --git a/conman/connection_manager.py b/conman/connection_manager.py
index 2ab4763..040e812 100755
--- a/conman/connection_manager.py
+++ b/conman/connection_manager.py
@@ -8,7 +8,9 @@
 import json
 import logging
 import os
+import random
 import re
+import socket
 import subprocess
 import time
 
@@ -18,12 +20,16 @@
 import pyinotify
 
 import cycler
+import experiment
 import interface
 import iw
 import status
 
-GFIBER_OUIS = ['f4:f5:e8']
-VENDOR_IE_FEATURE_ID_AUTOPROVISIONING = '01'
+
+HOSTNAME = socket.gethostname()
+TMP_HOSTS = '/tmp/hosts'
+
+experiment.register('WifiNo2GClient')
 
 
 class FileChangeHandler(pyinotify.ProcessEvent):
@@ -91,13 +97,15 @@
       raise ValueError('Command file does not specify SSID')
 
     if self.wifi.initial_ssid == self.ssid:
-      logging.debug('Connected to WLAN at startup')
+      logging.info('Connected to WLAN at startup')
 
   @property
   def client_up(self):
     wpa_status = self.wifi.wpa_status()
     return (wpa_status.get('wpa_state') == 'COMPLETED'
-            and wpa_status.get('ssid') == self.ssid)
+            # NONE indicates we're on a provisioning network; anything else
+            # suggests we're already on the WLAN.
+            and wpa_status.get('key_mgmt') != 'NONE')
 
   def start_access_point(self):
     """Start an access point."""
@@ -113,7 +121,7 @@
     try:
       subprocess.check_output(self.command, stderr=subprocess.STDOUT)
       self.access_point_up = True
-      logging.debug('Started %s GHz AP', self.band)
+      logging.info('Started %s GHz AP', self.band)
     except subprocess.CalledProcessError as e:
       logging.error('Failed to start access point: %s', e.output)
 
@@ -128,18 +136,31 @@
     try:
       subprocess.check_output(command, stderr=subprocess.STDOUT)
       self.access_point_up = False
-      logging.debug('Stopped %s GHz AP', self.band)
+      logging.info('Stopped %s GHz AP', self.band)
     except subprocess.CalledProcessError as e:
       logging.error('Failed to stop access point: %s', e.output)
       return
 
   def start_client(self):
     """Join the WLAN as a client."""
+    if experiment.enabled('WifiNo2GClient') and self.band == '2.4':
+      logging.info('WifiNo2GClient enabled; not starting 2.4 GHz client.')
+      return
+
     up = self.client_up
     if up:
       logging.debug('Wifi client already started on %s GHz', self.band)
       return
 
+    if self._actually_start_client():
+      self._post_start_client()
+
+  def _actually_start_client(self):
+    """Actually run wifi setclient.
+
+    Returns:
+      Whether the command succeeded.
+    """
     command = self.WIFI_SETCLIENT + ['--ssid', self.ssid, '--band', self.band]
     env = dict(os.environ)
     if self.passphrase:
@@ -149,8 +170,11 @@
       subprocess.check_output(command, stderr=subprocess.STDOUT, env=env)
     except subprocess.CalledProcessError as e:
       logging.error('Failed to start wifi client: %s', e.output)
-      return
+      return False
 
+    return True
+
+  def _post_start_client(self):
     self._status.connected_to_wlan = True
     logging.info('Started wifi client on %s GHz', self.band)
     self.wifi.attach_wpa_control(self._wpa_control_interface)
@@ -167,7 +191,7 @@
                               stderr=subprocess.STDOUT)
       # TODO(rofrankel): Make this work for dual-radio devices.
       self._status.connected_to_wlan = False
-      logging.debug('Stopped wifi client on %s GHz', self.band)
+      logging.info('Stopped wifi client on %s GHz', self.band)
     except subprocess.CalledProcessError as e:
       logging.error('Failed to stop wifi client: %s', e.output)
 
@@ -194,6 +218,7 @@
   IP_LINK = ['ip', 'link']
   IFPLUGD_ACTION = ['/etc/ifplugd/ifplugd.action']
   BINWIFI = ['wifi']
+  UPLOAD_LOGS_AND_WAIT = ['timeout', '60', 'upload-logs-and-wait']
 
   def __init__(self,
                bridge_interface='br0',
@@ -202,7 +227,8 @@
                moca_tmp_dir='/tmp/cwmp/monitoring/moca2',
                wpa_control_interface='/var/run/wpa_supplicant',
                run_duration_s=1, interface_update_period=5,
-               wifi_scan_period_s=120, wlan_retry_s=15, acs_update_wait_s=10):
+               wifi_scan_period_s=120, wlan_retry_s=15, acs_update_wait_s=10,
+               dhcp_wait_s=10, bssid_cycle_length_s=30):
 
     self._tmp_dir = tmp_dir
     self._config_dir = config_dir
@@ -215,7 +241,10 @@
     self._wifi_scan_period_s = wifi_scan_period_s
     self._wlan_retry_s = wlan_retry_s
     self._acs_update_wait_s = acs_update_wait_s
+    self._dhcp_wait_s = dhcp_wait_s
+    self._bssid_cycle_length_s = bssid_cycle_length_s
     self._wlan_configuration = {}
+    self._try_to_upload_logs = False
 
     # Make sure all necessary directories exist.
     for directory in (self._tmp_dir, self._config_dir, self._moca_tmp_dir,
@@ -294,7 +323,7 @@
     # the routing table.
     for ifc in [self.bridge] + self.wifi:
       ifc.initialize()
-      logging.debug('%s initialized', ifc.name)
+      logging.info('%s initialized', ifc.name)
 
     self._interface_update_counter = 0
     self._try_wlan_after = {'5': 0, '2.4': 0}
@@ -425,7 +454,7 @@
       if self._connected_to_wlan(wifi):
         self._status.connected_to_wlan = True
         logging.debug('Connected to WLAN on %s, nothing else to do.', wifi.name)
-        return
+        break
 
       # This interface is not connected to the WLAN, so scan for potential
       # routes to the ACS for provisioning.
@@ -443,10 +472,10 @@
       for band in wifi.bands:
         wlan_configuration = self._wlan_configuration.get(band, None)
         if wlan_configuration and time.time() > self._try_wlan_after[band]:
-          logging.debug('Trying to join WLAN on %s.', wifi.name)
+          logging.info('Trying to join WLAN on %s.', wifi.name)
           wlan_configuration.start_client()
           if self._connected_to_wlan(wifi):
-            logging.debug('Joined WLAN on %s.', wifi.name)
+            logging.info('Joined WLAN on %s.', wifi.name)
             self._status.connected_to_wlan = True
             self._try_wlan_after[band] = 0
             break
@@ -464,10 +493,15 @@
         logging.debug('Unable to join WLAN on %s', wifi.name)
         self._status.connected_to_wlan = False
         if self.acs():
-          logging.debug('Connected to ACS on %s', wifi.name)
-          wifi.last_successful_bss_info = getattr(wifi,
-                                                  'last_attempted_bss_info',
-                                                  None)
+          logging.debug('Connected to ACS')
+          if self._try_to_upload_logs:
+            self._try_upload_logs()
+            self._try_to_upload_logs = False
+
+          if wifi.acs():
+            wifi.last_successful_bss_info = getattr(wifi,
+                                                    'last_attempted_bss_info',
+                                                    None)
           now = time.time()
           if (self._wlan_configuration and
               hasattr(wifi, 'waiting_for_acs_since')):
@@ -489,8 +523,21 @@
         # If we didn't manage to join the WLAN and we don't have an ACS
         # connection, we should try to establish one.
         else:
-          logging.debug('Not connected to ACS on %s', wifi.name)
-          self._try_next_bssid(wifi)
+          # If we are associated but waiting for a DHCP lease, try again later.
+          now = time.time()
+          connected_to_open = (
+              wifi.wpa_status().get('wpa_state', None) == 'COMPLETED' and
+              wifi.wpa_status().get('key_mgmt', None) == 'NONE')
+          wait_for_dhcp = (
+              not wifi.gateway() and
+              hasattr(wifi, 'waiting_for_dhcp_since') and
+              now - wifi.waiting_for_dhcp_since < self._dhcp_wait_s)
+          if connected_to_open and wait_for_dhcp:
+            logging.debug('Waiting for DHCP lease after %ds.',
+                          now - wifi.waiting_for_dhcp_since)
+          else:
+            logging.debug('Not connected to ACS')
+            self._try_next_bssid(wifi)
 
     time.sleep(max(0, self._run_duration_s - (time.time() - start_time)))
 
@@ -529,6 +576,37 @@
     self.acs()
     self.internet()
 
+    # Update /etc/hosts (depends on routing table)
+    self._update_tmp_hosts()
+
+  def _update_tmp_hosts(self):
+    """Update the contents of /tmp/hosts."""
+    lowest_metric_interface = None
+    for ifc in [self.bridge] + self.wifi:
+      route = ifc.current_route()
+      if route:
+        metric = route.get('metric', 0)
+        # Skip temporary connection_check routes.
+        if metric == '99':
+          continue
+        candidate = (metric, ifc)
+        if (lowest_metric_interface is None or
+            candidate < lowest_metric_interface):
+          lowest_metric_interface = candidate
+
+    ip_line = ''
+    if lowest_metric_interface:
+      ip = lowest_metric_interface[1].get_ip_address()
+      ip_line = '%s %s\n' % (ip, HOSTNAME) if ip else ''
+
+    new_tmp_hosts = '%s127.0.0.1 localhost' % ip_line
+
+    if not os.path.exists(TMP_HOSTS) or open(TMP_HOSTS).read() != new_tmp_hosts:
+      tmp_hosts_tmp_filename = TMP_HOSTS + '.tmp'
+      tmp_hosts_tmp = open(tmp_hosts_tmp_filename, 'w')
+      tmp_hosts_tmp.write(new_tmp_hosts)
+      tmp_hosts_tmp.close()
+      os.rename(tmp_hosts_tmp_filename, TMP_HOSTS)
+
   def handle_event(self, path, filename, deleted):
     if deleted:
       self._handle_deleted_file(path, filename)
@@ -577,7 +655,7 @@
       if filename == self.ETHERNET_STATUS_FILE:
         try:
           self.bridge.ethernet = bool(int(contents))
-          logging.debug('Ethernet %s', 'up' if self.bridge.ethernet else 'down')
+          logging.info('Ethernet %s', 'up' if self.bridge.ethernet else 'down')
         except ValueError:
           logging.error('Status file contents should be 0 or 1, not %s',
                         contents)
@@ -600,7 +678,7 @@
           wifi = self.wifi_for_band(band)
           if wifi and band in self._wlan_configuration:
             self._wlan_configuration[band].access_point = True
-          logging.debug('AP enabled for %s GHz', band)
+          logging.info('AP enabled for %s GHz', band)
 
     elif path == self._tmp_dir:
       if filename.startswith(self.GATEWAY_FILE_PREFIX):
@@ -608,8 +686,8 @@
         ifc = self.interface_by_name(interface_name)
         if ifc:
           ifc.set_gateway_ip(contents)
-          logging.debug('Received gateway %r for interface %s', contents,
-                        ifc.name)
+          logging.info('Received gateway %r for interface %s', contents,
+                       ifc.name)
 
     elif path == self._moca_tmp_dir:
       match = re.match(r'^%s\d+$' % self.MOCA_NODE_FILE_PREFIX, filename)
@@ -654,20 +732,19 @@
     subprocess.call(self.IFUP + [wifi.name])
     # /bin/wifi takes a --band option but then finds the right interface for it,
     # so it's okay to just pick the first band here.
-    with_ie, without_ie = self._find_bssids(wifi.bands[0])
+    items = self._find_bssids(wifi.bands[0])
     logging.info('Done scanning on %s', wifi.name)
-    items = [(bss_info, 3) for bss_info in with_ie]
-    items += [(bss_info, 1) for bss_info in without_ie]
-    wifi.cycler = cycler.AgingPriorityCycler(cycle_length_s=30, items=items)
+    if not hasattr(wifi, 'cycler'):
+      wifi.cycler = cycler.AgingPriorityCycler(
+          cycle_length_s=self._bssid_cycle_length_s)
+    # Shuffle items so that determinism in scan results and dict ordering
+    # does not unfairly bias the BSSID order.
+    random.shuffle(items)
+    wifi.cycler.update(items)
 
   def _find_bssids(self, band):
-    def supports_autoprovisioning(oui, vendor_ie):
-      if oui not in GFIBER_OUIS:
-        return False
-
-      return vendor_ie.startswith(VENDOR_IE_FEATURE_ID_AUTOPROVISIONING)
-
-    return iw.find_bssids(band, supports_autoprovisioning, False)
+    """Wrapper used as a unit testing seam."""
+    return iw.find_bssids(band, False)
 
   def _try_next_bssid(self, wifi):
     """Attempt to connect to the next BSSID in wifi's BSSID cycler.
@@ -684,16 +761,19 @@
     last_successful_bss_info = getattr(wifi, 'last_successful_bss_info', None)
     bss_info = last_successful_bss_info or wifi.cycler.next()
     if bss_info is not None:
-      logging.debug('Attempting to connect to SSID %s for provisioning',
-                    bss_info.ssid)
+      logging.info('Attempting to connect to SSID %s (%s) for provisioning',
+                   bss_info.ssid, bss_info.bssid)
       self._status.trying_open = True
+      wifi.set_gateway_ip(None)
       connected = self._try_bssid(wifi, bss_info)
       if connected:
         self._status.connected_to_open = True
         now = time.time()
         wifi.waiting_for_acs_since = now
+        wifi.waiting_for_dhcp_since = now
         wifi.complain_about_acs_at = now + 5
         logging.info('Attempting to provision via SSID %s', bss_info.ssid)
+        self._try_to_upload_logs = True
       # If we can no longer connect to this, it's no longer successful.
       elif bss_info == last_successful_bss_info:
         wifi.last_successful_bss_info = None
@@ -735,7 +815,7 @@
         wlan_configuration.access_point = os.path.exists(ap_file)
       self._wlan_configuration[band] = wlan_configuration
       self._status.have_config = True
-      logging.debug('Updated WLAN configuration for %s GHz', band)
+      logging.info('Updated WLAN configuration for %s GHz', band)
       self._update_access_point(wlan_configuration)
 
   def _update_access_point(self, wlan_configuration):
@@ -789,6 +869,11 @@
     subprocess.check_output(self.BINWIFI + list(command),
                             stderr=subprocess.STDOUT)
 
+  def _try_upload_logs(self):
+    logging.info('Attempting to upload logs')
+    if subprocess.call(self.UPLOAD_LOGS_AND_WAIT) != 0:
+      logging.error('Failed to upload logs')
+
 
 def _wifi_show():
   try:
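
For reference, a minimal standalone sketch (not part of the patch) of the /tmp/hosts update pattern that _update_tmp_hosts introduces above: pick an address, and rewrite the file atomically only when its contents actually change.  The helper names here are illustrative only.

import os

def write_atomically(path, contents):
  # Write to a sibling temp file, then rename() over the destination.
  # rename() is atomic on POSIX, so readers never observe a partial file.
  tmp_path = path + '.tmp'
  f = open(tmp_path, 'w')
  try:
    f.write(contents)
  finally:
    f.close()
  os.rename(tmp_path, path)

def update_hosts(path, ip, hostname):
  # Mirrors _update_tmp_hosts: only rewrite when the contents changed, so
  # anything watching the file is not woken up needlessly.
  ip_line = '%s %s\n' % (ip, hostname) if ip else ''
  new_contents = '%s127.0.0.1 localhost' % ip_line
  if not os.path.exists(path) or open(path).read() != new_contents:
    write_atomically(path, new_contents)
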
diff --git a/conman/connection_manager_test.py b/conman/connection_manager_test.py
index 0297ca1..1f90f96 100755
--- a/conman/connection_manager_test.py
+++ b/conman/connection_manager_test.py
@@ -6,8 +6,10 @@
 import os
 import shutil
 import tempfile
+import time
 
 import connection_manager
+import experiment_testutils
 import interface_test
 import iw
 import status
@@ -179,22 +181,25 @@
   WIFI_SETCLIENT = ['echo', 'setclient']
   WIFI_STOPCLIENT = ['echo', 'stopclient']
 
-  def start_client(self):
-    client_was_up = self.client_up
-    was_attached = self.wifi.attached()
+  def _actually_start_client(self):
+    self.client_was_up = self.client_up
+    self.was_attached = self.wifi.attached()
+    self.wifi._secure_testonly = True
     # Do this before calling the super method so that the attach call at the end
     # succeeds.
-    if not client_was_up and not was_attached:
+    if not self.client_was_up and not self.was_attached:
       self.wifi._initial_ssid_testonly = self.ssid
       self.wifi.start_wpa_supplicant_testonly(self._wpa_control_interface)
 
-    super(WLANConfiguration, self).start_client()
+    return True
 
-    if not client_was_up:
+  def _post_start_client(self):
+    if not self.client_was_up:
       self.wifi.set_connection_check_result('succeed')
 
-      if was_attached:
+      if self.was_attached:
         self.wifi._wpa_control.ssid_testonly = self.ssid
+        self.wifi._wpa_control.secure_testonly = True
         self.wifi.add_connected_event()
 
       # Normally, wpa_supplicant would bring up the client interface, which
@@ -262,6 +267,7 @@
   IFUP = ['echo', 'ifup']
   IFPLUGD_ACTION = ['echo', 'ifplugd.action']
   BINWIFI = ['echo', 'wifi']
+  UPLOAD_LOGS_AND_WAIT = ['echo', 'upload-logs-and-wait']
 
   def __init__(self, *args, **kwargs):
     self._binwifi_commands = []
@@ -294,12 +300,16 @@
     self.can_connect_to_s3 = True
     # Will s2 fail rather than providing ACS access?
     self.s2_fail = False
+    # Will s3 fail to acquire a DHCP lease?
+    self.dhcp_failure_on_s3 = False
+    self.log_upload_count = 0
 
   def create_wifi_interfaces(self):
     super(ConnectionManager, self).create_wifi_interfaces()
     for wifi in self.wifi_interfaces_already_up:
       # pylint: disable=protected-access
       self.interface_by_name(wifi)._initial_ssid_testonly = 'my ssid'
+      self.interface_by_name(wifi)._secure_testonly = True
 
   @property
   def IP_LINK(self):
@@ -315,20 +325,23 @@
         wifi.add_terminating_event()
 
   def _try_bssid(self, wifi, bss_info):
+    wifi.add_disconnected_event()
     self.last_provisioning_attempt = bss_info
 
     super(ConnectionManager, self)._try_bssid(wifi, bss_info)
 
-    def connect(connection_check_result):
+    def connect(connection_check_result, dhcp_failure=False):
       # pylint: disable=protected-access
       if wifi.attached():
-        wifi._wpa_control._ssid_testonly = bss_info.ssid
+        wifi._wpa_control.ssid_testonly = bss_info.ssid
+        wifi._wpa_control.secure_testonly = False
         wifi.add_connected_event()
       else:
         wifi._initial_ssid_testonly = bss_info.ssid
+        wifi._secure_testonly = False
         wifi.start_wpa_supplicant_testonly(self._wpa_control_interface)
       wifi.set_connection_check_result(connection_check_result)
-      self.ifplugd_action(wifi.name, True)
+      self.ifplugd_action(wifi.name, True, dhcp_failure)
 
     if bss_info and bss_info.ssid == 's1':
       connect('fail')
@@ -339,7 +352,7 @@
       return True
 
     if bss_info and bss_info.ssid == 's3' and self.can_connect_to_s3:
-      connect('restricted')
+      connect('restricted', self.dhcp_failure_on_s3)
       return True
 
     return False
@@ -371,14 +384,14 @@
     super(ConnectionManager, self)._wifi_scan(wifi)
     wifi.wifi_scan_counter += 1
 
-  def ifplugd_action(self, interface_name, up):
+  def ifplugd_action(self, interface_name, up, dhcp_failure=False):
     # Typically, when moca comes up, conman calls ifplugd.action, which writes
     # this file.  Also, when conman starts, it calls ifplugd.action for eth0.
     self.write_interface_status_file(interface_name, '1' if up else '0')
 
     # ifplugd calls run-dhclient, which results in a gateway file if the link is
     # up (and working).
-    if up:
+    if up and not dhcp_failure:
       self.write_gateway_file('br0' if interface_name in ('eth0', 'moca0')
                               else interface_name)
 
@@ -400,6 +413,10 @@
 
     return self._wlan_configuration[band].client_up
 
+  def _try_upload_logs(self):
+    self.log_upload_count += 1
+    return super(ConnectionManager, self)._try_upload_logs()
+
   # Test methods
 
   def delete_wlan_config(self, band):
@@ -492,6 +509,10 @@
     os.unlink(ap_filename)
 
 
+def check_tmp_hosts(expected_contents):
+  wvtest.WVPASSEQ(open(connection_manager.TMP_HOSTS).read(), expected_contents)
+
+
 def connection_manager_test(radio_config, wlan_configs=None,
                             quantenna_interfaces=None, **cm_kwargs):
   """Returns a decorator that does ConnectionManager test boilerplate."""
@@ -506,6 +527,7 @@
       interface_update_period = 5
       wifi_scan_period = 15
       wifi_scan_period_s = run_duration_s * wifi_scan_period
+      dhcp_wait_s = .5
 
       # pylint: disable=protected-access
       old_wifi_show = connection_manager._wifi_show
@@ -517,6 +539,7 @@
 
       try:
         # No initial state.
+        connection_manager.TMP_HOSTS = tempfile.mktemp()
         tmp_dir = tempfile.mkdtemp()
         config_dir = tempfile.mkdtemp()
         os.mkdir(os.path.join(tmp_dir, 'interfaces'))
@@ -539,13 +562,18 @@
                               run_duration_s=run_duration_s,
                               interface_update_period=interface_update_period,
                               wifi_scan_period_s=wifi_scan_period_s,
+                              dhcp_wait_s=dhcp_wait_s,
+                              bssid_cycle_length_s=1,
                               **cm_kwargs)
 
         c.test_interface_update_period = interface_update_period
         c.test_wifi_scan_period = wifi_scan_period
+        c.test_dhcp_wait_s = dhcp_wait_s
 
         f(c)
       finally:
+        if os.path.exists(connection_manager.TMP_HOSTS):
+          os.unlink(connection_manager.TMP_HOSTS)
         shutil.rmtree(tmp_dir)
         shutil.rmtree(config_dir)
         shutil.rmtree(moca_tmp_dir)
@@ -583,6 +611,7 @@
   wvtest.WVPASS(c.internet())
   wvtest.WVPASS(c.has_status_files([status.P.CAN_REACH_ACS,
                                     status.P.CAN_REACH_INTERNET]))
+  hostname = connection_manager.HOSTNAME
 
   c.run_once()
   wvtest.WVPASS(c.acs())
@@ -646,12 +675,15 @@
   wvtest.WVFAIL(c.acs())
   wvtest.WVFAIL(c.internet())
   wvtest.WVFAIL(c.bridge.current_route())
+  check_tmp_hosts('127.0.0.1 localhost')
 
   # Now there are some scan results.
   c.interface_with_scan_results = c.wifi_for_band(band).name
-  # Wait for a scan, plus 3 cycles, so that s2 will have been tried.
+  # Wait for a scan, plus enough cycles that s2 will have been tried.
   c.run_until_scan(band)
-  for _ in range(3):
+  wvtest.WVPASSEQ(c.log_upload_count, 0)
+  c.wifi_for_band(band).ip_testonly = '192.168.1.100'
+  for _ in range(len(c.wifi_for_band(band).cycler)):
     c.run_once()
     wvtest.WVPASS(c.has_status_files([status.P.CONNECTED_TO_OPEN]))
 
@@ -665,8 +697,11 @@
   wvtest.WVPASS(c.internet())
   wvtest.WVFAIL(c.client_up(band))
   wvtest.WVPASS(c.wifi_for_band(band).current_route())
+  wvtest.WVPASSEQ(c.log_upload_count, 1)
   # Disable scan results again.
   c.interface_with_scan_results = None
+  c.run_until_interface_update()
+  check_tmp_hosts('192.168.1.100 %s\n127.0.0.1 localhost' % hostname)
 
   # Now, create a WLAN configuration which should be connected to.
   ssid = 'wlan'
@@ -704,6 +739,9 @@
   wvtest.WVPASS(c.has_status_files([status.P.CONNECTED_TO_OPEN]))
   wvtest.WVPASSEQ(c.last_provisioning_attempt.ssid, 's3')
   wvtest.WVPASSEQ(c.last_provisioning_attempt.bssid, 'ff:ee:dd:cc:bb:aa')
+  # The log upload happens on the next main loop after joining s3.
+  c.run_once()
+  wvtest.WVPASSEQ(c.log_upload_count, 2)
 
   # Now, recreate the same WLAN configuration, which should be connected to.
   # Also, test that atomic writes/renames work.
@@ -723,16 +761,20 @@
   wvtest.WVPASS(c.client_up(band))
   wvtest.WVPASS(c.wifi_for_band(band).current_route())
   wvtest.WVFAIL(c.bridge.current_route())
+  c.run_until_interface_update()
+  check_tmp_hosts('192.168.1.100 %s\n127.0.0.1 localhost' % hostname)
 
   # Now bring up the bridge.  We should remove the wifi connection and start
   # an AP.
   c.set_ethernet(True)
   c.bridge.set_connection_check_result('succeed')
+  c.bridge.ip_testonly = '192.168.1.101'
   c.run_until_interface_update()
   wvtest.WVPASS(c.access_point_up(band))
   wvtest.WVFAIL(c.client_up(band))
   wvtest.WVFAIL(c.wifi_for_band(band).current_route())
   wvtest.WVPASS(c.bridge.current_route())
+  check_tmp_hosts('192.168.1.101 %s\n127.0.0.1 localhost' % hostname)
 
   # Now move (rather than delete) the configuration file.  The AP should go
   # away, and we should not be able to join the WLAN.  Routes should not be
@@ -763,6 +805,7 @@
   c.run_until_interface_update()
   wvtest.WVFAIL(c.acs())
   wvtest.WVFAIL(c.internet())
+  check_tmp_hosts('127.0.0.1 localhost')
   # s3 is not what the cycler would suggest trying next.
   wvtest.WVPASSNE('s3', c.wifi_for_band(band).cycler.peek())
   # Run only once, so that only one BSS can be tried.  It should be the s3 one,
@@ -771,6 +814,8 @@
   wvtest.WVPASS(c.acs())
   # Make sure we didn't scan on `band`.
   wvtest.WVPASSEQ(scan_count_for_band, c.wifi_for_band(band).wifi_scan_counter)
+  c.run_once()
+  wvtest.WVPASSEQ(c.log_upload_count, 3)
 
   # Now re-create the WLAN config, connect to the WLAN, and make sure that s3 is
   # unset as last_successful_bss_info, since it is no longer available.
@@ -805,11 +850,16 @@
   wvtest.WVFAIL(c.wifi_for_band(band).acs())
 
   c.can_connect_to_s2 = True
-  # Give it time to try all BSSIDs.
-  for _ in range(3):
+  # Give it time to try all BSSIDs.  This means sleeping long enough that
+  # everything in the cycler is active, then doing n+1 loops (the (n+1)th loop
+  # is when we decide that the SSID tried in the nth loop was successful).
+  time.sleep(c._bssid_cycle_length_s)
+  for _ in range(len(c.wifi_for_band(band).cycler) + 1):
     c.run_once()
   s2_bss = iw.BssInfo('01:23:45:67:89:ab', 's2')
   wvtest.WVPASSEQ(c.wifi_for_band(band).last_successful_bss_info, s2_bss)
+  c.run_once()
+  wvtest.WVPASSEQ(c.log_upload_count, 4)
 
   c.s2_fail = True
   c.write_wlan_config(band, ssid, psk)
@@ -825,6 +875,32 @@
   c.run_until_interface_update()
   wvtest.WVPASSEQ(c.wifi_for_band(band).last_successful_bss_info, None)
 
+  # Test that we wait dhcp_wait_s seconds for a DHCP lease before trying the
+  # next BSSID.  The scan results contain an s3 AP with vendor IEs, and s3 is
+  # set up to fail to provide a DHCP lease.  The vendor IEs ensure that s3 is
+  # tried before any other AP, which lets us force a timeout and then check
+  # that we move on to the next AP.
+  del c.wifi_for_band(band).cycler
+  c.interface_with_scan_results = c.wifi_for_band(band).name
+  c.scan_results_include_hidden = True
+  c.can_connect_to_s3 = True
+  c.dhcp_failure_on_s3 = True
+  # First iteration: check that we try s3.
+  c.run_until_scan(band)
+  last_bss_info = c.wifi_for_band(band).last_attempted_bss_info
+  wvtest.WVPASSEQ(last_bss_info.ssid, 's3')
+  wvtest.WVPASSEQ(last_bss_info.bssid, 'ff:ee:dd:cc:bb:aa')
+  # Second iteration: check that we try s3 again since there's no gateway yet.
+  c.run_once()
+  last_bss_info = c.wifi_for_band(band).last_attempted_bss_info
+  wvtest.WVPASSEQ(last_bss_info.ssid, 's3')
+  wvtest.WVPASSEQ(last_bss_info.bssid, 'ff:ee:dd:cc:bb:aa')
+  # Third iteration: sleep for dhcp_wait_s and check that we try another AP.
+  time.sleep(c.test_dhcp_wait_s)
+  c.run_once()
+  last_bss_info = c.wifi_for_band(band).last_attempted_bss_info
+  wvtest.WVPASSNE(last_bss_info.ssid, 's3')
+  wvtest.WVPASSNE(last_bss_info.bssid, 'ff:ee:dd:cc:bb:aa')
+
 
 @wvtest.wvtest
 @connection_manager_test(WIFI_SHOW_OUTPUT_MARVELL8897)
@@ -954,14 +1030,16 @@
   # The next 2.4 GHz scan will have results.
   c.interface_with_scan_results = c.wifi_for_band('2.4').name
   c.run_until_scan('2.4')
-  # Now run 3 cycles, so that s2 will have been tried.
-  for _ in range(3):
+  # Now run for enough cycles that s2 will have been tried.
+  for _ in range(len(c.wifi_for_band('2.4').cycler)):
     c.run_once()
   c.run_until_interface_update()
   wvtest.WVPASS(c.acs())
   wvtest.WVFAIL(c.bridge.current_route())
   wvtest.WVPASS(c.wifi_for_band('2.4').current_route())
   wvtest.WVFAIL(c.wifi_for_band('5').current_route())
+  c.run_once()
+  wvtest.WVPASSEQ(c.log_upload_count, 1)
 
 
 @wvtest.wvtest
@@ -1044,16 +1122,18 @@
   wvtest.WVFAIL(c.wifi_for_band('2.4').current_route())
   wvtest.WVFAIL(c.wifi_for_band('5').current_route())
 
-  # The 2.4 GHz scan will have results that will lead to ACS access.
+  # The scan will have results that will lead to ACS access.
   c.interface_with_scan_results = c.wifi_for_band('2.4').name
   c.run_until_scan('5')
-  for _ in range(3):
+  for _ in range(len(c.wifi_for_band('2.4').cycler)):
     c.run_once()
   c.run_until_interface_update()
   wvtest.WVPASS(c.acs())
   wvtest.WVFAIL(c.bridge.current_route())
   wvtest.WVPASS(c.wifi_for_band('2.4').current_route())
   wvtest.WVPASS(c.wifi_for_band('5').current_route())
+  c.run_once()
+  wvtest.WVPASSEQ(c.log_upload_count, 1)
 
 
 @wvtest.wvtest
@@ -1139,5 +1219,29 @@
                 in c._binwifi_commands)
 
 
+@wvtest.wvtest
+@connection_manager_test(WIFI_SHOW_OUTPUT_MARVELL8897)
+def connection_manager_conman_no_2g_wlan(c):
+  unused_raii = experiment_testutils.MakeExperimentDirs()
+
+  # First, establish that we connect on 2.4 without the experiment, to make sure
+  # this test doesn't spuriously pass.
+  c.write_wlan_config('2.4', 'my ssid', 'my psk')
+  c.run_once()
+  wvtest.WVPASS(c.client_up('2.4'))
+
+  # Now, force a disconnect by deleting the config.
+  c.delete_wlan_config('2.4')
+  c.run_once()
+  wvtest.WVFAIL(c.client_up('2.4'))
+
+  # Now enable the experiment, recreate the config, and make sure we don't
+  # connect.
+  experiment_testutils.enable('WifiNo2GClient')
+  c.write_wlan_config('2.4', 'my ssid', 'my psk')
+  c.run_once()
+  wvtest.WVFAIL(c.client_up('2.4'))
+
+
 if __name__ == '__main__':
   wvtest.wvtest_main()
diff --git a/conman/cycler.py b/conman/cycler.py
index ff65bfc..23f2ce4 100755
--- a/conman/cycler.py
+++ b/conman/cycler.py
@@ -27,9 +27,10 @@
       queue after being automatically reinserted.
       items: Initial items for the queue, as tuples of (item, priority).
     """
-    t = time.time()
-    self._items = {item: [priority, t] for item, priority in items}
     self._min_time_in_queue_s = cycle_length_s
+    self._items = {}
+    if items:
+      self.update(items)
 
   def empty(self):
     return not self._items
@@ -79,3 +80,23 @@
 
     return result
 
+  def update(self, items):
+    """Update to the given items, adding new ones and removing old ones.
+
+    Args:
+      items:  An iterable of (item, priority).
+    """
+    now = time.time()
+    new_items = {}
+    for item, priority in items:
+      t = now
+      existing = self._items.get(item, None)
+      if existing:
+        t = existing[1]
+      new_items[item] = [priority, t]
+
+    self._items = new_items
+
+  def __len__(self):
+    return len(self._items)
+
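
A small usage sketch of the new update()/__len__() behaviour (illustrative only; assumes the module is importable as cycler, as connection_manager.py does):

import time

import cycler

c = cycler.AgingPriorityCycler(cycle_length_s=1,
                               items=[('A', 10), ('B', 5)])
time.sleep(1)          # let both items age for one full cycle
print c.next()         # 'A': highest aged priority comes out first

# update() replaces the item set: 'B' is dropped, 'C' is added with a fresh
# insertion time, and 'A' keeps the insertion time it already had.
c.update([('A', 10), ('C', 20)])
print len(c)           # 2
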
diff --git a/conman/cycler_test.py b/conman/cycler_test.py
index c4e498b..de1e6c0 100755
--- a/conman/cycler_test.py
+++ b/conman/cycler_test.py
@@ -20,26 +20,40 @@
   # We should get all three in order, since they all have the same insertion
   # time.  They will all get slightly different insertion times, but next()
   # should be fast enough that the differences don't make much difference.
-  wvtest.WVPASS(c.peek() == 'A')
-  wvtest.WVPASS(c.next() == 'A')
-  wvtest.WVPASS(c.next() == 'B')
-  wvtest.WVPASS(c.next() == 'C')
+  wvtest.WVPASSEQ(c.peek(), 'A')
+  wvtest.WVPASSEQ(c.next(), 'A')
+  wvtest.WVPASSEQ(c.next(), 'B')
+  wvtest.WVPASSEQ(c.next(), 'C')
   wvtest.WVPASS(c.peek() is None)
   wvtest.WVPASS(c.next() is None)
   wvtest.WVPASS(c.next() is None)
 
   # Now, wait for items to be ready again and just cycle one of them.
   time.sleep(cycle_length_s)
-  wvtest.WVPASS(c.next() == 'A')
+  wvtest.WVPASSEQ(c.next(), 'A')
 
   # Now, if we wait 1.9 cycles, the aged priorities will be as follows:
   # A: 0.9 * 10 = 9
   # B: 1.9 * 5 = 9.5
   # C: 1.9 * 1 = 1.9
   time.sleep(cycle_length_s * 1.9)
-  wvtest.WVPASS(c.next() == 'B')
-  wvtest.WVPASS(c.next() == 'A')
-  wvtest.WVPASS(c.next() == 'C')
+  wvtest.WVPASSEQ(c.next(), 'B')
+  wvtest.WVPASSEQ(c.next(), 'A')
+  wvtest.WVPASSEQ(c.next(), 'C')
+
+  # Update c, keeping A as-is, removing B, updating C's priority, and adding D.
+  # Sleep for two cycles.  After the first cycle, D has priority 20 and A and C
+  # have priority 0 (since we just cycled them).  After the second cycle, the
+  # priorities are as follows:
+  # A: 1 * 10 = 10
+  # C: 1 * 20 = 20
+  # D: 2 * 20 = 40
+  c.update((('A', 10), ('C', 20), ('D', 20)))
+  time.sleep(cycle_length_s * 2)
+  wvtest.WVPASSEQ(c.next(), 'D')
+  wvtest.WVPASSEQ(c.next(), 'C')
+  wvtest.WVPASSEQ(c.next(), 'A')
+  wvtest.WVPASS(c.next() is None)
 
 if __name__ == '__main__':
   wvtest.wvtest_main()
diff --git a/conman/interface.py b/conman/interface.py
index 04c809c..e172a82 100755
--- a/conman/interface.py
+++ b/conman/interface.py
@@ -28,6 +28,7 @@
 
   CONNECTION_CHECK = 'connection_check'
   IP_ROUTE = ['ip', 'route']
+  IP_ADDR_SHOW = ['ip', 'addr', 'show', 'dev']
 
   def __init__(self, name, metric):
     self.name = name
@@ -57,18 +58,18 @@
     """
     # Until initialized, we want to act as if the interface is down.
     if not self._initialized:
-      logging.debug('%s not initialized; not running connection_check%s',
-                    self.name, ' (ACS)' if check_acs else '')
+      logging.info('%s not initialized; not running connection_check%s',
+                   self.name, ' (ACS)' if check_acs else '')
       return None
 
     if not self.links:
-      logging.debug('Connection check for %s failed due to no links', self.name)
+      logging.info('Connection check for %s failed due to no links', self.name)
       return False
 
     logging.debug('Gateway IP for %s is %s', self.name, self._gateway_ip)
     if self._gateway_ip is None:
-      logging.debug('Connection check for %s failed due to no gateway IP',
-                    self.name)
+      logging.info('Connection check%s for %s failed due to no gateway IP',
+                   ' (ACS)' if check_acs else '', self.name)
       return False
 
     # Temporarily add a route to make sure the connection check can be run.
@@ -90,10 +91,10 @@
 
     with open(os.devnull, 'w') as devnull:
       result = subprocess.call(cmd, stdout=devnull, stderr=devnull) == 0
-      logging.debug('Connection check%s for %s %s',
-                    ' (ACS)' if check_acs else '',
-                    self.name,
-                    'passed' if result else 'failed')
+      logging.info('Connection check%s for %s %s',
+                   ' (ACS)' if check_acs else '',
+                   self.name,
+                   'passed' if result else 'failed')
 
     # Delete the temporary route.
     if added_temporary_route:
@@ -105,6 +106,9 @@
 
     return result
 
+  def gateway(self):
+    return self._gateway_ip
+
   def acs(self):
     if self._has_acs is None:
       self._has_acs = self._connection_check(check_acs=True)
@@ -175,8 +179,8 @@
 
   def _ip_route(self, *args):
     if not self._initialized:
-      logging.debug('Not initialized, not running %s %s',
-                    ' '.join(self.IP_ROUTE), ' '.join(args))
+      logging.info('Not initialized, not running %s %s',
+                   ' '.join(self.IP_ROUTE), ' '.join(args))
       return ''
 
     return self._really_ip_route(*args)
@@ -190,8 +194,20 @@
                     e.message)
       return ''
 
+  def _ip_addr_show(self):
+    try:
+      return subprocess.check_output(self.IP_ADDR_SHOW + [self.name])
+    except subprocess.CalledProcessError as e:
+      logging.error('Could not get IP address for %s: %s', self.name, e.message)
+      return ''
+
+  def get_ip_address(self):
+    match = re.search(r'^\s*inet (?P<IP>\d+\.\d+\.\d+\.\d+)',
+                      self._ip_addr_show(), re.MULTILINE)
+    return match and match.group('IP') or None
+
   def set_gateway_ip(self, gateway_ip):
-    logging.debug('New gateway IP %s for %s', gateway_ip, self.name)
+    logging.info('New gateway IP %s for %s', gateway_ip, self.name)
     self._gateway_ip = gateway_ip
     self.update_routes()
 
@@ -203,10 +219,10 @@
     had_links = bool(self.links)
 
     if is_up:
-      logging.debug('%s gained link %s', self.name, link)
+      logging.info('%s gained link %s', self.name, link)
       self.links.add(link)
     else:
-      logging.debug('%s lost link %s', self.name, link)
+      logging.info('%s lost link %s', self.name, link)
       self.links.remove(link)
 
     # If a link goes away, we may have lost access to something but not gained
@@ -319,9 +335,9 @@
     failure_s = self._acs_session_failure_s()
     if (experiment.enabled('WifiSimulateWireless')
         and failure_s < MAX_ACS_FAILURE_S):
-      logging.debug('WifiSimulateWireless: failing bridge connection check (no '
-                    'ACS contact for %d seconds, max %d seconds)',
-                    failure_s, MAX_ACS_FAILURE_S)
+      logging.info('WifiSimulateWireless: failing bridge connection check%s '
+                   '(no ACS contact for %d seconds, max %d seconds)',
+                   ' (ACS)' if check_acs else '', failure_s, MAX_ACS_FAILURE_S)
       return False
 
     return super(Bridge, self)._connection_check(check_acs)
@@ -379,14 +395,17 @@
       return True
 
     socket = os.path.join(path, self.name)
+    logging.debug('%s socket is %s', self.name, socket)
     try:
       self._wpa_control = self.get_wpa_control(socket)
       self._wpa_control.attach()
+      logging.debug('%s successfully attached', self.name)
     except wpactrl.error as e:
       logging.error('Error attaching to wpa_supplicant: %s', e)
       return False
 
     status = self.wpa_status()
+    logging.debug('%s status after attaching is %s', self.name, status)
     self.wpa_supplicant = status.get('wpa_state') == 'COMPLETED'
     if not self._initialized:
       self.initial_ssid = status.get('ssid')
@@ -403,17 +422,21 @@
     status = {}
 
     if self._wpa_control and self._wpa_control.attached:
+      logging.debug('%s ctrl_iface_path %s',
+                    self, self._wpa_control.ctrl_iface_path)
       lines = []
       try:
         lines = self._wpa_control.request('STATUS').splitlines()
-      except wpactrl.error:
-        logging.error('wpa_control STATUS request failed')
+      except wpactrl.error as e:
+        logging.error('wpa_control STATUS request failed %s args %s',
+                      e.message, e.args)
       for line in lines:
         if '=' not in line:
           continue
         k, v = line.strip().split('=', 1)
         status[k] = v
 
+    logging.debug('%s wpa status is %s', self.name, status)
     return status
 
   def get_wpa_control(self, socket):
@@ -442,7 +465,7 @@
         if event == 'CONNECTED':
           self.wpa_supplicant = True
         elif event in ('DISCONNECTED', 'TERMINATING', 'ASSOC-REJECT',
-                       'AUTH-REJECT'):
+                       'SSID-TEMP-DISABLED', 'AUTH-REJECT'):
           self.wpa_supplicant = False
           if event == 'TERMINATING':
             self.detach_wpa_control()
@@ -468,17 +491,22 @@
   WIFIINFO_PATH = '/tmp/wifi/wifiinfo'
 
   def __init__(self, socket):
-    self._interface = os.path.split(socket)[-1]
+    self.ctrl_iface_path, self._interface = os.path.split(socket)
 
     # State from QCSAPI and wifi_files.
     self._client_mode = False
     self._ssid = None
     self._status = None
+    self._security = None
 
     self._events = []
 
   def _qcsapi(self, *command):
-    return subprocess.check_output(['qcsapi'] + list(command)).strip()
+    try:
+      return subprocess.check_output(['qcsapi'] + list(command)).strip()
+    except subprocess.CalledProcessError as e:
+      logging.error('QCSAPI call failed: %s: %s', e, e.output)
+      raise
 
   def attach(self):
     self._update()
@@ -500,6 +528,7 @@
       client_mode = self._qcsapi('get_mode', 'wifi0') == 'Station'
       ssid = self._qcsapi('get_ssid', 'wifi0')
       status = self._qcsapi('get_status', 'wifi0')
+      security = self._qcsapi('ssid_get_authentication_mode', 'wifi0', ssid)
     except subprocess.CalledProcessError:
       # If QCSAPI failed, skip update.
       return
@@ -520,7 +549,7 @@
     # return 'Error', we are probably not connected, and we don't do anything
     # special with auth/assoc failures specifically.
     if client_mode and status == 'Error' and self._status != 'Error':
-      self._events.append('<2>CTRL-EVENT-AUTH-REJECT')
+      self._events.append('<2>CTRL-EVENT-SSID-TEMP-DISABLED')
 
     # If we left client mode, wpa_supplicant has terminated.
     if self._client_mode and not client_mode:
@@ -529,6 +558,7 @@
     self._client_mode = client_mode
     self._ssid = ssid
     self._status = status
+    self._security = security
 
   def recv(self):
     return self._events.pop(0)
@@ -544,7 +574,8 @@
     if not self._client_mode or not self._ssid:
       return ''
 
-    return 'wpa_state=COMPLETED\nssid=%s' % self._ssid
+    return ('wpa_state=COMPLETED\nssid=%s\nkey_mgmt=%s' %
+            (self._ssid, self._security or 'NONE'))
 
 
 class FrenzyWifi(Wifi):
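
For reference, a short sketch of the parsing that the new get_ip_address() performs, using a made-up 'ip addr show' snippet (the interface name and address here are purely illustrative):

import re

SAMPLE_IP_ADDR_SHOW = """4: wcli0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000
    inet 192.168.1.100/21 brd 192.168.7.255 scope global wcli0
       valid_lft forever preferred_lft forever
"""

# Same pattern as Interface.get_ip_address: the first IPv4 'inet' line wins.
match = re.search(r'^\s*inet (?P<IP>\d+\.\d+\.\d+\.\d+)',
                  SAMPLE_IP_ADDR_SHOW, re.MULTILINE)
print match.group('IP')  # 192.168.1.100
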
diff --git a/conman/interface_test.py b/conman/interface_test.py
index 4c7d52b..13dcf14 100755
--- a/conman/interface_test.py
+++ b/conman/interface_test.py
@@ -22,6 +22,13 @@
 from wvtest import wvtest
 
 
+# pylint: disable=line-too-long
+_IP_ADDR_SHOW_TPL = """4: {name}: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP qlen 1000
+    inet {ip}/21 brd 100.100.55.255 scope global {name}
+       valid_lft forever preferred_lft forever
+"""
+
+
 class FakeInterfaceMixin(object):
   """Replace Interface methods which interact with the system."""
 
@@ -29,6 +36,7 @@
     super(FakeInterfaceMixin, self).__init__(*args, **kwargs)
     self.set_connection_check_result('succeed')
     self.routing_table = {}
+    self.ip_testonly = None
 
   def set_connection_check_result(self, result):
     if result in ['succeed', 'fail', 'restricted']:
@@ -63,6 +71,12 @@
             del self.routing_table[k]
             break
 
+  def _ip_addr_show(self):
+    if self.ip_testonly:
+      return _IP_ADDR_SHOW_TPL.format(name=self.name, ip=self.ip_testonly)
+
+    return ''
+
 
 class Bridge(FakeInterfaceMixin, interface.Bridge):
   pass
@@ -78,6 +92,7 @@
     self.attached = False
     self.connected = False
     self.ssid_testonly = None
+    self.secure_testonly = False
     self.request_status_fails = False
 
   def pending(self):
@@ -96,6 +111,7 @@
   def detach(self):
     self.attached = False
     self.ssid_testonly = None
+    self.secure_testonly = False
     self.connected = False
     self.check_socket_exists('wpactrl_detach failed')
 
@@ -103,11 +119,19 @@
     if request_type == 'STATUS':
       if self.request_status_fails:
         raise wpactrl.error('test error')
-      return ('foo\nwpa_state=COMPLETED\nssid=%s\nbar' % self.ssid_testonly
-              if self.connected else 'foo')
+      if self.connected:
+        return ('foo\nwpa_state=COMPLETED\nssid=%s\nkey_mgmt=%s\nbar' %
+                (self.ssid_testonly,
+                 'WPA2-PSK' if self.secure_testonly else 'NONE'))
+      else:
+        return 'wpa_state=SCANNING\naddress=12:34:56:78:90:ab'
     else:
       raise ValueError('Invalid request_type %s' % request_type)
 
+  @property
+  def ctrl_iface_path(self):
+    return os.path.split(self._socket)[0]
+
   # Below methods are not part of WPACtrl.
 
   def add_event(self, event):
@@ -142,6 +166,7 @@
   def __init__(self, *args, **kwargs):
     super(Wifi, self).__init__(*args, **kwargs)
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
 
   def attach_wpa_control(self, path):
     if self._initial_ssid_testonly and self._wpa_control:
@@ -153,6 +178,7 @@
     if self._initial_ssid_testonly:
       result.connected = True
       result.ssid_testonly = self._initial_ssid_testonly
+      result.secure_testonly = self._secure_testonly
     return result
 
   def add_connected_event(self):
@@ -161,16 +187,19 @@
 
   def add_disconnected_event(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     if self.attached():
       self._wpa_control.add_disconnected_event()
 
   def add_terminating_event(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     if self.attached():
       self._wpa_control.add_terminating_event()
 
   def detach_wpa_control(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     super(Wifi, self).detach_wpa_control()
 
   def start_wpa_supplicant_testonly(self, path):
@@ -191,6 +220,7 @@
   def __init__(self, *args, **kwargs):
     super(FrenzyWPACtrl, self).__init__(*args, **kwargs)
     self.ssid_testonly = None
+    self.secure_testonly = False
     self.request_status_fails = False
 
   def _qcsapi(self, *command):
@@ -199,15 +229,21 @@
   def add_connected_event(self):
     self.fake_qcsapi['get_mode'] = 'Station'
     self.fake_qcsapi['get_ssid'] = self.ssid_testonly
+    security = 'PSKAuthentication' if self.secure_testonly else 'NONE'
+    self.fake_qcsapi['ssid_get_authentication_mode'] = security
 
   def add_disconnected_event(self):
     self.ssid_testonly = None
+    self.secure_testonly = False
     self.fake_qcsapi['get_ssid'] = None
+    self.fake_qcsapi['ssid_get_authentication_mode'] = 'NONE'
 
   def add_terminating_event(self):
     self.ssid_testonly = None
+    self.secure_testonly = False
     self.fake_qcsapi['get_ssid'] = None
     self.fake_qcsapi['get_mode'] = 'AP'
+    self.fake_qcsapi['ssid_get_authentication_mode'] = 'NONE'
 
   def detach(self):
     self.add_terminating_event()
@@ -226,12 +262,14 @@
   def __init__(self, *args, **kwargs):
     super(FrenzyWifi, self).__init__(*args, **kwargs)
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     self.fake_qcsapi = {}
 
   def attach_wpa_control(self, *args, **kwargs):
     super(FrenzyWifi, self).attach_wpa_control(*args, **kwargs)
     if self._wpa_control:
       self._wpa_control.ssid_testonly = self._initial_ssid_testonly
+      self._wpa_control.secure_testonly = self._secure_testonly
       if self._initial_ssid_testonly:
         self._wpa_control.add_connected_event()
 
@@ -241,6 +279,7 @@
     if self._initial_ssid_testonly:
       result.fake_qcsapi['get_mode'] = 'Station'
       result.ssid_testonly = self._initial_ssid_testonly
+      result.secure_testonly = self._secure_testonly
       result.add_connected_event()
     return result
 
@@ -250,16 +289,19 @@
 
   def add_disconnected_event(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     if self.attached():
       self._wpa_control.add_disconnected_event()
 
   def add_terminating_event(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     if self.attached():
       self._wpa_control.add_terminating_event()
 
   def detach_wpa_control(self):
     self._initial_ssid_testonly = None
+    self._secure_testonly = False
     super(FrenzyWifi, self).detach_wpa_control()
 
   def start_wpa_supplicant_testonly(self, unused_path):
@@ -337,6 +379,10 @@
     wvtest.WVPASS(b.current_route())
     wvtest.WVPASS(os.path.exists(autoprov_filepath))
 
+    wvtest.WVFAIL(b.get_ip_address())
+    b.ip_testonly = '192.168.1.100'
+    wvtest.WVPASSEQ(b.get_ip_address(), '192.168.1.100')
+
   finally:
     shutil.rmtree(tmp_dir)
 
diff --git a/conman/iw.py b/conman/iw.py
index 300c3e2..f2e15d8 100755
--- a/conman/iw.py
+++ b/conman/iw.py
@@ -6,10 +6,19 @@
 import subprocess
 
 
-FIBER_OUI = 'f4:f5:e8'
+GFIBER_VENDOR_IE_OUI = 'f4:f5:e8'
+GFIBER_OUIS = ['00:1a:11', 'f4:f5:e8', 'f8:8f:ca']
+VENDOR_IE_FEATURE_ID_AUTOPROVISIONING = '01'
 DEFAULT_GFIBERSETUP_SSID = 'GFiberSetupAutomation'
 
 
+_BSSID_RE = r'BSS (?P<BSSID>([0-9a-f]{2}:?){6})\(on .*\)'
+_SSID_RE = r'SSID: (?P<SSID>.*)'
+_RSSI_RE = r'signal: (?P<RSSI>.*) dBm'
+_VENDOR_IE_RE = (r'Vendor specific: OUI (?P<OUI>([0-9a-f]{2}:?){3}), '
+                 'data:(?P<data>( [0-9a-f]{2})+)')
+
+
 def _scan(band, **kwargs):
   try:
     return subprocess.check_output(('wifi', 'scan', '-b', band), **kwargs)
@@ -17,24 +26,20 @@
     return ''
 
 
-_BSSID_RE = r'BSS (?P<BSSID>([0-9a-f]{2}:?){6})\(on .*\)'
-_SSID_RE = r'SSID: (?P<SSID>.*)'
-_VENDOR_IE_RE = (r'Vendor specific: OUI (?P<OUI>([0-9a-f]{2}:?){3}), '
-                 'data:(?P<data>( [0-9a-f]{2})+)')
-
-
 class BssInfo(object):
   """Contains info about a BSS, parsed from 'iw scan'."""
 
-  def __init__(self, bssid='', ssid='', security=None, vendor_ies=None):
+  def __init__(self, bssid='', ssid='', rssi=-100, security=None,
+               vendor_ies=None):
     self.bssid = bssid
     self.ssid = ssid
+    self.rssi = rssi
     self.vendor_ies = vendor_ies or []
     self.security = security or []
 
   def __attrs(self):
     return (self.bssid, self.ssid, tuple(sorted(self.vendor_ies)),
-            tuple(sorted(self.security)))
+            tuple(sorted(self.security)), self.rssi)
 
   def __eq__(self, other):
     # pylint: disable=protected-access
@@ -70,6 +75,10 @@
     if match:
       bss_info.ssid = match.group('SSID')
       continue
+    match = re.match(_RSSI_RE, line)
+    if match:
+      bss_info.rssi = float(match.group('RSSI'))
+      continue
     match = re.match(_VENDOR_IE_RE, line)
     if match:
       bss_info.vendor_ies.append((match.group('OUI'),
@@ -88,29 +97,27 @@
   return result
 
 
-def find_bssids(band, vendor_ie_function, include_secure):
+def find_bssids(band, include_secure):
   """Return information about interesting access points.
 
   Args:
     band:  The band on which to scan.
-    vendor_ie_function:  A function that takes a vendor IE and returns a bool.
-    include_secure:  Whether to exclude secure networks.
+    include_secure:  Whether to include secure networks.
 
   Returns:
-    Two lists of tuples of the form (SSID, BSSID info dict).  The first list has
-    BSSIDs which have a vendor IE accepted by vendor_ie_function, and the second
-    list has those which don't.
+    A list of (BSSID, priority) tuples, prioritizing BSSIDs with the GFiber
+    provisioning vendor IE > GFiber APs > other APs, and by RSSI within each
+    group.
   """
   parsed = scan_parsed(band)
-  result_with_ie = set()
-  result_without_ie = set()
+  bssids = set()
 
   for bss_info in parsed:
     if bss_info.security and not include_secure:
       continue
 
     for oui, data in bss_info.vendor_ies:
-      if oui == FIBER_OUI:
+      if oui == GFIBER_VENDOR_IE_OUI:
         octets = data.split()
         if octets[0] == '03' and not bss_info.ssid:
           bss_info.ssid = ''.join(octets[1:]).decode('hex')
@@ -121,11 +128,16 @@
     if not bss_info.ssid and not bss_info.vendor_ies:
       bss_info.ssid = DEFAULT_GFIBERSETUP_SSID
 
-    for oui, data in bss_info.vendor_ies:
-      if vendor_ie_function(oui, data):
-        result_with_ie.add(bss_info)
-        break
-    else:
-      result_without_ie.add(bss_info)
+    bssids.add(bss_info)
 
-  return result_with_ie, result_without_ie
+  return [(bss_info, _bssid_priority(bss_info)) for bss_info in bssids]
+
+
+def _bssid_priority(bss_info):
+  result = 4 if bss_info.bssid[:8] in GFIBER_OUIS else 2
+  for oui, data in bss_info.vendor_ies:
+    if (oui == GFIBER_VENDOR_IE_OUI and
+        data.startswith(VENDOR_IE_FEATURE_ID_AUTOPROVISIONING)):
+      result = 5
+
+  return result + (100 + (max(bss_info.rssi, -100))) / 100.0
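
To make the new prioritization concrete, this is the arithmetic _bssid_priority performs for a few representative scan results; the values match the expectations in iw_test.py below.

def priority(base, rssi):
  # base is 5 for a BSS advertising the GFiber autoprovisioning vendor IE,
  # 4 for a GFiber OUI without it, and 2 otherwise; RSSI (clamped at -100 dBm)
  # contributes a fractional tie-breaker in [0, 1).
  return base + (100 + max(rssi, -100)) / 100.0

print priority(5, -66)   # 5.34  GFiber AP with the autoprovisioning IE
print priority(4, -66)   # 4.34  GFiber OUI, no autoprovisioning IE
print priority(2, -38)   # 2.62  third-party AP, strong signal
print priority(2, -67)   # 2.33  third-party AP, weaker signal
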
diff --git a/conman/iw_test.py b/conman/iw_test.py
index 3f80d6c..55b2e7b 100755
--- a/conman/iw_test.py
+++ b/conman/iw_test.py
@@ -551,8 +551,9 @@
      * VI: CW 7-15, AIFSN 2, TXOP 3008 usec
      * VO: CW 3-7, AIFSN 2, TXOP 1504 usec
   Vendor specific: OUI 00:11:22, data: 01 23 45 67
+  Vendor specific: OUI f4:f5:e8, data: 01
   Vendor specific: OUI f4:f5:e8, data: 03 47 46 69 62 65 72 53 65 74 75 70 41 75 74 6f 6d 61 74 69 6f 6e
-BSS f4:f5:e8:f1:36:43(on wcli0)
+BSS 00:1a:11:f1:36:43(on wcli0)
   TSF: 12499150000 usec (0d, 03:28:19)
   freq: 2437
   beacon interval: 100 TUs
@@ -629,49 +630,55 @@
 def find_bssids_test():
   """Test iw.find_bssids."""
   test_ie = ('00:11:22', '01 23 45 67')
+  provisioning_ie = ('f4:f5:e8', '01')
   ssid_ie = (
       'f4:f5:e8',
       '03 47 46 69 62 65 72 53 65 74 75 70 41 75 74 6f 6d 61 74 69 6f 6e',
   )
   short_scan_result = iw.BssInfo(ssid='short scan result',
                                  bssid='00:23:97:57:f4:d8',
+                                 rssi=-60,
                                  security=['WEP'],
                                  vendor_ies=[test_ie])
   provisioning_bss_info = iw.BssInfo(ssid=iw.DEFAULT_GFIBERSETUP_SSID,
                                      bssid='94:b4:0f:f1:36:42',
-                                     vendor_ies=[test_ie, ssid_ie])
+                                     rssi=-66,
+                                     vendor_ies=[test_ie, provisioning_ie,
+                                                 ssid_ie])
   provisioning_bss_info_frenzy = iw.BssInfo(ssid=iw.DEFAULT_GFIBERSETUP_SSID,
-                                            bssid='f4:f5:e8:f1:36:43')
-
-  with_ie, without_ie = iw.find_bssids('wcli0', lambda o, d: o == '00:11:22',
-                                       True)
-
-  wvtest.WVPASSEQ(with_ie, set([short_scan_result, provisioning_bss_info]))
+                                            bssid='00:1a:11:f1:36:43',
+                                            rssi=-66)
 
   wvtest.WVPASSEQ(
-      without_ie,
-      set([provisioning_bss_info_frenzy,
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:36:41'),
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:3a:e1'),
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:35:61'),
-           iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:36:40',
-                      security=['WPA2']),
-           iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:3a:e0',
-                      security=['WPA2']),
-           iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:35:60',
-                      security=['WPA2']),
-           iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:02:a0',
-                      security=['WPA2'])]))
+      set(iw.find_bssids('wcli0', True)),
+      set([(short_scan_result, 2.4),
+           (provisioning_bss_info, 5.34),
+           (provisioning_bss_info_frenzy, 4.34),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:36:41', rssi=-67),
+            2.33),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:3a:e1', rssi=-65),
+            2.35),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:35:61', rssi=-38),
+            2.62),
+           (iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:36:40', rssi=-66,
+                       security=['WPA2']), 2.34),
+           (iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:3a:e0', rssi=-55,
+                       security=['WPA2']), 2.45),
+           (iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:35:60', rssi=-39,
+                       security=['WPA2']), 2.61),
+           (iw.BssInfo(ssid='Google', bssid='94:b4:0f:f1:02:a0', rssi=-54,
+                       security=['WPA2']), 2.46)]))
 
-  with_ie, without_ie = iw.find_bssids('wcli0', lambda o, d: o == '00:11:22',
-                                       False)
-  wvtest.WVPASSEQ(with_ie, set([provisioning_bss_info]))
   wvtest.WVPASSEQ(
-      without_ie,
-      set([provisioning_bss_info_frenzy,
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:36:41'),
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:3a:e1'),
-           iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:35:61')]))
+      set(iw.find_bssids('wcli0', False)),
+      set([(provisioning_bss_info, 5.34),
+           (provisioning_bss_info_frenzy, 4.34),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:36:41', rssi=-67),
+            2.33),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:3a:e1', rssi=-65),
+            2.35),
+           (iw.BssInfo(ssid='GoogleGuest', bssid='94:b4:0f:f1:35:61', rssi=-38),
+            2.62)]))
 
 if __name__ == '__main__':
   wvtest.wvtest_main()
diff --git a/craftui/HOW.restart_if_changed b/craftui/HOW.restart_if_changed
index 2a26b18..131bf6d 100644
--- a/craftui/HOW.restart_if_changed
+++ b/craftui/HOW.restart_if_changed
@@ -12,7 +12,6 @@
   [ -n "$pid2" ] && kill $pid2
   echo "######################################################################"
   echo "# starting craftui"
-  gpylint *.py
   make test
   ./craftui &
   pid1=$!
diff --git a/craftui/HOW.updatesim b/craftui/HOW.updatesim
index f01d964..00d1b8e 100644
--- a/craftui/HOW.updatesim
+++ b/craftui/HOW.updatesim
@@ -12,6 +12,9 @@
 		ip -o -d link > /tmp/sim/ip.link.txt;
 		ip -o addr > /tmp/sim/ip.addr.txt;
 		presterastats > /tmp/sim/presterastats.json;
+		for n in adc rxrrc rxslicer dac; do
+			curl -s http://localhost:8080/api/modem/iq/$n > /tmp/glaukus/$n.json
+		done
 		'
 
 	ssh chimera$suffix cd / "&&" tar czf - -C / \
@@ -25,6 +28,8 @@
 		tmp/ssl \
 		tmp/platform \
 		tmp/gpio \
+		tmp/cpss_ready \
+		tmp/peer-"*" \
 		tmp/sim \
 		> sim$suffix.tgz
 
diff --git a/craftui/Makefile b/craftui/Makefile
index dbb2082..f196e8d 100644
--- a/craftui/Makefile
+++ b/craftui/Makefile
@@ -4,12 +4,16 @@
 BINDIR=$(DESTDIR)$(PREFIX)/bin
 WWWDIR=$(DESTDIR)$(PREFIX)/usr/craftui
 PYTHON?=python
+PY=$(wildcard *.py)
+
+SKIPPY=png.py
+LINTPY=$(filter-out $(SKIPPY), $(PY))
 
 all:
 
 install:
 	mkdir -p $(BINDIR) $(WWWDIR)
-	cp craftui craftui.py $(BINDIR)
+	cp craftui craftui.py png.py $(BINDIR)
 	cp -rp www $(WWWDIR)
 
 install-libs:
@@ -34,4 +38,4 @@
 	rm -rf *.pyc
 
 lint:
-	for n in *.py; do gpylint $$n || exit 1; done
+	for n in $(LINTPY); do gpylint $$n || exit 1; done
diff --git a/craftui/craftui b/craftui/craftui
index 9d2a17a..2250595 100755
--- a/craftui/craftui
+++ b/craftui/craftui
@@ -3,6 +3,8 @@
 pycode=/bin/craftui.py
 cw=/usr/catawampus
 devcw=../../../../vendor/google/catawampus
+tornado=
+devtornado=../../../../vendor/opensource/tornado
 localwww=./www
 
 # in developer environment if vendor/google/catawapus is above us
@@ -18,6 +20,7 @@
 # if running from developer desktop, use simulated data
 if [ -n "$sim" ]; then
   cw="$devcw"
+  tornado="$devtornado"
   args="$args --http-port=$((8888+2*($sim-1)))"
   args="$args --https-port=$((8889+2*($sim-1)))"
   args="$args --sim=./sim$sim"
@@ -43,5 +46,5 @@
   exit 1
 done
 
-export PYTHONPATH="$cw/tr/vendor/tornado:$cw/tr/vendor/curtain:$PYTHONPATH"
+export PYTHONPATH="$tornado:$cw/tr/vendor/curtain:$PYTHONPATH"
 exec python -u $debug $pycode $args $httpsmode
diff --git a/craftui/craftui.py b/craftui/craftui.py
index 2971729..f726bf2 100755
--- a/craftui/craftui.py
+++ b/craftui/craftui.py
@@ -22,10 +22,12 @@
 import json
 import os
 import re
+import StringIO
 import subprocess
 import sys
 import urllib2
 import digest
+import png
 import tornado.httpserver
 import tornado.ioloop
 import tornado.web
@@ -429,12 +431,54 @@
 
     return text
 
+  def GetValue(self, data, path):
+    """Walk down the dicts to get a value."""
+    keys = path.split('/')
+    v = data
+    for key in keys:
+      if v:
+        v = v.get(key, None)
+    return v
+
+  def AddLeds(self, data):
+    """Add status leds to data."""
+    red = 'red.gif'
+    green = 'green.gif'
+    leds = {
+        'ACS': red,
+        'Switch': red,
+        'Modem': red,
+        'Radio': red,
+        'RSSI': red,
+        'MSE': red,
+        'Peer': red
+    }
+    if self.GetValue(data, 'platform/ledstate') == 'ACSCONTACT':
+      leds['ACS'] = green
+    if self.GetValue(data, 'platform/cpss_ready'):
+      leds['Switch'] = green
+    if self.GetValue(data, 'modem/status/acquireStatus') == 1:
+      leds['Modem'] = green
+    if self.GetValue(data, 'radio/paLnaPowerEnabled'):
+      leds['Radio'] = green
+    rssi = self.GetValue(data, 'radio/rx/rssi')
+    if rssi >= 1500 and rssi <= 2000:
+      leds['RSSI'] = green
+    mse = self.GetValue(data, 'modem/status/normalizedMse')
+    if mse is not None and mse <= -180:
+      leds['MSE'] = green
+    if self.GetValue(data, 'platform/peer_up'):
+      leds['Peer'] = green
+    data['leds'] = leds
+
   def GetData(self):
     """Get system data, return a json string."""
-    pj = self.GetPlatformData()
-    mj = self.GetModemData()
-    rj = self.GetRadioData()
-    js = '{"platform":' + pj + ',"modem":' + mj + ',"radio":' + rj + '}'
+    data = {}
+    data['platform'] = self.GetPlatformData()
+    data['modem'] = self.GetModemData()
+    data['radio'] = self.GetRadioData()
+    self.AddLeds(data)
+    js = json.dumps(data)
     return js
 
   def AddIpAddr(self, data):
@@ -503,7 +547,7 @@
       data[kdata] = vdata
 
   def GetPlatformData(self):
-    """Get platform data, return a json string."""
+    """Get platform data."""
     data = self.data
     sim = self.sim
 
@@ -517,6 +561,7 @@
     data['ledstate'] = self.ReadFile(sim + '/tmp/gpio/ledstate')
     data['cpu_temperature'] = self.ReadFile(sim + '/tmp/gpio/cpu_temperature')
     data['peer_up'] = os.path.exists(sim + '/tmp/peer-up')
+    data['cpss_ready'] = os.path.exists(sim + '/tmp/cpss_ready')
     cs = '/config/settings/'
     data['craft_ipaddr'] = self.ReadFile(sim + cs + 'craft_ipaddr')
     data['link_ipaddr'] = self.ReadFile(sim + cs + 'local_ipaddr')
@@ -528,10 +573,11 @@
     self.AddInterfaceStats(data)
     self.AddSwitchStats(data)
     self.AddVlans(data)
-    return json.dumps(data)
+    return data
 
   def GetModemData(self):
-    """Get modem data, return a json string."""
+    """Get modem data."""
+    data = {}
     response = '{}'
     if self.sim:
       response = self.ReadFile(self.sim + '/tmp/glaukus/modem.json')
@@ -542,10 +588,15 @@
         response = handle.read()
       except urllib2.URLError as ex:
         print 'Connection to %s failed: %s' % (url, ex.reason)
-    return response
+    try:
+      data = json.loads(response)
+    except ValueError as e:
+      print 'json format error: %s' % e
+    return data
 
   def GetRadioData(self):
-    """Get radio data, return a json string."""
+    """Get radio data."""
+    data = {}
     response = '{}'
     if self.sim:
       response = self.ReadFile(self.sim + '/tmp/glaukus/radio.json')
@@ -556,7 +607,56 @@
         response = handle.read()
       except urllib2.URLError as ex:
         print 'Connection to %s failed: %s' % (url, ex.reason)
-    return response
+    try:
+      data = json.loads(response)
+    except ValueError as e:
+      print 'json format error: %s' % e
+    return data
+
+  def GetIQPNG(self, path):
+    """Get IQ points and render as PNG."""
+    response = '[0,0]'
+    if self.sim:
+      response = self.ReadFile(self.sim + '/tmp/glaukus/' + path + '.json')
+    else:
+      try:
+        url = 'http://localhost:8080/api/modem/iq/' + path
+        handle = urllib2.urlopen(url, timeout=2)
+        response = handle.read()
+      except urllib2.URLError as ex:
+        print 'Connection to %s failed: %s' % (url, ex.reason)
+
+    coords = [0, 0]
+    try:
+      coords = json.loads(response)
+    except ValueError as e:
+      print 'json format error: %s' % e
+
+    # owh is original width/height of data (-1200 to 1200)
+    owh = (2400, 2400)
+    # wh is display size (400x400)
+    wh = (400, 400)
+
+    w = png.Writer(size=wh, greyscale=True, bitdepth=1)
+    scanline = int((wh[0] + 7) / 8)
+    rows = [scanline*[0] for i in xrange(0, wh[1])]
+    for i in xrange(0, len(coords) / 2):
+      # data is a series of x,y,x,y,x,y...
+      xy = (coords[i*2], coords[i*2+1])
+      # transform and scale data to display
+      sxy = (int((xy[0] + owh[0]/2 + .5) * wh[0] / owh[0]),
+             int((xy[1] + owh[1]/2 + .5) * wh[1] / owh[1]))
+      if sxy[0] < 0 or sxy[0] >= wh[0] or sxy[1] < 0 or sxy[1] >= wh[1]:
+        continue
+      # set a pixel in the PNG
+      pos = int(sxy[0] / 8)
+      shift = sxy[0] % 8
+      rows[sxy[1]][pos] |= 1 << (7 - shift)
+    f = StringIO.StringIO()
+    w.write_packed(f, rows)
+    image = f.getvalue()
+    f.close()
+    return image
 
   def GetUserCreds(self, user):
     """Create a dict with the requested password."""
@@ -754,6 +854,29 @@
       self.write(response)
       self.finish()
 
+  class PNGHandler(CraftHandler):
+    """Returns a PNG showing plotted IQ values."""
+    baseurl = 'http://localhost:8080/api/modem/iq/'
+    auth = 'any'
+    page = 'IQ'
+    path = None
+
+    def get(self):
+      if self.TryProxy():
+        return
+      if not self.Authenticated():
+        return
+      ui = self.settings['ui']
+      print '%s %s page (%s)' % (self.request.method, self.page, ui.sim)
+
+      image = ui.GetIQPNG(self.path)
+      self.set_header('Content-Type', 'image/png')
+      self.write(image)
+      self.finish()
+
+  class RXSlicerPNGHandler(PNGHandler):
+    path = 'rxslicer'
+
   def RunUI(self):
     """Create the http redirect and https web server and run forever."""
     sim = self.sim
@@ -763,6 +886,7 @@
         (r'^/status/?$', self.StatusHandler),
         (r'^/config/?$', self.ConfigHandler),
         (r'^/content.json/?$', self.JsonHandler),
+        (r'^/rxslicer.png$', self.RXSlicerPNGHandler),
         (r'^/static/([^/]*)$', tornado.web.StaticFileHandler,
          {'path': self.wwwroot + '/static'}),
     ]
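
A stripped-down sketch of the 1-bit pixel packing that GetIQPNG uses: each row is a list of bytes, and setting pixel x means setting bit (7 - x % 8) of byte x / 8.  The dimensions and points here are arbitrary examples, not values from the patch.

import StringIO

import png  # pypng, vendored above as craftui/png.py

WIDTH, HEIGHT = 16, 4
scanline = (WIDTH + 7) / 8                 # bytes per packed 1-bit row
rows = [scanline * [0] for _ in xrange(HEIGHT)]

def set_pixel(x, y):
  # The most significant bit of each byte is the leftmost pixel.
  rows[y][x / 8] |= 1 << (7 - x % 8)

for x, y in [(0, 0), (7, 1), (8, 2), (15, 3)]:
  set_pixel(x, y)

w = png.Writer(size=(WIDTH, HEIGHT), greyscale=True, bitdepth=1)
f = StringIO.StringIO()
w.write_packed(f, rows)                    # same call GetIQPNG uses
print len(f.getvalue()), 'bytes of PNG'
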
diff --git a/craftui/craftui_test.sh b/craftui/craftui_test.sh
index 677f719..5b71fa9 100755
--- a/craftui/craftui_test.sh
+++ b/craftui/craftui_test.sh
@@ -238,6 +238,10 @@
     $curl $admin_auth -d $d $url/content.json'?peer=1' |& grep '"error": 0}'
     check_success
 
+    testname rxslicer
+    $curl $admin_auth $url/rxslicer.png | file - | grep "PNG image data, 400 x 400"
+    check_success
+
   done
 
   # verify insecure message is hidden on https and not on http
diff --git a/craftui/png.py b/craftui/png.py
new file mode 100755
index 0000000..b55dd3a
--- /dev/null
+++ b/craftui/png.py
@@ -0,0 +1,3857 @@
+#!/usr/bin/env python
+
+# $URL$
+# $Rev$
+
+# png.py - PNG encoder/decoder in pure Python
+#
+# Copyright (C) 2006 Johann C. Rocholl <johann@browsershots.org>
+# Portions Copyright (C) 2009 David Jones <drj@pobox.com>
+# And probably portions Copyright (C) 2006 Nicko van Someren <nicko@nicko.org>
+#
+# Original concept by Johann C. Rocholl.
+#
+# LICENSE (The MIT License)
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+#
+# Changelog (recent first):
+# 2009-03-11 David: interlaced bit depth < 8 (writing).
+# 2009-03-10 David: interlaced bit depth < 8 (reading).
+# 2009-03-04 David: Flat and Boxed pixel formats.
+# 2009-02-26 David: Palette support (writing).
+# 2009-02-23 David: Bit-depths < 8; better PNM support.
+# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
+# 2006-06-17 Johann: Very simple prototype PNG decoder.
+# 2006-06-17 Nicko: Test suite with various image generators.
+# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
+# 2006-06-15 Johann: Scanline iterator interface for large input files.
+# 2006-06-09 Johann: Very simple prototype PNG encoder.
+
+# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from
+# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885
+
+# Incorporated into pypng by drj on 2009-03-12 from
+# //depot/prj/bangaio/master/code/png.py#67
+
+
+"""
+Pure Python PNG Reader/Writer
+
+This Python module implements support for PNG images (see PNG
+specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads
+and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64
+bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB,
+RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped
+images (1/2/4/8 bit).  Adam7 interlacing is supported for reading and
+writing.  A number of optional chunks can be specified (when writing)
+and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``.
+
+For help, type ``import png; help(png)`` in your python interpreter.
+
+A good place to start is the :class:`Reader` and :class:`Writer` classes.
+
+Requires Python 2.3.  Limited support is available for Python 2.2, but
+not everything works.  Best with Python 2.4 and higher.  Installation is
+trivial, but see the ``README.txt`` file (with the source distribution)
+for details.
+
+This file can also be used as a command-line utility to convert
+`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the reverse conversion from PNG to
+PNM. The interface is similar to that of the ``pnmtopng`` program from
+Netpbm.  Type ``python png.py --help`` at the shell prompt
+for usage and a list of options.
+
+A note on spelling and terminology
+----------------------------------
+
+Generally British English spelling is used in the documentation.  So
+that's "greyscale" and "colour".  This not only matches the author's
+native language, it's also used by the PNG specification.
+
+The major colour models supported by PNG (and hence by PyPNG) are:
+greyscale, RGB, greyscale--alpha, RGB--alpha.  These are sometimes
+referred to using the abbreviations: L, RGB, LA, RGBA.  In this case
+each letter abbreviates a single channel: *L* is for Luminance or Luma or
+Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand
+for Red, Green, Blue, the components of a colour image; *A* stands for
+Alpha, the opacity channel (used for transparency effects, but higher
+values are more opaque, so it makes sense to call it opacity).
+
+A note on formats
+-----------------
+
+When getting pixel data out of this module (reading) and presenting
+data to this module (writing) there are a number of ways the data could
+be represented as a Python value.  Generally this module uses one of
+three formats called "flat row flat pixel", "boxed row flat pixel", and
+"boxed row boxed pixel".  Basically the concern is whether each pixel
+and each row comes in its own little tuple (box), or not.
+
+Consider an image that is 3 pixels wide by 2 pixels high, and each pixel
+has RGB components:
+
+Boxed row flat pixel::
+
+  list([R,G,B, R,G,B, R,G,B],
+       [R,G,B, R,G,B, R,G,B])
+
+Each row appears as its own list, but the pixels are flattened so that
+three values for one pixel simply follow the three values for the previous
+pixel.  This is the most common format used, because it provides a good
+compromise between space and convenience.  PyPNG regards itself as
+at liberty to replace any sequence type with any sufficiently compatible
+other sequence type; in practice each row is an array (from the array
+module), and the outer list is sometimes an iterator rather than an
+explicit list (so that streaming is possible).
+
+Flat row flat pixel::
+
+  [R,G,B, R,G,B, R,G,B,
+   R,G,B, R,G,B, R,G,B]
+
+The entire image is one single giant sequence of colour values.
+Generally an array will be used (to save space), not a list.
+
+Boxed row boxed pixel::
+
+  list([ (R,G,B), (R,G,B), (R,G,B) ],
+       [ (R,G,B), (R,G,B), (R,G,B) ])
+
+Each row appears in its own list, but each pixel also appears in its own
+tuple.  A serious memory burn in Python.
+
+In all cases the top row comes first, and for each row the pixels are
+ordered from left-to-right.  Within a pixel the values appear in the
+order, R-G-B-A (or L-A for greyscale--alpha).
+
+There is a fourth format, mentioned because it is used internally,
+is close to what lies inside a PNG file itself, and has some support
+from the public API.  This format is called packed.  When packed,
+each row is a sequence of bytes (integers from 0 to 255), just as
+it is before PNG scanline filtering is applied.  When the bit depth
+is 8 this is essentially the same as boxed row flat pixel; when the
+bit depth is less than 8, several pixels are packed into each byte;
+when the bit depth is 16 (the only value more than 8 that is supported
+by the PNG image format) each pixel value is decomposed into 2 bytes
+(and `packed` is a misnomer).  This format is used by the
+:meth:`Writer.write_packed` method.  It isn't usually a convenient
+format, but may be just right if the source data for the PNG image
+comes from something that uses a similar format (for example, 1-bit
+BMPs, or another PNG file).
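+
+For example, a 4 pixel wide, 2 pixel high, 1-bit greyscale image in
+packed format is two rows of a single byte each, with the leftmost
+pixel stored in the most significant bit (``out`` here stands for any
+binary file object)::
+
+  rows = [[0x80],   # row 0: only the first pixel set
+          [0x10]]   # row 1: only the fourth pixel set
+  Writer(size=(4, 2), greyscale=True, bitdepth=1).write_packed(out, rows)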
+
+And now, my famous members
+--------------------------
+"""
+
+# http://www.python.org/doc/2.2.3/whatsnew/node5.html
+from __future__ import generators
+
+__version__ = "$URL$ $Rev$"
+
+from array import array
+try: # See :pyver:old
+    import itertools
+except:
+    pass
+import math
+# http://www.python.org/doc/2.4.4/lib/module-operator.html
+import operator
+import struct
+import sys
+import zlib
+# http://www.python.org/doc/2.4.4/lib/module-warnings.html
+import warnings
+try:
+    import pyximport
+    pyximport.install()
+    import cpngfilters as pngfilters
+except ImportError:
+    pass
+
+
+__all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array']
+
+
+# The PNG signature.
+# http://www.w3.org/TR/PNG/#5PNG-file-signature
+_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10)
+
+_adam7 = ((0, 0, 8, 8),
+          (4, 0, 8, 8),
+          (0, 4, 4, 8),
+          (2, 0, 4, 4),
+          (0, 2, 2, 4),
+          (1, 0, 2, 2),
+          (0, 1, 1, 2))
+
+def group(s, n):
+    # See
+    # http://www.python.org/doc/2.6/library/functions.html#zip
+    return zip(*[iter(s)]*n)
+
+def isarray(x):
+    """Same as ``isinstance(x, array)`` except on Python 2.2, where it
+    always returns ``False``.  This helps PyPNG work on Python 2.2.
+    """
+
+    try:
+        return isinstance(x, array)
+    except:
+        return False
+
+try:  # see :pyver:old
+    array.tostring
+except:
+    def tostring(row):
+        l = len(row)
+        return struct.pack('%dB' % l, *row)
+else:
+    def tostring(row):
+        """Convert row of bytes to string.  Expects `row` to be an
+        ``array``.
+        """
+        return row.tostring()
+
+# Conditionally convert to bytes.  Works on Python 2 and Python 3.
+try:
+    bytes('', 'ascii')
+    def strtobytes(x): return bytes(x, 'iso8859-1')
+    def bytestostr(x): return str(x, 'iso8859-1')
+except:
+    strtobytes = str
+    bytestostr = str
+
+def interleave_planes(ipixels, apixels, ipsize, apsize):
+    """
+    Interleave (colour) planes, e.g. RGB + A = RGBA.
+
+    Return an array of pixels consisting of the `ipsize` elements of data
+    from each pixel in `ipixels` followed by the `apsize` elements of data
+    from each pixel in `apixels`.  Conventionally `ipixels` and
+    `apixels` are byte arrays so the sizes are bytes, but it actually
+    works with any arrays of the same type.  The returned array is the
+    same type as the input arrays which should be the same type as each other.
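+
+    An illustrative sketch::
+
+      interleave_planes(array('B', [1, 2, 3, 4, 5, 6]),
+                        array('B', [9, 8]), 3, 1)
+      # -> array('B', [1, 2, 3, 9, 4, 5, 6, 8])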
+    """
+
+    itotal = len(ipixels)
+    atotal = len(apixels)
+    newtotal = itotal + atotal
+    newpsize = ipsize + apsize
+    # Set up the output buffer
+    # See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356
+    out = array(ipixels.typecode)
+    # It's annoying that there is no cheap way to set the array size :-(
+    out.extend(ipixels)
+    out.extend(apixels)
+    # Interleave in the pixel data
+    for i in range(ipsize):
+        out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize]
+    for i in range(apsize):
+        out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize]
+    return out
+
+def check_palette(palette):
+    """Check a palette argument (to the :class:`Writer` class) for validity.
+    Returns the palette as a list if okay; raises an exception otherwise.
+    """
+
+    # None is the default and is allowed.
+    if palette is None:
+        return None
+
+    p = list(palette)
+    if not (0 < len(p) <= 256):
+        raise ValueError("a palette must have between 1 and 256 entries")
+    seen_triple = False
+    for i,t in enumerate(p):
+        if len(t) not in (3,4):
+            raise ValueError(
+              "palette entry %d: entries must be 3- or 4-tuples." % i)
+        if len(t) == 3:
+            seen_triple = True
+        if seen_triple and len(t) == 4:
+            raise ValueError(
+              "palette entry %d: all 4-tuples must precede all 3-tuples" % i)
+        for x in t:
+            if int(x) != x or not(0 <= x <= 255):
+                raise ValueError(
+                  "palette entry %d: values must be integer: 0 <= x <= 255" % i)
+    return p
+
+class Error(Exception):
+    prefix = 'Error'
+    def __str__(self):
+        return self.prefix + ': ' + ' '.join(self.args)
+
+class FormatError(Error):
+    """Problem with input file format.  In other words, PNG file does
+    not conform to the specification in some way and is invalid.
+    """
+
+    prefix = 'FormatError'
+
+class ChunkError(FormatError):
+    prefix = 'ChunkError'
+
+
+class Writer:
+    """
+    PNG encoder in pure Python.
+    """
+
+    def __init__(self, width=None, height=None,
+                 size=None,
+                 greyscale=False,
+                 alpha=False,
+                 bitdepth=8,
+                 palette=None,
+                 transparent=None,
+                 background=None,
+                 gamma=None,
+                 compression=None,
+                 interlace=False,
+                 bytes_per_sample=None, # deprecated
+                 planes=None,
+                 colormap=None,
+                 maxval=None,
+                 chunk_limit=2**20):
+        """
+        Create a PNG encoder object.
+
+        Arguments:
+
+        width, height
+          Image size in pixels, as two separate arguments.
+        size
+          Image size (w,h) in pixels, as single argument.
+        greyscale
+          Input data is greyscale, not RGB.
+        alpha
+          Input data has alpha channel (RGBA or LA).
+        bitdepth
+          Bit depth: from 1 to 16.
+        palette
+          Create a palette for a colour mapped image (colour type 3).
+        transparent
+          Specify a transparent colour (create a ``tRNS`` chunk).
+        background
+          Specify a default background colour (create a ``bKGD`` chunk).
+        gamma
+          Specify a gamma value (create a ``gAMA`` chunk).
+        compression
+          zlib compression level: 0 (none) to 9 (more compressed); default: -1 or None.
+        interlace
+          Create an interlaced image.
+        chunk_limit
+          Write multiple ``IDAT`` chunks to save memory.
+
+        The image size (in pixels) can be specified either by using the
+        `width` and `height` arguments, or with the single `size`
+        argument.  If `size` is used it should be a pair (*width*,
+        *height*).
+
+        `greyscale` and `alpha` are booleans that specify whether
+        an image is greyscale (or colour), and whether it has an
+        alpha channel (or not).
+
+        `bitdepth` specifies the bit depth of the source pixel values.
+        Each source pixel value must be an integer between 0 and
+        ``2**bitdepth-1``.  For example, 8-bit images have values
+        between 0 and 255.  PNG only stores images with bit depths of
+        1,2,4,8, or 16.  When `bitdepth` is not one of these values,
+        the next highest valid bit depth is selected, and an ``sBIT``
+        (significant bits) chunk is generated that specifies the original
+        precision of the source image.  In this case the supplied pixel
+        values will be rescaled to fit the range of the selected bit depth.
+
+        The details of which bit depth / colour model combinations the
+        PNG file format supports directly, are somewhat arcane
+        (refer to the PNG specification for full details).  Briefly:
+        "small" bit depths (1,2,4) are only allowed with greyscale and
+        colour mapped images; colour mapped images cannot have bit depth
+        16.
+
+        For colour mapped images (in other words, when the `palette`
+        argument is specified) the `bitdepth` argument must match one of
+        the valid PNG bit depths: 1, 2, 4, or 8.  (It is valid to have a
+        PNG image with a palette and an ``sBIT`` chunk, but the meaning
+        is slightly different; it would be awkward to press the
+        `bitdepth` argument into service for this.)
+
+        The `palette` option, when specified, causes a colour mapped image
+        to be created: the PNG colour type is set to 3; greyscale
+        must not be set; alpha must not be set; transparent must
+        not be set; the bit depth must be 1,2,4, or 8.  When a colour
+        mapped image is created, the pixel values are palette indexes
+        and the `bitdepth` argument specifies the size of these indexes
+        (not the size of the colour values in the palette).
+
+        The palette argument value should be a sequence of 3- or
+        4-tuples.  3-tuples specify RGB palette entries; 4-tuples
+        specify RGBA palette entries.  If both 4-tuples and 3-tuples
+        appear in the sequence then all the 4-tuples must come
+        before all the 3-tuples.  A ``PLTE`` chunk is created; if there
+        are 4-tuples then a ``tRNS`` chunk is created as well.  The
+        ``PLTE`` chunk will contain all the RGB triples in the same
+        sequence; the ``tRNS`` chunk will contain the alpha channel for
+        all the 4-tuples, in the same sequence.  Palette entries
+        are always 8-bit.
+
+        If specified, the `transparent` and `background` parameters must
+        be a tuple with three integer values for red, green, blue, or
+        a simple integer (or singleton tuple) for a greyscale image.
+
+        If specified, the `gamma` parameter must be a positive number
+        (generally, a float).  A ``gAMA`` chunk will be created.  Note that
+        this will not change the values of the pixels as they appear in
+        the PNG file, they are assumed to have already been converted
+        appropriately for the gamma specified.
+
+        The `compression` argument specifies the compression level to
+        be used by the ``zlib`` module.  Values from 1 to 9 specify
+        compression, with 9 being "more compressed" (usually smaller
+        and slower, but it doesn't always work out that way).  0 means
+        no compression.  -1 and ``None`` both mean that the default
+        level of compression will be picked by the ``zlib`` module
+        (which is generally acceptable).
+
+        If `interlace` is true then an interlaced image is created
+        (using PNG's so far only interlace method, *Adam7*).  This does not
+        affect how the pixels should be presented to the encoder, rather
+        it changes how they are arranged into the PNG file.  On slow
+        connexions interlaced images can be partially decoded by the
+        browser to give a rough view of the image that is successively
+        refined as more image data appears.
+        
+        .. note ::
+        
+          Enabling the `interlace` option requires the entire image
+          to be processed in working memory.
+
+        `chunk_limit` is used to limit the amount of memory used whilst
+        compressing the image.  In order to avoid using large amounts of
+        memory, multiple ``IDAT`` chunks may be created.
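+
+        A minimal usage sketch (the filename is illustrative): write a
+        2x2 8-bit greyscale image supplied in boxed row flat pixel
+        format::
+
+          w = Writer(width=2, height=2, greyscale=True, bitdepth=8)
+          f = open('checker.png', 'wb')
+          w.write(f, [[0, 255], [255, 0]])
+          f.close()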
+        """
+
+        # At the moment the `planes` argument is ignored;
+        # its purpose is to act as a dummy so that
+        # ``Writer(x, y, **info)`` works, where `info` is a dictionary
+        # returned by Reader.read and friends.
+        # Ditto for `colormap`.
+
+        # A couple of helper functions come first.  Best skipped if you
+        # are reading through.
+
+        def isinteger(x):
+            try:
+                return int(x) == x
+            except:
+                return False
+
+        def check_color(c, which):
+            """Checks that a colour argument for transparent or
+            background options is the right form.  Also "corrects" bare
+            integers to 1-tuples.
+            """
+
+            if c is None:
+                return c
+            if greyscale:
+                try:
+                    l = len(c)
+                except TypeError:
+                    c = (c,)
+                if len(c) != 1:
+                    raise ValueError("%s for greyscale must be 1-tuple" %
+                        which)
+                if not isinteger(c[0]):
+                    raise ValueError(
+                        "%s colour for greyscale must be integer" %
+                        which)
+            else:
+                if not (len(c) == 3 and
+                        isinteger(c[0]) and
+                        isinteger(c[1]) and
+                        isinteger(c[2])):
+                    raise ValueError(
+                        "%s colour must be a triple of integers" %
+                        which)
+            return c
+
+        if size:
+            if len(size) != 2:
+                raise ValueError(
+                  "size argument should be a pair (width, height)")
+            if width is not None and width != size[0]:
+                raise ValueError(
+                  "size[0] (%r) and width (%r) should match when both are used."
+                    % (size[0], width))
+            if height is not None and height != size[1]:
+                raise ValueError(
+                  "size[1] (%r) and height (%r) should match when both are used."
+                    % (size[1], height))
+            width,height = size
+        del size
+
+        if width <= 0 or height <= 0:
+            raise ValueError("width and height must be greater than zero")
+        if not isinteger(width) or not isinteger(height):
+            raise ValueError("width and height must be integers")
+        # http://www.w3.org/TR/PNG/#7Integers-and-byte-order
+        if width > 2**32-1 or height > 2**32-1:
+            raise ValueError("width and height cannot exceed 2**32-1")
+
+        if alpha and transparent is not None:
+            raise ValueError(
+                "transparent colour not allowed with alpha channel")
+
+        if bytes_per_sample is not None:
+            warnings.warn('please use bitdepth instead of bytes_per_sample',
+                          DeprecationWarning)
+            if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2):
+                raise ValueError(
+                    "bytes per sample must be .125, .25, .5, 1, or 2")
+            bitdepth = int(8*bytes_per_sample)
+        del bytes_per_sample
+        if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth:
+            raise ValueError("bitdepth (%r) must be a positive integer <= 16" %
+              bitdepth)
+
+        self.rescale = None
+        if palette:
+            if bitdepth not in (1,2,4,8):
+                raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8")
+            if transparent is not None:
+                raise ValueError("transparent and palette not compatible")
+            if alpha:
+                raise ValueError("alpha and palette not compatible")
+            if greyscale:
+                raise ValueError("greyscale and palette not compatible")
+        else:
+            # No palette, check for sBIT chunk generation.
+            if alpha or not greyscale:
+                if bitdepth not in (8,16):
+                    targetbitdepth = (8,16)[bitdepth > 8]
+                    self.rescale = (bitdepth, targetbitdepth)
+                    bitdepth = targetbitdepth
+                    del targetbitdepth
+            else:
+                assert greyscale
+                assert not alpha
+                if bitdepth not in (1,2,4,8,16):
+                    if bitdepth > 8:
+                        targetbitdepth = 16
+                    elif bitdepth == 3:
+                        targetbitdepth = 4
+                    else:
+                        assert bitdepth in (5,6,7)
+                        targetbitdepth = 8
+                    self.rescale = (bitdepth, targetbitdepth)
+                    bitdepth = targetbitdepth
+                    del targetbitdepth
+
+        if bitdepth < 8 and (alpha or not greyscale and not palette):
+            raise ValueError(
+              "bitdepth < 8 only permitted with greyscale or palette")
+        if bitdepth > 8 and palette:
+            raise ValueError(
+                "bit depth must be 8 or less for images with palette")
+
+        transparent = check_color(transparent, 'transparent')
+        background = check_color(background, 'background')
+
+        # It's important that the true boolean values (greyscale, alpha,
+        # colormap, interlace) are converted to bool because Iverson's
+        # convention is relied upon later on.
+        self.width = width
+        self.height = height
+        self.transparent = transparent
+        self.background = background
+        self.gamma = gamma
+        self.greyscale = bool(greyscale)
+        self.alpha = bool(alpha)
+        self.colormap = bool(palette)
+        self.bitdepth = int(bitdepth)
+        self.compression = compression
+        self.chunk_limit = chunk_limit
+        self.interlace = bool(interlace)
+        self.palette = check_palette(palette)
+
+        self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap
+        assert self.color_type in (0,2,3,4,6)
+
+        self.color_planes = (3,1)[self.greyscale or self.colormap]
+        self.planes = self.color_planes + self.alpha
+        # :todo: fix for bitdepth < 8
+        self.psize = (self.bitdepth/8) * self.planes
+
+    def make_palette(self):
+        """Create the byte sequences for a ``PLTE`` and if necessary a
+        ``tRNS`` chunk.  Returned as a pair (*p*, *t*).  *t* will be
+        ``None`` if no ``tRNS`` chunk is necessary.
+        """
+
+        p = array('B')
+        t = array('B')
+
+        for x in self.palette:
+            p.extend(x[0:3])
+            if len(x) > 3:
+                t.append(x[3])
+        p = tostring(p)
+        t = tostring(t)
+        if t:
+            return p,t
+        return p,None
+
+    def write(self, outfile, rows):
+        """Write a PNG image to the output file.  `rows` should be
+        an iterable that yields each row in boxed row flat pixel format.
+        The rows should be the rows of the original image, so there
+        should be ``self.height`` rows of ``self.width * self.planes`` values.
+        If `interlace` is specified (when creating the instance), then
+        an interlaced PNG file will be written.  Supply the rows in the
+        normal image order; the interlacing is carried out internally.
+        
+        .. note ::
+
+          Interlacing will require the entire image to be in working memory.
+        """
+
+        if self.interlace:
+            fmt = 'BH'[self.bitdepth > 8]
+            a = array(fmt, itertools.chain(*rows))
+            return self.write_array(outfile, a)
+        else:
+            nrows = self.write_passes(outfile, rows)
+            if nrows != self.height:
+                raise ValueError(
+                  "rows supplied (%d) does not match height (%d)" %
+                  (nrows, self.height))
+
+    def write_passes(self, outfile, rows, packed=False):
+        """
+        Write a PNG image to the output file.
+
+        Most users are expected to find the :meth:`write` or
+        :meth:`write_array` method more convenient.
+        
+        The rows should be given to this method in the order that
+        they appear in the output file.  For straightlaced images,
+        this is the usual top to bottom ordering, but for interlaced
+        images the rows should have already been interlaced before
+        passing them to this function.
+
+        `rows` should be an iterable that yields each row.  When
+        `packed` is ``False`` the rows should be in boxed row flat pixel
+        format; when `packed` is ``True`` each row should be a packed
+        sequence of bytes.
+
+        """
+
+        # http://www.w3.org/TR/PNG/#5PNG-file-signature
+        outfile.write(_signature)
+
+        # http://www.w3.org/TR/PNG/#11IHDR
+        write_chunk(outfile, 'IHDR',
+                    struct.pack("!2I5B", self.width, self.height,
+                                self.bitdepth, self.color_type,
+                                0, 0, self.interlace))
+
+        # See :chunk:order
+        # http://www.w3.org/TR/PNG/#11gAMA
+        if self.gamma is not None:
+            write_chunk(outfile, 'gAMA',
+                        struct.pack("!L", int(round(self.gamma*1e5))))
+
+        # See :chunk:order
+        # http://www.w3.org/TR/PNG/#11sBIT
+        if self.rescale:
+            write_chunk(outfile, 'sBIT',
+                struct.pack('%dB' % self.planes,
+                            *[self.rescale[0]]*self.planes))
+        
+        # :chunk:order: Without a palette (PLTE chunk), ordering is
+        # relatively relaxed.  With one, gAMA chunk must precede PLTE
+        # chunk which must precede tRNS and bKGD.
+        # See http://www.w3.org/TR/PNG/#5ChunkOrdering
+        if self.palette:
+            p,t = self.make_palette()
+            write_chunk(outfile, 'PLTE', p)
+            if t:
+                # tRNS chunk is optional.  Only needed if palette entries
+                # have alpha.
+                write_chunk(outfile, 'tRNS', t)
+
+        # http://www.w3.org/TR/PNG/#11tRNS
+        if self.transparent is not None:
+            if self.greyscale:
+                write_chunk(outfile, 'tRNS',
+                            struct.pack("!1H", *self.transparent))
+            else:
+                write_chunk(outfile, 'tRNS',
+                            struct.pack("!3H", *self.transparent))
+
+        # http://www.w3.org/TR/PNG/#11bKGD
+        if self.background is not None:
+            if self.greyscale:
+                write_chunk(outfile, 'bKGD',
+                            struct.pack("!1H", *self.background))
+            else:
+                write_chunk(outfile, 'bKGD',
+                            struct.pack("!3H", *self.background))
+
+        # http://www.w3.org/TR/PNG/#11IDAT
+        if self.compression is not None:
+            compressor = zlib.compressobj(self.compression)
+        else:
+            compressor = zlib.compressobj()
+
+        # Choose an extend function based on the bitdepth.  The extend
+        # function packs/decomposes the pixel values into bytes and
+        # stuffs them onto the data array.
+        data = array('B')
+        if self.bitdepth == 8 or packed:
+            extend = data.extend
+        elif self.bitdepth == 16:
+            # Decompose into bytes
+            def extend(sl):
+                fmt = '!%dH' % len(sl)
+                data.extend(array('B', struct.pack(fmt, *sl)))
+        else:
+            # Pack into bytes
+            assert self.bitdepth < 8
+            # samples per byte
+            spb = int(8/self.bitdepth)
+            def extend(sl):
+                a = array('B', sl)
+                # Adding padding bytes so we can group into a whole
+                # number of spb-tuples.
+                l = float(len(a))
+                extra = math.ceil(l / float(spb))*spb - l
+                a.extend([0]*int(extra))
+                # Pack into bytes
+                l = group(a, spb)
+                l = map(lambda e: reduce(lambda x,y:
+                                           (x << self.bitdepth) + y, e), l)
+                data.extend(l)
+        if self.rescale:
+            oldextend = extend
+            factor = \
+              float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1)
+            def extend(sl):
+                oldextend(map(lambda x: int(round(factor*x)), sl))
+
+        # Build the first row, testing mostly to see if we need to
+        # change the extend function to cope with NumPy integer types
+        # (they cause our ordinary definition of extend to fail, so we
+        # wrap it).  See
+        # http://code.google.com/p/pypng/issues/detail?id=44
+        enumrows = enumerate(rows)
+        del rows
+
+        # First row's filter type.
+        data.append(0)
+        # :todo: Certain exceptions in the call to ``.next()`` or the
+        # following try would indicate no row data supplied.
+        # Should catch.
+        i,row = enumrows.next()
+        try:
+            # If this fails...
+            extend(row)
+        except:
+            # ... try a version that converts the values to int first.
+            # Not only does this work for the (slightly broken) NumPy
+            # types, there are probably lots of other, unknown, "nearly"
+            # int types it works for.
+            def wrapmapint(f):
+                return lambda sl: f(map(int, sl))
+            extend = wrapmapint(extend)
+            del wrapmapint
+            extend(row)
+
+        for i,row in enumrows:
+            # Add "None" filter type.  Currently, it's essential that
+            # this filter type be used for every scanline as we do not
+            # mark the first row of a reduced pass image; that means we
+            # could accidentally compute the wrong filtered scanline if
+            # we used "up", "average", or "paeth" on such a line.
+            data.append(0)
+            extend(row)
+            if len(data) > self.chunk_limit:
+                compressed = compressor.compress(tostring(data))
+                if len(compressed):
+                    # print >> sys.stderr, len(data), len(compressed)
+                    write_chunk(outfile, 'IDAT', compressed)
+                # Because of our very witty definition of ``extend``,
+                # above, we must re-use the same ``data`` object.  Hence
+                # we use ``del`` to empty this one, rather than create a
+                # fresh one (which would be my natural FP instinct).
+                del data[:]
+        if len(data):
+            compressed = compressor.compress(tostring(data))
+        else:
+            compressed = ''
+        flushed = compressor.flush()
+        if len(compressed) or len(flushed):
+            # print >> sys.stderr, len(data), len(compressed), len(flushed)
+            write_chunk(outfile, 'IDAT', compressed + flushed)
+        # http://www.w3.org/TR/PNG/#11IEND
+        write_chunk(outfile, 'IEND')
+        return i+1
+
+    def write_array(self, outfile, pixels):
+        """
+        Write an array in flat row flat pixel format as a PNG file on
+        the output file.  See also :meth:`write` method.
+        """
+
+        if self.interlace:
+            self.write_passes(outfile, self.array_scanlines_interlace(pixels))
+        else:
+            self.write_passes(outfile, self.array_scanlines(pixels))
+
+    def write_packed(self, outfile, rows):
+        """
+        Write PNG file to `outfile`.  The pixel data comes from `rows`
+        which should be in boxed row packed format.  Each row should be
+        a sequence of packed bytes.
+
+        Technically, this method does work for interlaced images but it
+        is best avoided.  For interlaced images, the rows should be
+        presented in the order that they appear in the file.
+
+        This method should not be used when the source image bit depth
+        is not one naturally supported by PNG; the bit depth should be
+        1, 2, 4, 8, or 16.
+        """
+
+        if self.rescale:
+            raise Error("write_packed method not suitable for bit depth %d" %
+              self.rescale[0])
+        return self.write_passes(outfile, rows, packed=True)
+
+    def convert_pnm(self, infile, outfile):
+        """
+        Convert a PNM file containing raw pixel data into a PNG file
+        with the parameters set in the writer object.  Works for
+        (binary) PGM, PPM, and PAM formats.
+        """
+
+        if self.interlace:
+            pixels = array('B')
+            pixels.fromfile(infile,
+                            (self.bitdepth/8) * self.color_planes *
+                            self.width * self.height)
+            self.write_passes(outfile, self.array_scanlines_interlace(pixels))
+        else:
+            self.write_passes(outfile, self.file_scanlines(infile))
+
+    def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
+        """
+        Convert a PPM and PGM file containing raw pixel data into a
+        PNG outfile with the parameters set in the writer object.
+        """
+        pixels = array('B')
+        pixels.fromfile(ppmfile,
+                        (self.bitdepth/8) * self.color_planes *
+                        self.width * self.height)
+        apixels = array('B')
+        apixels.fromfile(pgmfile,
+                         (self.bitdepth/8) *
+                         self.width * self.height)
+        pixels = interleave_planes(pixels, apixels,
+                                   (self.bitdepth/8) * self.color_planes,
+                                   (self.bitdepth/8))
+        if self.interlace:
+            self.write_passes(outfile, self.array_scanlines_interlace(pixels))
+        else:
+            self.write_passes(outfile, self.array_scanlines(pixels))
+
+    def file_scanlines(self, infile):
+        """
+        Generates boxed rows in flat pixel format, from the input file
+        `infile`.  It assumes that the input file is in a "Netpbm-like"
+        binary format, and is positioned at the beginning of the first
+        pixel.  The number of pixels to read is taken from the image
+        dimensions (`width`, `height`, `planes`) and the number of bytes
+        per value is implied by the image `bitdepth`.
+        """
+
+        # Values per row
+        vpr = self.width * self.planes
+        row_bytes = vpr
+        if self.bitdepth > 8:
+            assert self.bitdepth == 16
+            row_bytes *= 2
+            fmt = '>%dH' % vpr
+            def line():
+                return array('H', struct.unpack(fmt, infile.read(row_bytes)))
+        else:
+            def line():
+                scanline = array('B', infile.read(row_bytes))
+                return scanline
+        for y in range(self.height):
+            yield line()
+
+    def array_scanlines(self, pixels):
+        """
+        Generates boxed rows (flat pixels) from flat rows (flat pixels)
+        in an array.
+        """
+
+        # Values per row
+        vpr = self.width * self.planes
+        stop = 0
+        for y in range(self.height):
+            start = stop
+            stop = start + vpr
+            yield pixels[start:stop]
+
+    def array_scanlines_interlace(self, pixels):
+        """
+        Generator for interlaced scanlines from an array.  `pixels` is
+        the full source image in flat row flat pixel format.  The
+        generator yields each scanline of the reduced passes in turn, in
+        boxed row flat pixel format.
+        """
+
+        # http://www.w3.org/TR/PNG/#8InterlaceMethods
+        # Array type.
+        fmt = 'BH'[self.bitdepth > 8]
+        # Value per row
+        vpr = self.width * self.planes
+        for xstart, ystart, xstep, ystep in _adam7:
+            if xstart >= self.width:
+                continue
+            # Pixels per row (of reduced image)
+            ppr = int(math.ceil((self.width-xstart)/float(xstep)))
+            # number of values in reduced image row.
+            row_len = ppr*self.planes
+            for y in range(ystart, self.height, ystep):
+                if xstep == 1:
+                    offset = y * vpr
+                    yield pixels[offset:offset+vpr]
+                else:
+                    row = array(fmt)
+                    # There's no easier way to set the length of an array
+                    row.extend(pixels[0:row_len])
+                    offset = y * vpr + xstart * self.planes
+                    end_offset = (y+1) * vpr
+                    skip = self.planes * xstep
+                    for i in range(self.planes):
+                        row[i::self.planes] = \
+                            pixels[offset+i:end_offset:skip]
+                    yield row
+
+def write_chunk(outfile, tag, data=strtobytes('')):
+    """
+    Write a PNG chunk to the output file, including length and
+    checksum.
+    """
+
+    # http://www.w3.org/TR/PNG/#5Chunk-layout
+    outfile.write(struct.pack("!I", len(data)))
+    tag = strtobytes(tag)
+    outfile.write(tag)
+    outfile.write(data)
+    checksum = zlib.crc32(tag)
+    checksum = zlib.crc32(data, checksum)
+    checksum &= 2**32-1
+    outfile.write(struct.pack("!I", checksum))
+
+def write_chunks(out, chunks):
+    """Create a PNG file by writing out the chunks."""
+
+    out.write(_signature)
+    for chunk in chunks:
+        write_chunk(out, *chunk)
+
+def filter_scanline(type, line, fo, prev=None):
+    """Apply a scanline filter to a scanline.  `type` specifies the
+    filter type (0 to 4); `line` specifies the current (unfiltered)
+    scanline as a sequence of bytes; `prev` specifies the previous
+    (unfiltered) scanline as a sequence of bytes. `fo` specifies the
+    filter offset; normally this is size of a pixel in bytes (the number
+    of bytes per sample times the number of channels), but when this is
+    < 1 (for bit depths < 8) then the filter offset is 1.
+    """
+
+    assert 0 <= type < 5
+
+    # The output array.  Which, pathetically, we extend one-byte at a
+    # time (fortunately this is linear).
+    out = array('B', [type])
+
+    def sub():
+        ai = -fo
+        for x in line:
+            if ai >= 0:
+                x = (x - line[ai]) & 0xff
+            out.append(x)
+            ai += 1
+    def up():
+        for i,x in enumerate(line):
+            x = (x - prev[i]) & 0xff
+            out.append(x)
+    def average():
+        ai = -fo
+        for i,x in enumerate(line):
+            if ai >= 0:
+                x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff
+            else:
+                x = (x - (prev[i] >> 1)) & 0xff
+            out.append(x)
+            ai += 1
+    def paeth():
+        # http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
+        ai = -fo # also used for ci
+        for i,x in enumerate(line):
+            a = 0
+            b = prev[i]
+            c = 0
+
+            if ai >= 0:
+                a = line[ai]
+                c = prev[ai]
+            p = a + b - c
+            pa = abs(p - a)
+            pb = abs(p - b)
+            pc = abs(p - c)
+            if pa <= pb and pa <= pc: Pr = a
+            elif pb <= pc: Pr = b
+            else: Pr = c
+
+            x = (x - Pr) & 0xff
+            out.append(x)
+            ai += 1
+
+    if not prev:
+        # We're on the first line.  Some of the filters can be reduced
+        # to simpler cases which makes handling the line "off the top"
+        # of the image simpler.  "up" becomes "none"; "paeth" becomes
+        # "left" (non-trivial, but true). "average" needs to be handled
+        # specially.
+        if type == 2: # "up"
+            return line # type = 0
+        elif type == 3:
+            prev = [0]*len(line)
+        elif type == 4: # "paeth"
+            type = 1
+    if type == 0:
+        out.extend(line)
+    elif type == 1:
+        sub()
+    elif type == 2:
+        up()
+    elif type == 3:
+        average()
+    else: # type == 4
+        paeth()
+    return out
+
+
+def from_array(a, mode=None, info={}):
+    """Create a PNG :class:`Image` object from a 2- or 3-dimensional array.
+    One application of this function is easy PIL-style saving:
+    ``png.from_array(pixels, 'L').save('foo.png')``.
+
+    .. note :
+
+      The use of the term *3-dimensional* is for marketing purposes
+      only.  It doesn't actually work.  Please bear with us.  Meanwhile
+      enjoy the complimentary snacks (on request) and please use a
+      2-dimensional array.
+    
+    Unless they are specified using the *info* parameter, the PNG's
+    height and width are taken from the array size.  For a 3 dimensional
+    array the first axis is the height; the second axis is the width;
+    and the third axis is the channel number.  Thus an RGB image that is
+    16 pixels high and 8 wide will use an array that is 16x8x3.  For 2
+    dimensional arrays the first axis is the height, but the second axis
+    is ``width*channels``, so an RGB image that is 16 pixels high and 8
+    wide will use a 2-dimensional array that is 16x24 (each row will be
+    8*3==24 sample values).
+
+    *mode* is a string that specifies the image colour format in a
+    PIL-style mode.  It can be:
+
+    ``'L'``
+      greyscale (1 channel)
+    ``'LA'``
+      greyscale with alpha (2 channel)
+    ``'RGB'``
+      colour image (3 channel)
+    ``'RGBA'``
+      colour image with alpha (4 channel)
+
+    The mode string can also specify the bit depth (overriding how this
+    function normally derives the bit depth, see below).  Appending
+    ``';16'`` to the mode will cause the PNG to be 16 bits per channel;
+    any decimal from 1 to 16 can be used to specify the bit depth.
+
+    When a 2-dimensional array is used *mode* determines how many
+    channels the image has, and so allows the width to be derived from
+    the second array dimension.
+
+    The array is expected to be a ``numpy`` array, but it can be any
+    suitable Python sequence.  For example, a list of lists can be used:
+    ``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``.  The exact
+    rules are: ``len(a)`` gives the first dimension, height;
+    ``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the
+    third dimension, unless an exception is raised in which case a
+    2-dimensional array is assumed.  It's slightly more complicated than
+    that because an iterator of rows can be used, and it all still
+    works.  Using an iterator allows data to be streamed efficiently.
+
+    The bit depth of the PNG is normally taken from the array element's
+    datatype (but if *mode* specifies a bitdepth then that is used
+    instead).  The array element's datatype is determined in a way which
+    is supposed to work both for ``numpy`` arrays and for Python
+    ``array.array`` objects.  A 1 byte datatype will give a bit depth of
+    8, a 2 byte datatype will give a bit depth of 16.  If the datatype
+    does not have an implicit size, for example it is a plain Python
+    list of lists, as above, then a default of 8 is used.
+
+    The *info* parameter is a dictionary that can be used to specify
+    metadata (in the same style as the arguments to the
+    :class:``png.Writer`` class).  For this function the keys that are
+    useful are:
+    
+    height
+      overrides the height derived from the array dimensions and allows
+      *a* to be an iterable.
+    width
+      overrides the width derived from the array dimensions.
+    bitdepth
+      overrides the bit depth derived from the element datatype (but
+      must match *mode* if that also specifies a bit depth).
+
+    Generally anything specified in the
+    *info* dictionary will override any implicit choices that this
+    function would otherwise make, but must match any explicit ones.
+    For example, if the *info* dictionary has a ``greyscale`` key then
+    this must be true when mode is ``'L'`` or ``'LA'`` and false when
+    mode is ``'RGB'`` or ``'RGBA'``.
+    """
+
+    # We abuse the *info* parameter by modifying it.  Take a copy here.
+    # (Also typechecks *info* to some extent).
+    info = dict(info)
+
+    # Syntax check mode string.
+    bitdepth = None
+    try:
+        mode = mode.split(';')
+        if len(mode) not in (1,2):
+            raise Error()
+        if mode[0] not in ('L', 'LA', 'RGB', 'RGBA'):
+            raise Error()
+        if len(mode) == 2:
+            try:
+                bitdepth = int(mode[1])
+            except:
+                raise Error()
+    except Error:
+        raise Error("mode string should be 'RGB' or 'L;16' or similar.")
+    mode = mode[0]
+
+    # Get bitdepth from *mode* if possible.
+    if bitdepth:
+        if info.get('bitdepth') and bitdepth != info['bitdepth']:
+            raise Error("mode bitdepth (%d) should match info bitdepth (%d)." %
+              (bitdepth, info['bitdepth']))
+        info['bitdepth'] = bitdepth
+
+    # Fill in and/or check entries in *info*.
+    # Dimensions.
+    if 'size' in info:
+        # Check width, height, size all match where used.
+        for dimension,axis in [('width', 0), ('height', 1)]:
+            if dimension in info:
+                if info[dimension] != info['size'][axis]:
+                    raise Error(
+                      "info[%r] should match info['size'][%r]." %
+                      (dimension, axis))
+        info['width'],info['height'] = info['size']
+    if 'height' not in info:
+        try:
+            l = len(a)
+        except:
+            raise Error(
+              "len(a) does not work, supply info['height'] instead.")
+        info['height'] = l
+    # Colour format.
+    if 'greyscale' in info:
+        if bool(info['greyscale']) != ('L' in mode):
+            raise Error("info['greyscale'] should match mode.")
+    info['greyscale'] = 'L' in mode
+    if 'alpha' in info:
+        if bool(info['alpha']) != ('A' in mode):
+            raise Error("info['alpha'] should match mode.")
+    info['alpha'] = 'A' in mode
+
+    planes = len(mode)
+    if 'planes' in info:
+        if info['planes'] != planes:
+            raise Error("info['planes'] should match mode.")
+
+    # In order to work out whether the array is 2D or 3D we need its
+    # first row, which requires that we take a copy of its iterator.
+    # We may also need the first row to derive width and bitdepth.
+    a,t = itertools.tee(a)
+    row = t.next()
+    del t
+    try:
+        row[0][0]
+        threed = True
+        testelement = row[0]
+    except:
+        threed = False
+        testelement = row
+    if 'width' not in info:
+        if threed:
+            width = len(row)
+        else:
+            width = len(row) // planes
+        info['width'] = width
+
+    # Not implemented yet
+    assert not threed
+
+    if 'bitdepth' not in info:
+        try:
+            dtype = testelement.dtype
+            # goto the "else:" clause.  Sorry.
+        except:
+            try:
+                # Try a Python array.array.
+                bitdepth = 8 * testelement.itemsize
+            except:
+                # We can't determine it from the array element's
+                # datatype, use a default of 8.
+                bitdepth = 8
+        else:
+            # If we got here without exception, we now assume that
+            # the array is a numpy array.
+            if dtype.kind == 'b':
+                bitdepth = 1
+            else:
+                bitdepth = 8 * dtype.itemsize
+        info['bitdepth'] = bitdepth
+
+    for thing in 'width height bitdepth greyscale alpha'.split():
+        assert thing in info
+    return Image(a, info)
+
+# So that refugees from PIL feel more at home.  Not documented.
+fromarray = from_array
+
+class Image:
+    """A PNG image.
+    You can create an :class:`Image` object from an array of pixels by calling
+    :meth:`png.from_array`.  It can be saved to disk with the
+    :meth:`save` method."""
+    def __init__(self, rows, info):
+        """
+        .. note ::
+        
+          The constructor is not public.  Please do not call it.
+        """
+        
+        self.rows = rows
+        self.info = info
+
+    def save(self, file):
+        """Save the image to *file*.  If *file* looks like an open file
+        descriptor then it is used, otherwise it is treated as a
+        filename and a fresh file is opened.
+
+        In general, you can only call this method once; after it has
+        been called the first time and the PNG image has been saved, the
+        source data will have been streamed, and cannot be streamed
+        again.
+        """
+
+        w = Writer(**self.info)
+
+        try:
+            file.write
+            def close(): pass
+        except:
+            file = open(file, 'wb')
+            def close(): file.close()
+
+        try:
+            w.write(file, self.rows)
+        finally:
+            close()
+
+class _readable:
+    """
+    A simple file-like interface for strings and arrays.
+    """
+
+    def __init__(self, buf):
+        self.buf = buf
+        self.offset = 0
+
+    def read(self, n):
+        r = self.buf[self.offset:self.offset+n]
+        if isarray(r):
+            r = r.tostring()
+        self.offset += n
+        return r
+
+
+class Reader:
+    """
+    PNG decoder in pure Python.
+    """
+
+    def __init__(self, _guess=None, **kw):
+        """
+        Create a PNG decoder object.
+
+        The constructor expects exactly one keyword argument. If you
+        supply a positional argument instead, it will guess the input
+        type. You can choose among the following keyword arguments:
+
+        filename
+          Name of input file (a PNG file).
+        file
+          A file-like object (object with a read() method).
+        bytes
+          ``array`` or ``string`` with PNG data.
+
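+        For example (the filename is illustrative)::
+
+          r = Reader(filename='checker.png')
+          # equivalently: Reader(file=open('checker.png', 'rb'))
+          #           or: Reader(bytes=png_data)
+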
+        """
+        if ((_guess is not None and len(kw) != 0) or
+            (_guess is None and len(kw) != 1)):
+            raise TypeError("Reader() takes exactly 1 argument")
+
+        # Will be the first 8 bytes, later on.  See validate_signature.
+        self.signature = None
+        self.transparent = None
+        # A pair of (len,type) if a chunk has been read but its data and
+        # checksum have not (in other words the file position is just
+        # past the 4 bytes that specify the chunk type).  See preamble
+        # method for how this is used.
+        self.atchunk = None
+
+        if _guess is not None:
+            if isarray(_guess):
+                kw["bytes"] = _guess
+            elif isinstance(_guess, str):
+                kw["filename"] = _guess
+            elif isinstance(_guess, file):
+                kw["file"] = _guess
+
+        if "filename" in kw:
+            self.file = open(kw["filename"], "rb")
+        elif "file" in kw:
+            self.file = kw["file"]
+        elif "bytes" in kw:
+            self.file = _readable(kw["bytes"])
+        else:
+            raise TypeError("expecting filename, file or bytes array")
+
+
+    def chunk(self, seek=None, lenient=False):
+        """
+        Read the next PNG chunk from the input file; returns a
+        (*type*,*data*) tuple.  *type* is the chunk's type as a string
+        (all PNG chunk types are 4 characters long).  *data* is the
+        chunk's data content, as a string.
+
+        If the optional `seek` argument is
+        specified then it will keep reading chunks until it either runs
+        out of file or finds the type specified by the argument.  Note
+        that in general the order of chunks in PNGs is unspecified, so
+        using `seek` can cause you to miss chunks.
+
+        If the optional `lenient` argument evaluates to True,
+        checksum failures will raise warnings rather than exceptions.
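+
+        An illustrative sketch, where ``r`` is a :class:`Reader`::
+
+          chunk_type, data = r.chunk()   # the first chunk is normally 'IHDR'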
+        """
+
+        self.validate_signature()
+
+        while True:
+            # http://www.w3.org/TR/PNG/#5Chunk-layout
+            if not self.atchunk:
+                self.atchunk = self.chunklentype()
+            length,type = self.atchunk
+            self.atchunk = None
+            data = self.file.read(length)
+            if len(data) != length:
+                raise ChunkError('Chunk %s too short for required %i octets.'
+                  % (type, length))
+            checksum = self.file.read(4)
+            if len(checksum) != 4:
+                raise ChunkError('Chunk %s too short for checksum.' % type)
+            if seek and type != seek:
+                continue
+            verify = zlib.crc32(strtobytes(type))
+            verify = zlib.crc32(data, verify)
+            # Whether the output from zlib.crc32 is signed or not varies
+            # according to hideous implementation details, see
+            # http://bugs.python.org/issue1202 .
+            # We coerce it to be positive here (in a way which works on
+            # Python 2.3 and older).
+            verify &= 2**32 - 1
+            verify = struct.pack('!I', verify)
+            if checksum != verify:
+                # print repr(checksum)
+                (a, ) = struct.unpack('!I', checksum)
+                (b, ) = struct.unpack('!I', verify)
+                message = "Checksum error in %s chunk: 0x%08X != 0x%08X." % (type, a, b)
+                if lenient:
+                    warnings.warn(message, RuntimeWarning)
+                else:
+                    raise ChunkError(message)
+            return type, data
+
+    def chunks(self):
+        """Return an iterator that will yield each chunk as a
+        (*chunktype*, *content*) pair.
+        """
+
+        while True:
+            t,v = self.chunk()
+            yield t,v
+            if t == 'IEND':
+                break
+
+    def undo_filter(self, filter_type, scanline, previous):
+        """Undo the filter for a scanline.  `scanline` is a sequence of
+        bytes that does not include the initial filter type byte.
+        `previous` is the decoded previous scanline (for straightlaced
+        images this is the previous pixel row, but for interlaced
+        images, it is the previous scanline in the reduced image, which
+        in general is not the previous pixel row in the final image).
+        When there is no previous scanline (the first row of a
+        straightlaced image, or the first row in one of the passes in an
+        interlaced image), then this argument should be ``None``.
+
+        The scanline will have the effects of filtering removed, and the
+        result will be returned as a fresh sequence of bytes.
+        """
+
+        # :todo: Would it be better to update scanline in place?
+        # Yes, with the Cython extension making the undo_filter fast,
+        # updating scanline inplace makes the code 3 times faster
+        # (reading 50 images of 800x800 went from 40s to 16s)
+        result = scanline
+
+        if filter_type == 0:
+            return result
+
+        if filter_type not in (1,2,3,4):
+            raise FormatError('Invalid PNG Filter Type.'
+              '  See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .')
+
+        # Filter unit.  The stride from one pixel to the corresponding
+        # byte of the previous pixel.  Normally this is the pixel
+        # size in bytes, but when the pixel size is smaller than one
+        # byte, the previous byte is used instead.
+        fu = max(1, self.psize)
+
+        # For the first line of a pass, synthesize a dummy previous
+        # line.  An alternative approach would be to observe that on the
+        # first line 'up' is the same as 'null', 'paeth' is the same
+        # as 'sub', with only 'average' requiring any special case.
+        if not previous:
+            previous = array('B', [0]*len(scanline))
+
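+        # Note: the nested sub/up/average/paeth helpers below are
+        # pure-Python reference implementations; the dispatch at the end
+        # of this method calls the pngfilters versions instead, so these
+        # local functions are never actually invoked.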
+        def sub():
+            """Undo sub filter."""
+
+            ai = 0
+            # The loop starts at index fu.  Observe that the initial
+            # part of the result is already filled in correctly with
+            # scanline.
+            for i in range(fu, len(result)):
+                x = scanline[i]
+                a = result[ai]
+                result[i] = (x + a) & 0xff
+                ai += 1
+
+        def up():
+            """Undo up filter."""
+
+            for i in range(len(result)):
+                x = scanline[i]
+                b = previous[i]
+                result[i] = (x + b) & 0xff
+
+        def average():
+            """Undo average filter."""
+
+            ai = -fu
+            for i in range(len(result)):
+                x = scanline[i]
+                if ai < 0:
+                    a = 0
+                else:
+                    a = result[ai]
+                b = previous[i]
+                result[i] = (x + ((a + b) >> 1)) & 0xff
+                ai += 1
+
+        def paeth():
+            """Undo Paeth filter."""
+
+            # Also used for ci.
+            ai = -fu
+            for i in range(len(result)):
+                x = scanline[i]
+                if ai < 0:
+                    a = c = 0
+                else:
+                    a = result[ai]
+                    c = previous[ai]
+                b = previous[i]
+                p = a + b - c
+                pa = abs(p - a)
+                pb = abs(p - b)
+                pc = abs(p - c)
+                if pa <= pb and pa <= pc:
+                    pr = a
+                elif pb <= pc:
+                    pr = b
+                else:
+                    pr = c
+                result[i] = (x + pr) & 0xff
+                ai += 1
+
+        # Call appropriate filter algorithm.  Note that 0 has already
+        # been dealt with.
+        (None,
+         pngfilters.undo_filter_sub,
+         pngfilters.undo_filter_up,
+         pngfilters.undo_filter_average,
+         pngfilters.undo_filter_paeth)[filter_type](fu, scanline, previous, result)
+        return result
+
+    def deinterlace(self, raw):
+        """
+        Read raw pixel data, undo filters, deinterlace, and flatten.
+        Return in flat row flat pixel format.
+        """
+
+        # print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," +
+        #     " bpp=%s") % (self.width, self.height, self.planes, self.bps)
+        # Values per row (of the target image)
+        vpr = self.width * self.planes
+
+        # Make a result array, and make it big enough.  Interleaving
+        # writes to the output array randomly (well, not quite), so the
+        # entire output array must be in memory.
+        fmt = 'BH'[self.bitdepth > 8]
+        a = array(fmt, [0]*vpr*self.height)
+        source_offset = 0
+
+        for xstart, ystart, xstep, ystep in _adam7:
+            # print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % (
+            #     xstart, ystart, xstep, ystep)
+            if xstart >= self.width:
+                continue
+            # The previous (reconstructed) scanline.  None at the
+            # beginning of a pass to indicate that there is no previous
+            # line.
+            recon = None
+            # Pixels per row (reduced pass image)
+            ppr = int(math.ceil((self.width-xstart)/float(xstep)))
+            # Row size in bytes for this pass.
+            row_size = int(math.ceil(self.psize * ppr))
+            for y in range(ystart, self.height, ystep):
+                filter_type = raw[source_offset]
+                source_offset += 1
+                scanline = raw[source_offset:source_offset+row_size]
+                source_offset += row_size
+                recon = self.undo_filter(filter_type, scanline, recon)
+                # Convert so that there is one element per pixel value
+                flat = self.serialtoflat(recon, ppr)
+                if xstep == 1:
+                    assert xstart == 0
+                    offset = y * vpr
+                    a[offset:offset+vpr] = flat
+                else:
+                    offset = y * vpr + xstart * self.planes
+                    end_offset = (y+1) * vpr
+                    skip = self.planes * xstep
+                    for i in range(self.planes):
+                        a[offset+i:end_offset:skip] = \
+                            flat[i::self.planes]
+        return a
+
+    def iterboxed(self, rows):
+        """Iterator that yields each scanline in boxed row flat pixel
+        format.  `rows` should be an iterator that yields the bytes of
+        each row in turn.
+        """
+
+        def asvalues(raw):
+            """Convert a row of raw bytes into a flat row.  Result may
+            or may not share with argument"""
+
+            if self.bitdepth == 8:
+                return raw
+            if self.bitdepth == 16:
+                raw = tostring(raw)
+                return array('H', struct.unpack('!%dH' % (len(raw)//2), raw))
+            assert self.bitdepth < 8
+            width = self.width
+            # Samples per byte
+            spb = 8//self.bitdepth
+            out = array('B')
+            mask = 2**self.bitdepth - 1
+            shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
+            for o in raw:
+                out.extend(map(lambda i: mask&(o>>i), shifts))
+            return out[:width]
+
+        return itertools.imap(asvalues, rows)
+
+    def serialtoflat(self, bytes, width=None):
+        """Convert serial format (byte stream) pixel data to flat row
+        flat pixel.
+        """
+
+        if self.bitdepth == 8:
+            return bytes
+        if self.bitdepth == 16:
+            bytes = tostring(bytes)
+            return array('H',
+              struct.unpack('!%dH' % (len(bytes)//2), bytes))
+        assert self.bitdepth < 8
+        if width is None:
+            width = self.width
+        # Samples per byte
+        spb = 8//self.bitdepth
+        out = array('B')
+        mask = 2**self.bitdepth - 1
+        shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
+        l = width
+        for o in bytes:
+            out.extend([(mask&(o>>s)) for s in shifts][:l])
+            l -= spb
+            if l <= 0:
+                l = width
+        return out
+
+    def iterstraight(self, raw):
+        """Iterator that undoes the effect of filtering, and yields each
+        row in serialised format (as a sequence of bytes).  Assumes input
+        is straightlaced.  `raw` should be an iterable that yields the
+        raw bytes in chunks of arbitrary size."""
+
+        # length of row, in bytes
+        rb = self.row_bytes
+        a = array('B')
+        # The previous (reconstructed) scanline.  None indicates first
+        # line of image.
+        recon = None
+        for some in raw:
+            a.extend(some)
+            while len(a) >= rb + 1:
+                filter_type = a[0]
+                scanline = a[1:rb+1]
+                del a[:rb+1]
+                recon = self.undo_filter(filter_type, scanline, recon)
+                yield recon
+        if len(a) != 0:
+            # :file:format We get here with a file format error: when the
+            # available bytes (after decompressing) do not pack into exact
+            # rows.
+            raise FormatError(
+              'Wrong size for decompressed IDAT chunk.')
+        assert len(a) == 0
+
+    def validate_signature(self):
+        """If signature (header) has not been read then read and
+        validate it; otherwise do nothing.
+        """
+
+        if self.signature:
+            return
+        self.signature = self.file.read(8)
+        if self.signature != _signature:
+            raise FormatError("PNG file has invalid signature.")
+
+    def preamble(self, lenient=False):
+        """
+        Extract the image metadata by reading the initial part of the PNG
+        file up to the start of the ``IDAT`` chunk.  All the chunks that
+        precede the ``IDAT`` chunk are read and either processed for
+        metadata or discarded.
+
+        If the optional `lenient` argument evaluates to True,
+        checksum failures will raise warnings rather than exceptions.
+        """
+
+        self.validate_signature()
+
+        while True:
+            if not self.atchunk:
+                self.atchunk = self.chunklentype()
+                if self.atchunk is None:
+                    raise FormatError(
+                      'This PNG file has no IDAT chunks.')
+            if self.atchunk[1] == 'IDAT':
+                return
+            self.process_chunk(lenient=lenient)
+
+    def chunklentype(self):
+        """Reads just enough of the input to determine the next
+        chunk's length and type, returned as a (*length*, *type*) pair
+        where *type* is a string.  If there are no more chunks, ``None``
+        is returned.
+        """
+
+        x = self.file.read(8)
+        if not x:
+            return None
+        if len(x) != 8:
+            raise FormatError(
+              'End of file whilst reading chunk length and type.')
+        length,type = struct.unpack('!I4s', x)
+        type = bytestostr(type)
+        if length > 2**31-1:
+            raise FormatError('Chunk %s is too large: %d.' % (type,length))
+        return length,type
+
+    def process_chunk(self, lenient=False):
+        """Process the next chunk and its data.  This only processes the
+        following chunk types, all others are ignored: ``IHDR``,
+        ``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``.
+
+        If the optional `lenient` argument evaluates to True,
+        checksum failures will raise warnings rather than exceptions.
+        """
+
+        type, data = self.chunk(lenient=lenient)
+        if type == 'IHDR':
+            # http://www.w3.org/TR/PNG/#11IHDR
+            if len(data) != 13:
+                raise FormatError('IHDR chunk has incorrect length.')
+            (self.width, self.height, self.bitdepth, self.color_type,
+             self.compression, self.filter,
+             self.interlace) = struct.unpack("!2I5B", data)
+
+            # Check that the header specifies only valid combinations.
+            if self.bitdepth not in (1,2,4,8,16):
+                raise Error("invalid bit depth %d" % self.bitdepth)
+            if self.color_type not in (0,2,3,4,6):
+                raise Error("invalid colour type %d" % self.color_type)
+            # Check indexed (palettized) images have 8 or fewer bits
+            # per pixel; check only indexed or greyscale images have
+            # fewer than 8 bits per pixel.
+            if ((self.color_type & 1 and self.bitdepth > 8) or
+                (self.bitdepth < 8 and self.color_type not in (0,3))):
+                raise FormatError("Illegal combination of bit depth (%d)"
+                  " and colour type (%d)."
+                  " See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ."
+                  % (self.bitdepth, self.color_type))
+            if self.compression != 0:
+                raise Error("unknown compression method %d" % self.compression)
+            if self.filter != 0:
+                raise FormatError("Unknown filter method %d,"
+                  " see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ."
+                  % self.filter)
+            if self.interlace not in (0,1):
+                raise FormatError("Unknown interlace method %d,"
+                  " see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ."
+                  % self.interlace)
+
+            # Derived values
+            # http://www.w3.org/TR/PNG/#6Colour-values
+            colormap =  bool(self.color_type & 1)
+            greyscale = not (self.color_type & 2)
+            alpha = bool(self.color_type & 4)
+            color_planes = (3,1)[greyscale or colormap]
+            planes = color_planes + alpha
+
+            self.colormap = colormap
+            self.greyscale = greyscale
+            self.alpha = alpha
+            self.color_planes = color_planes
+            self.planes = planes
+            self.psize = float(self.bitdepth)/float(8) * planes
+            if int(self.psize) == self.psize:
+                self.psize = int(self.psize)
+            self.row_bytes = int(math.ceil(self.width * self.psize))
+            # Stores PLTE chunk if present, and is used to check
+            # chunk ordering constraints.
+            self.plte = None
+            # Stores tRNS chunk if present, and is used to check chunk
+            # ordering constraints.
+            self.trns = None
+            # Stores sbit chunk if present.
+            self.sbit = None
+        elif type == 'PLTE':
+            # http://www.w3.org/TR/PNG/#11PLTE
+            if self.plte:
+                warnings.warn("Multiple PLTE chunks present.")
+            self.plte = data
+            if len(data) % 3 != 0:
+                raise FormatError(
+                  "PLTE chunk's length should be a multiple of 3.")
+            if len(data) > (2**self.bitdepth)*3:
+                raise FormatError("PLTE chunk is too long.")
+            if len(data) == 0:
+                raise FormatError("Empty PLTE is not allowed.")
+        elif type == 'bKGD':
+            try:
+                if self.colormap:
+                    if not self.plte:
+                        warnings.warn(
+                          "PLTE chunk is required before bKGD chunk.")
+                    self.background = struct.unpack('B', data)
+                else:
+                    self.background = struct.unpack("!%dH" % self.color_planes,
+                      data)
+            except struct.error:
+                raise FormatError("bKGD chunk has incorrect length.")
+        elif type == 'tRNS':
+            # http://www.w3.org/TR/PNG/#11tRNS
+            self.trns = data
+            if self.colormap:
+                if not self.plte:
+                    warnings.warn("PLTE chunk is required before tRNS chunk.")
+                else:
+                    if len(data) > len(self.plte)/3:
+                        # Was warning, but promoted to Error as it
+                        # would otherwise cause pain later on.
+                        raise FormatError("tRNS chunk is too long.")
+            else:
+                if self.alpha:
+                    raise FormatError(
+                      "tRNS chunk is not valid with colour type %d." %
+                      self.color_type)
+                try:
+                    self.transparent = \
+                        struct.unpack("!%dH" % self.color_planes, data)
+                except struct.error:
+                    raise FormatError("tRNS chunk has incorrect length.")
+        elif type == 'gAMA':
+            try:
+                self.gamma = struct.unpack("!L", data)[0] / 100000.0
+            except struct.error:
+                raise FormatError("gAMA chunk has incorrect length.")
+        elif type == 'sBIT':
+            self.sbit = data
+            if (self.colormap and len(data) != 3 or
+                not self.colormap and len(data) != self.planes):
+                raise FormatError("sBIT chunk has incorrect length.")
+
+    def read(self, lenient=False):
+        """
+        Read the PNG file and decode it.  Returns (`width`, `height`,
+        `pixels`, `metadata`).
+
+        May use excessive memory.
+
+        `pixels` are returned in boxed row flat pixel format.
+
+        If the optional `lenient` argument evaluates to True,
+        checksum failures will raise warnings rather than exceptions.
+        """
+
+        def iteridat():
+            """Iterator that yields all the ``IDAT`` chunks as strings."""
+            while True:
+                try:
+                    type, data = self.chunk(lenient=lenient)
+                except ValueError, e:
+                    raise ChunkError(e.args[0])
+                if type == 'IEND':
+                    # http://www.w3.org/TR/PNG/#11IEND
+                    break
+                if type != 'IDAT':
+                    continue
+                # type == 'IDAT'
+                # http://www.w3.org/TR/PNG/#11IDAT
+                if self.colormap and not self.plte:
+                    warnings.warn("PLTE chunk is required before IDAT chunk")
+                yield data
+
+        def iterdecomp(idat):
+            """Iterator that yields decompressed strings.  `idat` should
+            be an iterator that yields the ``IDAT`` chunk data.
+            """
+
+            # Currently, with no max_length parameter to decompress, this
+            # routine will do one yield per IDAT chunk.  So not very
+            # incremental.
+            d = zlib.decompressobj()
+            # Each IDAT chunk is passed to the decompressor, then any
+            # remaining state is decompressed out.
+            for data in idat:
+                # :todo: add a max_length argument here to limit output
+                # size.
+                yield array('B', d.decompress(data))
+            yield array('B', d.flush())
+
+        self.preamble(lenient=lenient)
+        raw = iterdecomp(iteridat())
+
+        if self.interlace:
+            raw = array('B', itertools.chain(*raw))
+            arraycode = 'BH'[self.bitdepth>8]
+            # Like :meth:`group` but producing an array.array object for
+            # each row.
+            pixels = itertools.imap(lambda *row: array(arraycode, row),
+                       *[iter(self.deinterlace(raw))]*self.width*self.planes)
+        else:
+            pixels = self.iterboxed(self.iterstraight(raw))
+        meta = dict()
+        for attr in 'greyscale alpha planes bitdepth interlace'.split():
+            meta[attr] = getattr(self, attr)
+        meta['size'] = (self.width, self.height)
+        for attr in 'gamma transparent background'.split():
+            a = getattr(self, attr, None)
+            if a is not None:
+                meta[attr] = a
+        if self.plte:
+            meta['palette'] = self.palette()
+        return self.width, self.height, pixels, meta
+
+
+    def read_flat(self):
+        """
+        Read a PNG file and decode it into flat row flat pixel format.
+        Returns (*width*, *height*, *pixels*, *metadata*).
+
+        May use excessive memory.
+
+        `pixels` are returned in flat row flat pixel format.
+
+        See also the :meth:`read` method which returns pixels in the
+        more stream-friendly boxed row flat pixel format.
+        """
+
+        x, y, pixel, meta = self.read()
+        arraycode = 'BH'[meta['bitdepth']>8]
+        pixel = array(arraycode, itertools.chain(*pixel))
+        return x, y, pixel, meta
+
+    def palette(self, alpha='natural'):
+        """Returns a palette that is a sequence of 3-tuples or 4-tuples,
+        synthesizing it from the ``PLTE`` and ``tRNS`` chunks.  These
+        chunks should have already been processed (for example, by
+        calling the :meth:`preamble` method).  All the tuples are the
+        same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when
+        there is a ``tRNS`` chunk.  Assumes that the image is colour type
+        3 and therefore a ``PLTE`` chunk is required.
+
+        If the `alpha` argument is ``'force'`` then an alpha channel is
+        always added, forcing the result to be a sequence of 4-tuples.
+        """
+
+        if not self.plte:
+            raise FormatError(
+                "Required PLTE chunk is missing in colour type 3 image.")
+        plte = group(array('B', self.plte), 3)
+        if self.trns or alpha == 'force':
+            trns = array('B', self.trns or '')
+            trns.extend([255]*(len(plte)-len(trns)))
+            plte = map(operator.add, plte, group(trns, 1))
+        return plte
+
+    def asDirect(self):
+        """Returns the image data as a direct representation of an
+        ``x * y * planes`` array.  This method is intended to remove the
+        need for callers to deal with palettes and transparency
+        themselves.  Images with a palette (colour type 3)
+        are converted to RGB or RGBA; images with transparency (a
+        ``tRNS`` chunk) are converted to LA or RGBA as appropriate.
+        When returned in this format the pixel values represent the
+        colour value directly without needing to refer to palettes or
+        transparency information.
+
+        Like the :meth:`read` method this method returns a 4-tuple:
+
+        (*width*, *height*, *pixels*, *meta*)
+
+        This method normally returns pixel values with the bit depth
+        they have in the source image, but when the source PNG has an
+        ``sBIT`` chunk it is inspected and can reduce the bit depth of
+        the result pixels; pixel values will be reduced according to
+        the bit depth specified in the ``sBIT`` chunk (PNG nerds should
+        note a single result bit depth is used for all channels; the
+        maximum of the ones specified in the ``sBIT`` chunk.  An RGB565
+        image will be rescaled to 6-bit RGB666).
+
+        The *meta* dictionary that is returned reflects the `direct`
+        format and not the original source image.  For example, an RGB
+        source image with a ``tRNS`` chunk to represent a transparent
+        colour, will have ``planes=3`` and ``alpha=False`` for the
+        source image, but the *meta* dictionary returned by this method
+        will have ``planes=4`` and ``alpha=True`` because an alpha
+        channel is synthesized and added.
+
+        *pixels* is the pixel data in boxed row flat pixel format (just
+        like the :meth:`read` method).
+
+        All the other aspects of the image data are not changed.
+        """
+
+        self.preamble()
+
+        # Simple case, no conversion necessary.
+        if not self.colormap and not self.trns and not self.sbit:
+            return self.read()
+
+        x,y,pixels,meta = self.read()
+
+        if self.colormap:
+            meta['colormap'] = False
+            meta['alpha'] = bool(self.trns)
+            meta['bitdepth'] = 8
+            meta['planes'] = 3 + bool(self.trns)
+            plte = self.palette()
+            def iterpal(pixels):
+                for row in pixels:
+                    row = map(plte.__getitem__, row)
+                    yield array('B', itertools.chain(*row))
+            pixels = iterpal(pixels)
+        elif self.trns:
+            # It would be nice if there was some reasonable way of doing
+            # this without generating a whole load of intermediate tuples.
+            # But tuples do seem like the easiest way, with no other way
+            # clearly much simpler or much faster.  (Actually, the L to LA
+            # conversion could perhaps go faster (all those 1-tuples!), but
+            # I still wonder whether the code proliferation is worth it)
+            it = self.transparent
+            maxval = 2**meta['bitdepth']-1
+            planes = meta['planes']
+            meta['alpha'] = True
+            meta['planes'] += 1
+            typecode = 'BH'[meta['bitdepth']>8]
+            def itertrns(pixels):
+                for row in pixels:
+                    # For each row we group it into pixels, then form a
+                    # characterisation vector that says whether each pixel
+                    # is opaque or not.  Then we convert True/False to
+                    # 0/maxval (by multiplication), and add it as the extra
+                    # channel.
+                    row = group(row, planes)
+                    opa = map(it.__ne__, row)
+                    opa = map(maxval.__mul__, opa)
+                    opa = zip(opa) # convert to 1-tuples
+                    yield array(typecode,
+                      itertools.chain(*map(operator.add, row, opa)))
+            pixels = itertrns(pixels)
+        targetbitdepth = None
+        if self.sbit:
+            sbit = struct.unpack('%dB' % len(self.sbit), self.sbit)
+            targetbitdepth = max(sbit)
+            if targetbitdepth > meta['bitdepth']:
+                raise Error('sBIT chunk %r exceeds bitdepth %d' %
+                    (sbit,self.bitdepth))
+            if min(sbit) <= 0:
+                raise Error('sBIT chunk %r has a 0-entry' % sbit)
+            if targetbitdepth == meta['bitdepth']:
+                targetbitdepth = None
+        if targetbitdepth:
+            shift = meta['bitdepth'] - targetbitdepth
+            meta['bitdepth'] = targetbitdepth
+            def itershift(pixels):
+                for row in pixels:
+                    yield map(shift.__rrshift__, row)
+            pixels = itershift(pixels)
+        return x,y,pixels,meta
+
+    def asFloat(self, maxval=1.0):
+        """Return image pixels as per :meth:`asDirect` method, but scale
+        all pixel values to be floating point values between 0.0 and
+        *maxval*.
+        """
+
+        x,y,pixels,info = self.asDirect()
+        sourcemaxval = 2**info['bitdepth']-1
+        del info['bitdepth']
+        info['maxval'] = float(maxval)
+        factor = float(maxval)/float(sourcemaxval)
+        def iterfloat():
+            for row in pixels:
+                yield map(factor.__mul__, row)
+        return x,y,iterfloat(),info
+
+    def _as_rescale(self, get, targetbitdepth):
+        """Helper used by :meth:`asRGB8` and :meth:`asRGBA8`."""
+
+        width,height,pixels,meta = get()
+        maxval = 2**meta['bitdepth'] - 1
+        targetmaxval = 2**targetbitdepth - 1
+        factor = float(targetmaxval) / float(maxval)
+        meta['bitdepth'] = targetbitdepth
+        def iterscale():
+            for row in pixels:
+                yield map(lambda x: int(round(x*factor)), row)
+        if maxval == targetmaxval:
+            return width, height, pixels, meta
+        else:
+            return width, height, iterscale(), meta
+
+    def asRGB8(self):
+        """Return the image data as an RGB pixels with 8-bits per
+        sample.  This is like the :meth:`asRGB` method except that
+        this method additionally rescales the values so that they
+        are all between 0 and 255 (8-bit).  In the case where the
+        source image has a bit depth < 8 the transformation preserves
+        all the information; where the source image has bit depth
+        > 8, then rescaling to 8-bit values loses precision.  No
+        dithering is performed.  Like :meth:`asRGB`, an alpha channel
+        in the source image will raise an exception.
+
+        This function returns a 4-tuple:
+        (*width*, *height*, *pixels*, *metadata*).
+        *width*, *height*, *metadata* are as per the :meth:`read` method.
+        
+        *pixels* is the pixel data in boxed row flat pixel format.
+        """
+
+        return self._as_rescale(self.asRGB, 8)
+
+    def asRGBA8(self):
+        """Return the image data as RGBA pixels with 8-bits per
+        sample.  This method is similar to :meth:`asRGB8` and
+        :meth:`asRGBA`:  The result pixels have an alpha channel, *and*
+        values are rescaled to the range 0 to 255.  The alpha channel is
+        synthesized if necessary (with a small speed penalty).
+        """
+
+        return self._as_rescale(self.asRGBA, 8)
+
+    def asRGB(self):
+        """Return image as RGB pixels.  RGB colour images are passed
+        through unchanged; greyscales are expanded into RGB
+        triplets (there is a small speed overhead for doing this).
+
+        An alpha channel in the source image will raise an
+        exception.
+
+        The return values are as for the :meth:`read` method
+        except that the *metadata* reflect the returned pixels, not the
+        source image.  In particular, for this method
+        ``metadata['greyscale']`` will be ``False``.
+        """
+
+        width,height,pixels,meta = self.asDirect()
+        if meta['alpha']:
+            raise Error("will not convert image with alpha channel to RGB")
+        if not meta['greyscale']:
+            return width,height,pixels,meta
+        meta['greyscale'] = False
+        typecode = 'BH'[meta['bitdepth'] > 8]
+        def iterrgb():
+            for row in pixels:
+                a = array(typecode, [0]) * 3 * width
+                for i in range(3):
+                    a[i::3] = row
+                yield a
+        return width,height,iterrgb(),meta
+
+    def asRGBA(self):
+        """Return image as RGBA pixels.  Greyscales are expanded into
+        RGB triplets; an alpha channel is synthesized if necessary.
+        The return values are as for the :meth:`read` method
+        except that the *metadata* reflect the returned pixels, not the
+        source image.  In particular, for this method
+        ``metadata['greyscale']`` will be ``False``, and
+        ``metadata['alpha']`` will be ``True``.
+        """
+
+        width,height,pixels,meta = self.asDirect()
+        if meta['alpha'] and not meta['greyscale']:
+            return width,height,pixels,meta
+        typecode = 'BH'[meta['bitdepth'] > 8]
+        maxval = 2**meta['bitdepth'] - 1
+        maxbuffer = struct.pack('=' + typecode, maxval) * 4 * width
+        def newarray():
+            return array(typecode, maxbuffer)
+
+        if meta['alpha'] and meta['greyscale']:
+            # LA to RGBA
+            def convert():
+                for row in pixels:
+                    # Create a fresh target row, then copy L channel
+                    # into first three target channels, and A channel
+                    # into fourth channel.
+                    a = newarray()
+                    pngfilters.convert_la_to_rgba(row, a)
+                    yield a
+        elif meta['greyscale']:
+            # L to RGBA
+            def convert():
+                for row in pixels:
+                    a = newarray()
+                    pngfilters.convert_l_to_rgba(row, a)
+                    yield a
+        else:
+            assert not meta['alpha'] and not meta['greyscale']
+            # RGB to RGBA
+            def convert():
+                for row in pixels:
+                    a = newarray()
+                    pngfilters.convert_rgb_to_rgba(row, a)
+                    yield a
+        meta['alpha'] = True
+        meta['greyscale'] = False
+        return width,height,convert(),meta
+
+
+# === Legacy Version Support ===
+
+# :pyver:old:  PyPNG works on Python versions 2.3 and 2.2, but not
+# without some awkward problems.  Really PyPNG works on Python 2.4 (and
+# above); it works on Pythons 2.3 and 2.2 by virtue of fixing up
+# problems here.  It's a bit ugly (which is why it's hidden down here).
+#
+# Generally the strategy is one of pretending that we're running on
+# Python 2.4 (or above), and patching up the library support on earlier
+# versions so that it looks enough like Python 2.4.  When it comes to
+# Python 2.2 there is one thing we cannot patch: extended slices
+# http://www.python.org/doc/2.3/whatsnew/section-slices.html.
+# Instead we simply declare that features that are implemented using
+# extended slices will not work on Python 2.2.
+#
+# In order to work on Python 2.3 we fix up a recurring annoyance involving
+# the array type.  In Python 2.3 an array cannot be initialised with an
+# array, and it cannot be extended with a list (or other sequence).
+# Both of those are repeated issues in the code.  Whilst I would not
+# normally tolerate this sort of behaviour, here we "shim" a replacement
+# for array into place (and hope no-one notices).  You never read this.
+#
+# In an amusing case of warty hacks on top of warty hacks... the array
+# shimming we try and do only works on Python 2.3 and above (you can't
+# subclass array.array in Python 2.2).  So to get it working on Python
+# 2.2 we go for something much simpler and (probably) way slower.
+try:
+    array('B').extend([])
+    array('B', array('B'))
+except:
+    # Expect to get here on Python 2.3
+    try:
+        class _array_shim(array):
+            true_array = array
+            def __new__(cls, typecode, init=None):
+                super_new = super(_array_shim, cls).__new__
+                it = super_new(cls, typecode)
+                if init is None:
+                    return it
+                it.extend(init)
+                return it
+            def extend(self, extension):
+                super_extend = super(_array_shim, self).extend
+                if isinstance(extension, self.true_array):
+                    return super_extend(extension)
+                if not isinstance(extension, (list, str)):
+                    # Convert to list.  Allows iterators to work.
+                    extension = list(extension)
+                return super_extend(self.true_array(self.typecode, extension))
+        array = _array_shim
+    except:
+        # Expect to get here on Python 2.2
+        def array(typecode, init=()):
+            if type(init) == str:
+                return map(ord, init)
+            return list(init)
+
+# Further hacks to get it limping along on Python 2.2
+try:
+    enumerate
+except:
+    def enumerate(seq):
+        i=0
+        for x in seq:
+            yield i,x
+            i += 1
+
+try:
+    reversed
+except:
+    def reversed(l):
+        l = list(l)
+        l.reverse()
+        for x in l:
+            yield x
+
+try:
+    itertools
+except:
+    class _dummy_itertools:
+        pass
+    itertools = _dummy_itertools()
+    def _itertools_imap(f, seq):
+        for x in seq:
+            yield f(x)
+    itertools.imap = _itertools_imap
+    def _itertools_chain(*iterables):
+        for it in iterables:
+            for element in it:
+                yield element
+    itertools.chain = _itertools_chain
+
+
+# === Support for users without Cython ===
+
+try:
+    pngfilters
+except:
+    class pngfilters(object):
+        def undo_filter_sub(filter_unit, scanline, previous, result):
+            """Undo sub filter."""
+
+            ai = 0
+            # The loop starts at index filter_unit.  Observe that the
+            # initial part of the result is already filled in correctly
+            # with scanline.
+            for i in range(filter_unit, len(result)):
+                x = scanline[i]
+                a = result[ai]
+                result[i] = (x + a) & 0xff
+                ai += 1
+        undo_filter_sub = staticmethod(undo_filter_sub)
+
+        def undo_filter_up(filter_unit, scanline, previous, result):
+            """Undo up filter."""
+
+            for i in range(len(result)):
+                x = scanline[i]
+                b = previous[i]
+                result[i] = (x + b) & 0xff
+        undo_filter_up = staticmethod(undo_filter_up)
+
+        def undo_filter_average(filter_unit, scanline, previous, result):
+            """Undo up filter."""
+
+            ai = -filter_unit
+            for i in range(len(result)):
+                x = scanline[i]
+                if ai < 0:
+                    a = 0
+                else:
+                    a = result[ai]
+                b = previous[i]
+                result[i] = (x + ((a + b) >> 1)) & 0xff
+                ai += 1
+        undo_filter_average = staticmethod(undo_filter_average)
+
+        def undo_filter_paeth(filter_unit, scanline, previous, result):
+            """Undo Paeth filter."""
+
+            # Also used for ci.
+            ai = -filter_unit
+            for i in range(len(result)):
+                x = scanline[i]
+                if ai < 0:
+                    a = c = 0
+                else:
+                    a = result[ai]
+                    c = previous[ai]
+                b = previous[i]
+                p = a + b - c
+                pa = abs(p - a)
+                pb = abs(p - b)
+                pc = abs(p - c)
+                if pa <= pb and pa <= pc:
+                    pr = a
+                elif pb <= pc:
+                    pr = b
+                else:
+                    pr = c
+                result[i] = (x + pr) & 0xff
+                ai += 1
+        undo_filter_paeth = staticmethod(undo_filter_paeth)
+
+        def convert_la_to_rgba(row, result):
+            for i in range(3):
+                result[i::4] = row[0::2]
+            result[3::4] = row[1::2]
+        convert_la_to_rgba = staticmethod(convert_la_to_rgba)
+
+        def convert_l_to_rgba(row, result):
+            """Convert a grayscale image to RGBA. This method assumes the alpha
+            channel in result is already correctly initialized."""
+            for i in range(3):
+                result[i::4] = row
+        convert_l_to_rgba = staticmethod(convert_l_to_rgba)
+
+        def convert_rgb_to_rgba(row, result):
+            """Convert an RGB image to RGBA. This method assumes the alpha
+            channel in result is already correctly initialized."""
+            for i in range(3):
+                result[i::4] = row[i::3]
+        convert_rgb_to_rgba = staticmethod(convert_rgb_to_rgba)
+
+
+# === Internal Test Support ===
+
+# This section comprises the tests that are internally validated (as
+# opposed to tests which produce output files that are externally
+# validated).  Primarily they are unittests.
+
+# Note that it is difficult to internally validate the results of
+# writing a PNG file.  The only thing we can do is read it back in
+# again, which merely checks consistency, not that the PNG file we
+# produce is valid.
+
+# Run the tests from the command line:
+# python -c 'import png;png.test()'
+
+# (For an in-memory binary file IO object) We use BytesIO where
+# available, otherwise we use StringIO, but name it BytesIO.
+try:
+    from io import BytesIO
+except:
+    from StringIO import StringIO as BytesIO
+import tempfile
+# http://www.python.org/doc/2.4.4/lib/module-unittest.html
+import unittest
+
+
+def test():
+    unittest.main(__name__)
+
+def topngbytes(name, rows, x, y, **k):
+    """Convenience function for creating a PNG file "in memory" as a
+    string.  Creates a :class:`Writer` instance using the keyword arguments,
+    then passes `rows` to its :meth:`Writer.write` method.  The resulting
+    PNG file is returned as a string.  `name` is used to identify the file for
+    debugging.
+    """
+
+    import os
+
+    print name
+    f = BytesIO()
+    w = Writer(x, y, **k)
+    w.write(f, rows)
+    if os.environ.get('PYPNG_TEST_TMP'):
+        w = open(name, 'wb')
+        w.write(f.getvalue())
+        w.close()
+    return f.getvalue()
+
+def testWithIO(inp, out, f):
+    """Calls the function `f` with ``sys.stdin`` changed to `inp`
+    and ``sys.stdout`` changed to `out`.  They are restored when `f`
+    returns.  This function returns whatever `f` returns.
+    """
+
+    import os
+
+    try:
+        oldin,sys.stdin = sys.stdin,inp
+        oldout,sys.stdout = sys.stdout,out
+        x = f()
+    finally:
+        sys.stdin = oldin
+        sys.stdout = oldout
+    if os.environ.get('PYPNG_TEST_TMP') and hasattr(out,'getvalue'):
+        name = mycallersname()
+        if name:
+            w = open(name+'.png', 'wb')
+            w.write(out.getvalue())
+            w.close()
+    return x
+
+def mycallersname():
+    """Returns the name of the caller of the caller of this function
+    (hence the name of the caller of the function in which
+    "mycallersname()" textually appears).  Returns None if this cannot
+    be determined."""
+
+    # http://docs.python.org/library/inspect.html#the-interpreter-stack
+    import inspect
+
+    frame = inspect.currentframe()
+    if not frame:
+        return None
+    frame_,filename_,lineno_,funname,linelist_,listi_ = (
+      inspect.getouterframes(frame)[2])
+    return funname
+
+def seqtobytes(s):
+    """Convert a sequence of integers to a *bytes* instance.  Good for
+    plastering over Python 2 / Python 3 cracks.
+    """
+
+    return strtobytes(''.join(chr(x) for x in s))
+
+class Test(unittest.TestCase):
+    # This member is used by the superclass.  If we don't define a new
+    # class here then when we use self.assertRaises() and the PyPNG code
+    # raises an assertion then we get no proper traceback.  I can't work
+    # out why, but defining a new class here means we get a proper
+    # traceback.
+    class failureException(Exception):
+        pass
+
+    def helperLN(self, n):
+        mask = (1 << n) - 1
+        # Use small chunk_limit so that multiple chunk writing is
+        # tested.  Making it a test for Issue 20.
+        w = Writer(15, 17, greyscale=True, bitdepth=n, chunk_limit=99)
+        f = BytesIO()
+        w.write_array(f, array('B', map(mask.__and__, range(1, 256))))
+        r = Reader(bytes=f.getvalue())
+        x,y,pixels,meta = r.read()
+        self.assertEqual(x, 15)
+        self.assertEqual(y, 17)
+        self.assertEqual(list(itertools.chain(*pixels)),
+                         map(mask.__and__, range(1,256)))
+    def testL8(self):
+        return self.helperLN(8)
+    def testL4(self):
+        return self.helperLN(4)
+    def testL2(self):
+        "Also tests asRGB8."
+        w = Writer(1, 4, greyscale=True, bitdepth=2)
+        f = BytesIO()
+        w.write_array(f, array('B', range(4)))
+        r = Reader(bytes=f.getvalue())
+        x,y,pixels,meta = r.asRGB8()
+        self.assertEqual(x, 1)
+        self.assertEqual(y, 4)
+        for i,row in enumerate(pixels):
+            self.assertEqual(len(row), 3)
+            self.assertEqual(list(row), [0x55*i]*3)
+    def testP2(self):
+        "2-bit palette."
+        a = (255,255,255)
+        b = (200,120,120)
+        c = (50,99,50)
+        w = Writer(1, 4, bitdepth=2, palette=[a,b,c])
+        f = BytesIO()
+        w.write_array(f, array('B', (0,1,1,2)))
+        r = Reader(bytes=f.getvalue())
+        x,y,pixels,meta = r.asRGB8()
+        self.assertEqual(x, 1)
+        self.assertEqual(y, 4)
+        self.assertEqual(map(list, pixels), map(list, [a, b, b, c]))
+    def testPtrns(self):
+        "Test colour type 3 and tRNS chunk (and 4-bit palette)."
+        a = (50,99,50,50)
+        b = (200,120,120,80)
+        c = (255,255,255)
+        d = (200,120,120)
+        e = (50,99,50)
+        w = Writer(3, 3, bitdepth=4, palette=[a,b,c,d,e])
+        f = BytesIO()
+        w.write_array(f, array('B', (4, 3, 2, 3, 2, 0, 2, 0, 1)))
+        r = Reader(bytes=f.getvalue())
+        x,y,pixels,meta = r.asRGBA8()
+        self.assertEqual(x, 3)
+        self.assertEqual(y, 3)
+        c = c+(255,)
+        d = d+(255,)
+        e = e+(255,)
+        boxed = [(e,d,c),(d,c,a),(c,a,b)]
+        flat = map(lambda row: itertools.chain(*row), boxed)
+        self.assertEqual(map(list, pixels), map(list, flat))
+    def testRGBtoRGBA(self):
+        "asRGBA8() on colour type 2 source."""
+        # Test for Issue 26
+        r = Reader(bytes=_pngsuite['basn2c08'])
+        x,y,pixels,meta = r.asRGBA8()
+        # Test the pixels at row 9 columns 0 and 1.
+        row9 = list(pixels)[9]
+        self.assertEqual(list(row9[0:8]),
+                         [0xff, 0xdf, 0xff, 0xff, 0xff, 0xde, 0xff, 0xff])
+    def testLtoRGBA(self):
+        "asRGBA() on grey source."""
+        # Test for Issue 60
+        r = Reader(bytes=_pngsuite['basi0g08'])
+        x,y,pixels,meta = r.asRGBA()
+        row9 = list(list(pixels)[9])
+        self.assertEqual(row9[0:8],
+          [222, 222, 222, 255, 221, 221, 221, 255])
+    def testCtrns(self):
+        "Test colour type 2 and tRNS chunk."
+        # Test for Issue 25
+        r = Reader(bytes=_pngsuite['tbrn2c08'])
+        x,y,pixels,meta = r.asRGBA8()
+        # I just happen to know that the first pixel is transparent.
+        # In particular it should be #7f7f7f00
+        row0 = list(pixels)[0]
+        self.assertEqual(tuple(row0[0:4]), (0x7f, 0x7f, 0x7f, 0x00))
+    def testAdam7read(self):
+        """Adam7 interlace reading.
+        Specifically, test that for images in the PngSuite that
+        have both an interlaced and straightlaced pair that both
+        images from the pair produce the same array of pixels."""
+        for candidate in _pngsuite:
+            if not candidate.startswith('basn'):
+                continue
+            candi = candidate.replace('n', 'i')
+            if candi not in _pngsuite:
+                continue
+            print 'adam7 read', candidate
+            straight = Reader(bytes=_pngsuite[candidate])
+            adam7 = Reader(bytes=_pngsuite[candi])
+            # Just compare the pixels.  Ignore x,y (because they're
+            # likely to be correct?); metadata is ignored because the
+            # "interlace" member differs.  Lame.
+            straight = straight.read()[2]
+            adam7 = adam7.read()[2]
+            self.assertEqual(map(list, straight), map(list, adam7))
+    def testAdam7write(self):
+        """Adam7 interlace writing.
+        For each test image in the PngSuite, write an interlaced
+        and a straightlaced version.  Decode both, and compare results.
+        """
+        # Not such a great test, because the only way we can check what
+        # we have written is to read it back again.
+
+        for name,bytes in _pngsuite.items():
+            # Only certain colour types supported for this test.
+            if name[3:5] not in ['n0', 'n2', 'n4', 'n6']:
+                continue
+            it = Reader(bytes=bytes)
+            x,y,pixels,meta = it.read()
+            pngi = topngbytes('adam7wn'+name+'.png', pixels,
+              x=x, y=y, bitdepth=it.bitdepth,
+              greyscale=it.greyscale, alpha=it.alpha,
+              transparent=it.transparent,
+              interlace=False)
+            x,y,ps,meta = Reader(bytes=pngi).read()
+            it = Reader(bytes=bytes)
+            x,y,pixels,meta = it.read()
+            pngs = topngbytes('adam7wi'+name+'.png', pixels,
+              x=x, y=y, bitdepth=it.bitdepth,
+              greyscale=it.greyscale, alpha=it.alpha,
+              transparent=it.transparent,
+              interlace=True)
+            x,y,pi,meta = Reader(bytes=pngs).read()
+            self.assertEqual(map(list, ps), map(list, pi))
+    def testPGMin(self):
+        """Test that the command line tool can read PGM files."""
+        def do():
+            return _main(['testPGMin'])
+        s = BytesIO()
+        s.write(strtobytes('P5 2 2 3\n'))
+        s.write(strtobytes('\x00\x01\x02\x03'))
+        s.flush()
+        s.seek(0)
+        o = BytesIO()
+        testWithIO(s, o, do)
+        r = Reader(bytes=o.getvalue())
+        x,y,pixels,meta = r.read()
+        self.assertTrue(r.greyscale)
+        self.assertEqual(r.bitdepth, 2)
+    def testPAMin(self):
+        """Test that the command line tool can read PAM file."""
+        def do():
+            return _main(['testPAMin'])
+        s = BytesIO()
+        s.write(strtobytes('P7\nWIDTH 3\nHEIGHT 1\nDEPTH 4\nMAXVAL 255\n'
+                'TUPLTYPE RGB_ALPHA\nENDHDR\n'))
+        # The pixels in flat row flat pixel format
+        flat =  [255,0,0,255, 0,255,0,120, 0,0,255,30]
+        asbytes = seqtobytes(flat)
+        s.write(asbytes)
+        s.flush()
+        s.seek(0)
+        o = BytesIO()
+        testWithIO(s, o, do)
+        r = Reader(bytes=o.getvalue())
+        x,y,pixels,meta = r.read()
+        self.assertTrue(r.alpha)
+        self.assertTrue(not r.greyscale)
+        self.assertEqual(list(itertools.chain(*pixels)), flat)
+    def testLA4(self):
+        """Create an LA image with bitdepth 4."""
+        bytes = topngbytes('la4.png', [[5, 12]], 1, 1,
+          greyscale=True, alpha=True, bitdepth=4)
+        sbit = Reader(bytes=bytes).chunk('sBIT')[1]
+        self.assertEqual(sbit, strtobytes('\x04\x04'))
+    def testPal(self):
+        """Test that a palette PNG returns the palette in info."""
+        r = Reader(bytes=_pngsuite['basn3p04'])
+        x,y,pixels,info = r.read()
+        self.assertEqual(x, 32)
+        self.assertEqual(y, 32)
+        self.assertTrue('palette' in info)
+    def testPalWrite(self):
+        """Test metadata for paletted PNG can be passed from one PNG
+        to another."""
+        r = Reader(bytes=_pngsuite['basn3p04'])
+        x,y,pixels,info = r.read()
+        w = Writer(**info)
+        o = BytesIO()
+        w.write(o, pixels)
+        o.flush()
+        o.seek(0)
+        r = Reader(file=o)
+        _,_,_,again_info = r.read()
+        # Same palette
+        self.assertEqual(again_info['palette'], info['palette'])
+    def testPalExpand(self):
+        """Test that bitdepth can be used to fiddle with pallete image."""
+        r = Reader(bytes=_pngsuite['basn3p04'])
+        x,y,pixels,info = r.read()
+        pixels = [list(row) for row in pixels]
+        info['bitdepth'] = 8
+        w = Writer(**info)
+        o = BytesIO()
+        w.write(o, pixels)
+        o.flush()
+        o.seek(0)
+        r = Reader(file=o)
+        _,_,again_pixels,again_info = r.read()
+        # Same pixels
+        again_pixels = [list(row) for row in again_pixels]
+        self.assertEqual(again_pixels, pixels)
+
+    def testPNMsbit(self):
+        """Test that PNM files can generates sBIT chunk."""
+        def do():
+            return _main(['testPNMsbit'])
+        s = BytesIO()
+        s.write(strtobytes('P6 8 1 1\n'))
+        for pixel in range(8):
+            s.write(struct.pack('<I', (0x4081*pixel)&0x10101)[:3])
+        s.flush()
+        s.seek(0)
+        o = BytesIO()
+        testWithIO(s, o, do)
+        r = Reader(bytes=o.getvalue())
+        sbit = r.chunk('sBIT')[1]
+        self.assertEqual(sbit, strtobytes('\x01\x01\x01'))
+    def testLtrns0(self):
+        """Create greyscale image with tRNS chunk."""
+        return self.helperLtrns(0)
+    def testLtrns1(self):
+        """Using 1-tuple for transparent arg."""
+        return self.helperLtrns((0,))
+    def helperLtrns(self, transparent):
+        """Helper used by :meth:`testLtrns*`."""
+        pixels = zip([0x00, 0x38, 0x4c, 0x54, 0x5c, 0x40, 0x38, 0x00])
+        o = BytesIO()
+        w = Writer(8, 8, greyscale=True, bitdepth=1, transparent=transparent)
+        w.write_packed(o, pixels)
+        r = Reader(bytes=o.getvalue())
+        x,y,pixels,meta = r.asDirect()
+        self.assertTrue(meta['alpha'])
+        self.assertTrue(meta['greyscale'])
+        self.assertEqual(meta['bitdepth'], 1)
+    def testWinfo(self):
+        """Test the dictionary returned by a `read` method can be used
+        as args for :meth:`Writer`.
+        """
+        r = Reader(bytes=_pngsuite['basn2c16'])
+        info = r.read()[3]
+        w = Writer(**info)
+    def testPackedIter(self):
+        """Test iterator for row when using write_packed.
+
+        Indicative for Issue 47.
+        """
+        w = Writer(16, 2, greyscale=True, alpha=False, bitdepth=1)
+        o = BytesIO()
+        w.write_packed(o, [itertools.chain([0x0a], [0xaa]),
+                           itertools.chain([0x0f], [0xff])])
+        r = Reader(bytes=o.getvalue())
+        x,y,pixels,info = r.asDirect()
+        pixels = list(pixels)
+        self.assertEqual(len(pixels), 2)
+        self.assertEqual(len(pixels[0]), 16)
+    def testInterlacedArray(self):
+        """Test that reading an interlaced PNG yields each row as an
+        array."""
+        r = Reader(bytes=_pngsuite['basi0g08'])
+        list(r.read()[2])[0].tostring
+    def testTrnsArray(self):
+        """Test that reading a type 2 PNG with tRNS chunk yields each
+        row as an array (using asDirect)."""
+        r = Reader(bytes=_pngsuite['tbrn2c08'])
+        list(r.asDirect()[2])[0].tostring
+
+    # Invalid file format tests.  These construct various badly
+    # formatted PNG files, then feed them into a Reader.  When
+    # everything is working properly, we should get FormatError
+    # exceptions raised.
+    def testEmpty(self):
+        """Test empty file."""
+
+        r = Reader(bytes='')
+        self.assertRaises(FormatError, r.asDirect)
+    def testSigOnly(self):
+        """Test file containing just signature bytes."""
+
+        r = Reader(bytes=_signature)
+        self.assertRaises(FormatError, r.asDirect)
+    def testExtraPixels(self):
+        """Test file that contains too many pixels."""
+
+        def eachchunk(chunk):
+            if chunk[0] != 'IDAT':
+                return chunk
+            data = zlib.decompress(chunk[1])
+            data += strtobytes('\x00garbage')
+            data = zlib.compress(data)
+            chunk = (chunk[0], data)
+            return chunk
+        self.assertRaises(FormatError, self.helperFormat, eachchunk)
+    def testNotEnoughPixels(self):
+        def eachchunk(chunk):
+            if chunk[0] != 'IDAT':
+                return chunk
+            # Remove last byte.
+            data = zlib.decompress(chunk[1])
+            data = data[:-1]
+            data = zlib.compress(data)
+            return (chunk[0], data)
+        self.assertRaises(FormatError, self.helperFormat, eachchunk)
+    def helperFormat(self, f):
+        r = Reader(bytes=_pngsuite['basn0g01'])
+        o = BytesIO()
+        def newchunks():
+            for chunk in r.chunks():
+                yield f(chunk)
+        write_chunks(o, newchunks())
+        r = Reader(bytes=o.getvalue())
+        return list(r.asDirect()[2])
+    def testBadFilter(self):
+        def eachchunk(chunk):
+            if chunk[0] != 'IDAT':
+                return chunk
+            data = zlib.decompress(chunk[1])
+            # Corrupt the first filter byte
+            data = strtobytes('\x99') + data[1:]
+            data = zlib.compress(data)
+            return (chunk[0], data)
+        self.assertRaises(FormatError, self.helperFormat, eachchunk)
+
+    def testFlat(self):
+        """Test read_flat."""
+        import hashlib
+
+        r = Reader(bytes=_pngsuite['basn0g02'])
+        x,y,pixel,meta = r.read_flat()
+        d = hashlib.md5(seqtobytes(pixel)).digest()
+        self.assertEqual(_enhex(d), '255cd971ab8cd9e7275ff906e5041aa0')
+    def testfromarray(self):
+        img = from_array([[0, 0x33, 0x66], [0xff, 0xcc, 0x99]], 'L')
+        img.save('testfromarray.png')
+    def testfromarrayL16(self):
+        img = from_array(group(range(2**16), 256), 'L;16')
+        img.save('testL16.png')
+    def testfromarrayRGB(self):
+        img = from_array([[0,0,0, 0,0,1, 0,1,0, 0,1,1],
+                          [1,0,0, 1,0,1, 1,1,0, 1,1,1]], 'RGB;1')
+        o = BytesIO()
+        img.save(o)
+    def testfromarrayIter(self):
+        import itertools
+
+        i = itertools.islice(itertools.count(10), 20)
+        i = itertools.imap(lambda x: [x, x, x], i)
+        img = from_array(i, 'RGB;5', dict(height=20))
+        f = open('testiter.png', 'wb')
+        img.save(f)
+        f.close()
+
+    # numpy dependent tests.  These are skipped (with a message to
+    # sys.stderr) if numpy cannot be imported.
+    def testNumpyuint16(self):
+        """numpy uint16."""
+
+        try:
+            import numpy
+        except ImportError:
+            print >>sys.stderr, "skipping numpy test"
+            return
+
+        rows = [map(numpy.uint16, range(0,0x10000,0x5555))]
+        b = topngbytes('numpyuint16.png', rows, 4, 1,
+            greyscale=True, alpha=False, bitdepth=16)
+    def testNumpyuint8(self):
+        """numpy uint8."""
+
+        try:
+            import numpy
+        except ImportError:
+            print >>sys.stderr, "skipping numpy test"
+            return
+
+        rows = [map(numpy.uint8, range(0,0x100,0x55))]
+        b = topngbytes('numpyuint8.png', rows, 4, 1,
+            greyscale=True, alpha=False, bitdepth=8)
+    def testNumpybool(self):
+        """numpy bool."""
+
+        try:
+            import numpy
+        except ImportError:
+            print >>sys.stderr, "skipping numpy test"
+            return
+
+        rows = [map(numpy.bool, [0,1])]
+        b = topngbytes('numpybool.png', rows, 2, 1,
+            greyscale=True, alpha=False, bitdepth=1)
+    def testNumpyarray(self):
+        """numpy array."""
+        try:
+            import numpy
+        except ImportError:
+            print >>sys.stderr, "skipping numpy test"
+            return
+
+        pixels = numpy.array([[0,0x5555],[0x5555,0xaaaa]], numpy.uint16)
+        img = from_array(pixels, 'L')
+        img.save('testnumpyL16.png')
+
+    def paeth(self, x, a, b, c):
+        p = a + b - c
+        pa = abs(p - a)
+        pb = abs(p - b)
+        pc = abs(p - c)
+        if pa <= pb and pa <= pc:
+            pr = a
+        elif pb <= pc:
+            pr = b
+        else:
+            pr = c
+        return x - pr
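+    # A hand-worked sketch of the predictor (not part of the upstream
+    # tests): for the first pixel of the first scanline a = b = c = 0, so
+    # p = 0 and pr = a = 0, and paeth(30, 0, 0, 0) returns 30 unchanged;
+    # with only a left neighbour, paeth(230, 30, 0, 0) has p = 30, pa = 0,
+    # pb = pc = 30, so pr = a = 30 and the filtered value is 200.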
+
+    # test filters and unfilters
+    def testFilterScanlineFirstLine(self):
+        fo = 3  # bytes per pixel
+        line = [30, 31, 32, 230, 231, 232]
+        out = filter_scanline(0, line, fo, None)  # none
+        self.assertEqual(list(out), [0, 30, 31, 32, 230, 231, 232])
+        out = filter_scanline(1, line, fo, None)  # sub
+        self.assertEqual(list(out), [1, 30, 31, 32, 200, 200, 200])
+        out = filter_scanline(2, line, fo, None)  # up
+        # TODO: All filtered scanlines start with a byte indicating the filter
+        # algorithm, except "up". Is this a bug? Should the expected output
+        # start with 2 here?
+        self.assertEqual(list(out), [30, 31, 32, 230, 231, 232])
+        out = filter_scanline(3, line, fo, None)  # average
+        self.assertEqual(list(out), [3, 30, 31, 32, 215, 216, 216])
+        out = filter_scanline(4, line, fo, None)  # paeth
+        self.assertEqual(list(out), [
+            4, self.paeth(30, 0, 0, 0), self.paeth(31, 0, 0, 0),
+            self.paeth(32, 0, 0, 0), self.paeth(230, 30, 0, 0),
+            self.paeth(231, 31, 0, 0), self.paeth(232, 32, 0, 0)
+            ])
+    def testFilterScanline(self):
+        prev = [20, 21, 22, 210, 211, 212]
+        line = [30, 32, 34, 230, 233, 236]
+        fo = 3
+        out = filter_scanline(0, line, fo, prev)  # none
+        self.assertEqual(list(out), [0, 30, 32, 34, 230, 233, 236])
+        out = filter_scanline(1, line, fo, prev)  # sub
+        self.assertEqual(list(out), [1, 30, 32, 34, 200, 201, 202])
+        out = filter_scanline(2, line, fo, prev)  # up
+        self.assertEqual(list(out), [2, 10, 11, 12, 20, 22, 24])
+        out = filter_scanline(3, line, fo, prev)  # average
+        self.assertEqual(list(out), [3, 20, 22, 23, 110, 112, 113])
+        out = filter_scanline(4, line, fo, prev)  # paeth
+        self.assertEqual(list(out), [
+            4, self.paeth(30, 0, 20, 0), self.paeth(32, 0, 21, 0),
+            self.paeth(34, 0, 22, 0), self.paeth(230, 30, 210, 20),
+            self.paeth(233, 32, 211, 21), self.paeth(236, 34, 212, 22)
+            ])
+    def testUnfilterScanline(self):
+        reader = Reader(bytes='')
+        reader.psize = 3
+        scanprev = array('B', [20, 21, 22, 210, 211, 212])
+        scanline = array('B', [30, 32, 34, 230, 233, 236])
+        def cp(a):
+            return array('B', a)
+
+        out = reader.undo_filter(0, cp(scanline), cp(scanprev))
+        self.assertEqual(list(out), list(scanline))  # none
+        out = reader.undo_filter(1, cp(scanline), cp(scanprev))
+        self.assertEqual(list(out), [30, 32, 34, 4, 9, 14])  # sub
+        out = reader.undo_filter(2, cp(scanline), cp(scanprev))
+        self.assertEqual(list(out), [50, 53, 56, 184, 188, 192])  # up
+        out = reader.undo_filter(3, cp(scanline), cp(scanprev))
+        self.assertEqual(list(out), [40, 42, 45, 99, 103, 108])  # average
+        out = reader.undo_filter(4, cp(scanline), cp(scanprev))
+        self.assertEqual(list(out), [50, 53, 56, 184, 188, 192])  # paeth
+    def testUnfilterScanlinePaeth(self):
+        # This tests more edge cases in the paeth unfilter
+        reader = Reader(bytes='')
+        reader.psize = 3
+        scanprev = array('B', [2, 0, 0, 0, 9, 11])
+        scanline = array('B', [6, 10, 9, 100, 101, 102])
+
+        out = reader.undo_filter(4, scanline, scanprev)
+        self.assertEqual(list(out), [8, 10, 9, 108, 111, 113])  # paeth
+    def testIterstraight(self):
+        def arraify(list_of_str):
+            return [array('B', s) for s in list_of_str]
+        reader = Reader(bytes='')
+        reader.row_bytes = 6
+        reader.psize = 3
+        rows = reader.iterstraight(arraify(['\x00abcdef', '\x00ghijkl']))
+        self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
+
+        rows = reader.iterstraight(arraify(['\x00abc', 'def\x00ghijkl']))
+        self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
+
+        rows = reader.iterstraight(arraify(['\x00abcdef\x00ghijkl']))
+        self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
+
+        rows = reader.iterstraight(arraify(['\x00abcdef\x00ghi', 'jkl']))
+        self.assertEqual(list(rows), arraify(['abcdef', 'ghijkl']))
+
+# === Command Line Support ===
+
+def _dehex(s):
+    """Liberally convert from hex string to binary string."""
+    import re
+    import binascii
+
+    # Remove all non-hexadecimal digits
+    s = re.sub(r'[^a-fA-F\d]', '', s)
+    # binascii.unhexlify works in Python 2 and Python 3 (unlike
+    # thing.decode('hex')).
+    return binascii.unhexlify(strtobytes(s))
+def _enhex(s):
+    """Convert from binary string (bytes) to hex string (str)."""
+
+    import binascii
+
+    return bytestostr(binascii.hexlify(s))
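+# Illustrative round trip (a sketch, not exercised by the tests): _dehex
+# ignores spacing and case, so _dehex('89 50 4E 47 0D 0A 1A 0A') yields the
+# eight PNG signature bytes, and _enhex(_dehex('89504e470d0a1a0a')) gives
+# back '89504e470d0a1a0a'.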
+
+# Copies of PngSuite test files taken
+# from http://www.schaik.com/pngsuite/pngsuite_bas_png.html
+# on 2009-02-19 by drj and converted to hex.
+# Some of these are not actually in PngSuite (but maybe they should
+# be?); they use the same naming scheme but start with a capital
+# letter.
+_pngsuite = {
+  'basi0g01': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002001000000012c0677
+cf0000000467414d41000186a031e8965f0000009049444154789c2d8d310ec2
+300c45dfc682c415187a00a42e197ab81e83b127e00c5639001363a580d8582c
+65c910357c4b78b0bfbfdf4f70168c19e7acb970a3f2d1ded9695ce5bf5963df
+d92aaf4c9fd927ea449e6487df5b9c36e799b91bdf082b4d4bd4014fe4014b01
+ab7a17aee694d28d328a2d63837a70451e1648702d9a9ff4a11d2f7a51aa21e5
+a18c7ffd0094e3511d661822f20000000049454e44ae426082
+"""),
+  'basi0g02': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002002000000016ba60d
+1f0000000467414d41000186a031e8965f0000005149444154789c635062e860
+00e17286bb609c93c370ec189494960631366e4467b3ae675dcf10f521ea0303
+90c1ca006444e11643482064114a4852c710baea3f18c31918020c30410403a6
+0ac1a09239009c52804d85b6d97d0000000049454e44ae426082
+"""),
+  'basi0g04': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200400000001e4e6f8
+bf0000000467414d41000186a031e8965f000000ae49444154789c658e5111c2
+301044171c141c141c041c843a287510ea20d441c041c141c141c04191102454
+03994998cecd7edcecedbb9bdbc3b2c2b6457545fbc4bac1be437347f7c66a77
+3c23d60db15e88f5c5627338a5416c2e691a9b475a89cd27eda12895ae8dfdab
+43d61e590764f5c83a226b40d669bec307f93247701687723abf31ff83a2284b
+a5b4ae6b63ac6520ad730ca4ed7b06d20e030369bd6720ed383290360406d24e
+13811f2781eba9d34d07160000000049454e44ae426082
+"""),
+  'basi0g08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200800000001211615
+be0000000467414d41000186a031e8965f000000b549444154789cb5905d0ac2
+3010849dbac81c42c47bf843cf253e8878b0aa17110f214bdca6be240f5d21a5
+94ced3e49bcd322c1624115515154998aa424822a82a5624a1aa8a8b24c58f99
+999908130989a04a00d76c2c09e76cf21adcb209393a6553577da17140a2c59e
+70ecbfa388dff1f03b82fb82bd07f05f7cb13f80bb07ad2fd60c011c3c588eef
+f1f4e03bbec7ce832dca927aea005e431b625796345307b019c845e6bfc3bb98
+769d84f9efb02ea6c00f9bb9ff45e81f9f280000000049454e44ae426082
+"""),
+  'basi0g16': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002010000000017186c9
+fd0000000467414d41000186a031e8965f000000e249444154789cb5913b0ec2
+301044c7490aa8f85d81c3e4301c8f53a4ca0da8902c8144b3920b4043111282
+23bc4956681a6bf5fc3c5a3ba0448912d91a4de2c38dd8e380231eede4c4f7a1
+4677700bec7bd9b1d344689315a3418d1a6efbe5b8305ba01f8ff4808c063e26
+c60d5c81edcf6c58c535e252839e93801b15c0a70d810ae0d306b205dc32b187
+272b64057e4720ff0502154034831520154034c3df81400510cdf0015c86e5cc
+5c79c639fddba9dcb5456b51d7980eb52d8e7d7fa620a75120d6064641a05120
+b606771a05626b401a05f1f589827cf0fe44c1f0bae0055698ee8914fffffe00
+00000049454e44ae426082
+"""),
+  'basi2c08': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002008020000018b1fdd
+350000000467414d41000186a031e8965f000000f249444154789cd59341aa04
+210c44abc07b78133d59d37333bd89d76868b566d10cf4675af8596431a11662
+7c5688919280e312257dd6a0a4cf1a01008ee312a5f3c69c37e6fcc3f47e6776
+a07f8bdaf5b40feed2d33e025e2ff4fe2d4a63e1a16d91180b736d8bc45854c5
+6d951863f4a7e0b66dcf09a900f3ffa2948d4091e53ca86c048a64390f662b50
+4a999660ced906182b9a01a8be00a56404a6ede182b1223b4025e32c4de34304
+63457680c93aada6c99b73865aab2fc094920d901a203f5ddfe1970d28456783
+26cffbafeffcd30654f46d119be4793f827387fc0d189d5bc4d69a3c23d45a7f
+db803146578337df4d0a3121fc3d330000000049454e44ae426082
+"""),
+  'basi2c16': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000201002000001db8f01
+760000000467414d41000186a031e8965f0000020a49444154789cd5962173e3
+3010853fcf1838cc61a1818185a53e56787fa13fa130852e3b5878b4b0b03081
+b97f7030070b53e6b057a0a8912bbb9163b9f109ececbc59bd7dcf2b45492409
+d66f00eb1dd83cb5497d65456aeb8e1040913b3b2c04504c936dd5a9c7e2c6eb
+b1b8f17a58e8d043da56f06f0f9f62e5217b6ba3a1b76f6c9e99e8696a2a72e2
+c4fb1e4d452e92ec9652b807486d12b6669be00db38d9114b0c1961e375461a5
+5f76682a85c367ad6f682ff53a9c2a353191764b78bb07d8ddc3c97c1950f391
+6745c7b9852c73c2f212605a466a502705c8338069c8b9e84efab941eb393a97
+d4c9fd63148314209f1c1d3434e847ead6380de291d6f26a25c1ebb5047f5f24
+d85c49f0f22cc1d34282c72709cab90477bf25b89d49f0f351822297e0ea9704
+f34c82bc94002448ede51866e5656aef5d7c6a385cb4d80e6a538ceba04e6df2
+480e9aa84ddedb413bb5c97b3838456df2d4fec2c7a706983e7474d085fae820
+a841776a83073838973ac0413fea2f1dc4a06e71108fda73109bdae48954ad60
+bf867aac3ce44c7c1589a711cf8a81df9b219679d96d1cec3d8bbbeaa2012626
+df8c7802eda201b2d2e0239b409868171fc104ba8b76f10b4da09f6817ffc609
+c413ede267fd1fbab46880c90f80eccf0013185eb48b47ba03df2bdaadef3181
+cb8976f18e13188768170f98c0f844bb78cb04c62ddac59d09fc3fa25dfc1da4
+14deb3df1344f70000000049454e44ae426082
+"""),
+  'basi3p08': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020080300000133a3ba
+500000000467414d41000186a031e8965f00000300504c5445224400f5ffed77
+ff77cbffff110a003a77002222ffff11ff110000222200ffac5566ff66ff6666
+ff01ff221200dcffffccff994444ff005555220000cbcbff44440055ff55cbcb
+00331a00ffecdcedffffe4ffcbffdcdc44ff446666ff330000442200ededff66
+6600ffa444ffffaaeded0000cbcbfefffffdfffeffff0133ff33552a000101ff
+8888ff00aaaa010100440000888800ffe4cbba5b0022ff22663200ffff99aaaa
+ff550000aaaa00cb630011ff11d4ffaa773a00ff4444dc6b0066000001ff0188
+4200ecffdc6bdc00ffdcba00333300ed00ed7300ffff88994a0011ffff770000
+ff8301ffbabafe7b00fffeff00cb00ff999922ffff880000ffff77008888ffdc
+ff1a33000000aa33ffff009900990000000001326600ffbaff44ffffffaaff00
+770000fefeaa00004a9900ffff66ff22220000998bff1155ffffff0101ff88ff
+005500001111fffffefffdfea4ff4466ffffff66ff003300ffff55ff77770000
+88ff44ff00110077ffff006666ffffed000100fff5ed1111ffffff44ff22ffff
+eded11110088ffff00007793ff2200dcdc3333fffe00febabaff99ffff333300
+63cb00baba00acff55ffffdcffff337bfe00ed00ed5555ffaaffffdcdcff5555
+00000066dcdc00dc00dc83ff017777fffefeffffffcbff5555777700fefe00cb
+00cb0000fe010200010000122200ffff220044449bff33ffd4aa0000559999ff
+999900ba00ba2a5500ffcbcbb4ff66ff9b33ffffbaaa00aa42880053aa00ffaa
+aa0000ed00babaffff1100fe00000044009999990099ffcc99ba000088008800
+dc00ff93220000dcfefffeaa5300770077020100cb0000000033ffedff00ba00
+ff3333edffedffc488bcff7700aa00660066002222dc0000ffcbffdcffdcff8b
+110000cb00010155005500880000002201ffffcbffcbed0000ff88884400445b
+ba00ffbc77ff99ff006600baffba00777773ed00fe00003300330000baff77ff
+004400aaffaafffefe000011220022c4ff8800eded99ff99ff55ff002200ffb4
+661100110a1100ff1111dcffbabaffff88ff88010001ff33ffb98ed362000002
+a249444154789c65d0695c0b001806f03711a9904a94d24dac63292949e5a810
+d244588a14ca5161d1a1323973252242d62157d12ae498c8124d25ca3a11398a
+16e55a3cdffab0ffe7f77d7fcff3528645349b584c3187824d9d19d4ec2e3523
+9eb0ae975cf8de02f2486d502191841b42967a1ad49e5ddc4265f69a899e26b5
+e9e468181baae3a71a41b95669da8df2ea3594c1b31046d7b17bfb86592e4cbe
+d89b23e8db0af6304d756e60a8f4ad378bdc2552ae5948df1d35b52143141533
+33bbbbababebeb3b3bc9c9c9c6c6c0c0d7b7b535323225a5aa8a02024a4bedec
+0a0a2a2bcdcd7d7cf2f3a9a9c9cdcdd8b8adcdd5b5ababa828298982824a4ab2
+b21212acadbdbc1414e2e24859b9a72730302f4f49292c4c57373c9c0a0b7372
+8c8c1c1c3a3a92936d6dfdfd293e3e26262a4a4eaea2424b4b5fbfbc9c323278
+3c0b0ba1303abaae8ecdeeed950d6669a9a7a7a141d4de9e9d5d5cdcd2229b94
+c572716132f97cb1d8db9bc3110864a39795d9db6b6a26267a7a9a98d4d6a6a7
+cb76090ef6f030354d4d75766e686030545464cb393a1a1ac6c68686eae8f8f9
+a9aa4644c8b66d6e1689dcdd2512a994cb35330b0991ad9f9b6b659596a6addd
+d8282fafae5e5323fb8f41d01f76c22fd8061be01bfc041a0323e1002c81cd30
+0b9ec027a0c930014ec035580fc3e112bc069a0b53e11c0c8095f00176c163a0
+e5301baec06a580677600ddc05ba0f13e120bc81a770133ec355a017300d4ec2
+0c7800bbe1219c02fa08f3e13c1c85dbb00a2ec05ea0dff00a6ec15a98027360
+070c047a06d7e1085c84f1b014f6c03fa0b33018b6c0211801ebe018fc00da0a
+6f61113c877eb01d4ec317a085700f26c130f80efbe132bc039a0733e106fc81
+f7f017f6c10aa0d1300a0ec374780943e1382c06fa0a9b60238c83473016cec0
+02f80f73fefe1072afc1e50000000049454e44ae426082
+"""),
+  'basi6a08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200806000001047d4a
+620000000467414d41000186a031e8965f0000012049444154789cc595414ec3
+3010459fa541b8bbb26641b8069b861e8b4d12c1c112c1452a710a2a65d840d5
+949041fc481ec98ae27c7f3f8d27e3e4648047600fec0d1f390fbbe2633a31e2
+9389e4e4ea7bfdbf3d9a6b800ab89f1bd6b553cfcbb0679e960563d72e0a9293
+b7337b9f988cc67f5f0e186d20e808042f1c97054e1309da40d02d7e27f92e03
+6cbfc64df0fc3117a6210a1b6ad1a00df21c1abcf2a01944c7101b0cb568a001
+909c9cf9e399cf3d8d9d4660a875405d9a60d000b05e2de55e25780b7a5268e0
+622118e2399aab063a815808462f1ab86890fc2e03e48bb109ded7d26ce4bf59
+0db91bac0050747fec5015ce80da0e5700281be533f0ce6d5900b59bcb00ea6d
+200314cf801faab200ea752803a8d7a90c503a039f824a53f4694e7342000000
+0049454e44ae426082
+"""),
+  'basn0g01': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002001000000005b0147
+590000000467414d41000186a031e8965f0000005b49444154789c2dccb10903
+300c05d1ebd204b24a200b7a346f90153c82c18d0a61450751f1e08a2faaead2
+a4846ccea9255306e753345712e211b221bf4b263d1b427325255e8bdab29e6f
+6aca30692e9d29616ee96f3065f0bf1f1087492fd02f14c90000000049454e44
+ae426082
+"""),
+  'basn0g02': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002002000000001ca13d
+890000000467414d41000186a031e8965f0000001f49444154789c6360085df5
+1f8cf1308850c20053868f0133091f6390b90700bd497f818b0989a900000000
+49454e44ae426082
+"""),
+  # A version of basn0g04 dithered down to 3 bits.
+  'Basn0g03': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
+2900000001734249540371d88211000000fd49444154789c6d90d18906210c84
+c356f22356b2889588604301b112112b11d94a96bb495cf7fe87f32d996f2689
+44741cc658e39c0b118f883e1f63cc89dafbc04c0f619d7d898396c54b875517
+83f3a2e7ac09a2074430e7f497f00f1138a5444f82839c5206b1f51053cca968
+63258821e7f2b5438aac16fbecc052b646e709de45cf18996b29648508728612
+952ca606a73566d44612b876845e9a347084ea4868d2907ff06be4436c4b41a3
+a3e1774285614c5affb40dbd931a526619d9fa18e4c2be420858de1df0e69893
+a0e3e5523461be448561001042b7d4a15309ce2c57aef2ba89d1c13794a109d7
+b5880aa27744fc5c4aecb5e7bcef5fe528ec6293a930690000000049454e44ae
+426082
+"""),
+  'basn0g04': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
+290000000467414d41000186a031e8965f0000004849444154789c6360601014
+545232367671090d4d4b2b2f6720430095dbd1418e002a77e64c720450b9ab56
+912380caddbd9b1c0154ee9933e408a072efde25470095fbee1d1902001f14ee
+01eaff41fa0000000049454e44ae426082
+"""),
+  'basn0g08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200800000000561125
+280000000467414d41000186a031e8965f0000004149444154789c6364602400
+1408c8b30c05058c0f0829f8f71f3f6079301c1430ca11906764a2795c0c0605
+8c8ff0cafeffcff887e67131181430cae0956564040050e5fe7135e2d8590000
+000049454e44ae426082
+"""),
+  'basn0g16': _dehex("""
+89504e470d0a1a0a0000000d49484452000000200000002010000000000681f9
+6b0000000467414d41000186a031e8965f0000005e49444154789cd5d2310ac0
+300c4351395bef7fc6dca093c0287b32d52a04a3d98f3f3880a7b857131363a0
+3a82601d089900dd82f640ca04e816dc06422640b7a03d903201ba05b7819009
+d02d680fa44c603f6f07ec4ff41938cf7f0016d84bd85fae2b9fd70000000049
+454e44ae426082
+"""),
+  'basn2c08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
+a30000000467414d41000186a031e8965f0000004849444154789cedd5c10900
+300c024085ec91fdb772133b442bf4a1f8cee12bb40d043b800a14f81ca0ede4
+7d4c784081020f4a871fc284071428f0a0743823a94081bb7077a3c00182b1f9
+5e0f40cf4b0000000049454e44ae426082
+"""),
+  'basn2c16': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000201002000000ac8831
+e00000000467414d41000186a031e8965f000000e549444154789cd596c10a83
+301044a7e0417fcb7eb7fdadf6961e06039286266693cc7a188645e43dd6a08f
+1042003e2fe09aef6472737e183d27335fcee2f35a77b702ebce742870a23397
+f3edf2705dd10160f3b2815fe8ecf2027974a6b0c03f74a6e4192843e75c6c03
+35e8ec3202f5e84c0181bbe8cca967a00d9df3491bb040671f2e6087ce1c2860
+8d1e05f8c7ee0f1d00b667e70df44467ef26d01fbd9bc028f42860f71d188bce
+fb8d3630039dbd59601e7ab3c06cf428507f0634d039afdc80123a7bb1801e7a
+b1802a7a14c89f016d74ce331bf080ce9e08f8414f04bca133bfe642fe5e07bb
+c4ec0000000049454e44ae426082
+"""),
+  'basn3p04': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200403000000815467
+c70000000467414d41000186a031e8965f000000037342495404040477f8b5a3
+0000002d504c54452200ff00ffff8800ff22ff000099ffff6600dd00ff77ff00
+ff000000ff99ddff00ff00bbffbb000044ff00ff44d2b049bd00000047494441
+54789c63e8e8080d3d7366d5aaf27263e377ef66ce64204300952b28488e002a
+d7c5851c0154eeddbbe408a07119c81140e52a29912380ca4d4b23470095bb7b
+37190200e0c4ead10f82057d0000000049454e44ae426082
+"""),
+  'basn6a08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200806000000737a7a
+f40000000467414d41000186a031e8965f0000006f49444154789cedd6310a80
+300c46e12764684fa1f73f55048f21c4ddc545781d52e85028fc1f4d28d98a01
+305e7b7e9cffba33831d75054703ca06a8f90d58a0074e351e227d805c8254e3
+1bb0420f5cdc2e0079208892ffe2a00136a07b4007943c1004d900195036407f
+011bf00052201a9c160fb84c0000000049454e44ae426082
+"""),
+  'cs3n3p08': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
+c60000000467414d41000186a031e8965f0000000373424954030303a392a042
+00000054504c544592ff0000ff9200ffff00ff0000dbff00ff6dffb600006dff
+b6ff00ff9200dbff000049ffff2400ff000024ff0049ff0000ffdb00ff4900ff
+b6ffff0000ff2400b6ffffdb000092ffff6d000024ffff49006dff00df702b17
+0000004b49444154789c85cac70182000000b1b3625754b0edbfa72324ef7486
+184ed0177a437b680bcdd0031c0ed00ea21f74852ed00a1c9ed0086da0057487
+6ed0121cd6d004bda0013a421ff803224033e177f4ae260000000049454e44ae
+426082
+"""),
+  's09n3p02': _dehex("""
+89504e470d0a1a0a0000000d49484452000000090000000902030000009dffee
+830000000467414d41000186a031e8965f000000037342495404040477f8b5a3
+0000000c504c544500ff000077ffff00ffff7700ff5600640000001f49444154
+789c63600002fbff0c0c56ab19182ca381581a4283f82071200000696505c36a
+437f230000000049454e44ae426082
+"""),
+  'tbgn3p08': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
+c60000000467414d41000186a031e8965f00000207504c54457f7f7fafafafab
+abab110000222200737300999999510d00444400959500959595e6e600919191
+8d8d8d620d00898989666600b7b700911600000000730d007373736f6f6faaaa
+006b6b6b676767c41a00cccc0000f30000ef00d51e0055555567670000dd0051
+515100d1004d4d4de61e0038380000b700160d0d00ab00560d00090900009500
+009100008d003333332f2f2f2f2b2f2b2b000077007c7c001a05002b27000073
+002b2b2b006f00bb1600272727780d002323230055004d4d00cc1e00004d00cc
+1a000d00003c09006f6f00002f003811271111110d0d0d55554d090909001100
+4d0900050505000d00e2e200000900000500626200a6a6a6a2a2a29e9e9e8484
+00fb00fbd5d500801100800d00ea00ea555500a6a600e600e6f7f700e200e233
+0500888888d900d9848484c01a007777003c3c05c8c8008080804409007c7c7c
+bb00bbaa00aaa600a61e09056262629e009e9a009af322005e5e5e05050000ee
+005a5a5adddd00a616008d008d00e20016050027270088110078780000c40078
+00787300736f006f44444400aa00c81e004040406600663c3c3c090000550055
+1a1a00343434d91e000084004d004d007c004500453c3c00ea1e00222222113c
+113300331e1e1efb22001a1a1a004400afaf00270027003c001616161e001e0d
+160d2f2f00808000001e00d1d1001100110d000db7b7b7090009050005b3b3b3
+6d34c4230000000174524e530040e6d86600000001624b474402660b7c640000
+01f249444154789c6360c0048c8c58049100575f215ee92e6161ef109cd2a15e
+4b9645ce5d2c8f433aa4c24f3cbd4c98833b2314ab74a186f094b9c2c27571d2
+6a2a58e4253c5cda8559057a392363854db4d9d0641973660b0b0bb76bb16656
+06970997256877a07a95c75a1804b2fbcd128c80b482a0b0300f8a824276a9a8
+ec6e61612b3e57ee06fbf0009619d5fac846ac5c60ed20e754921625a2daadc6
+1967e29e97d2239c8aec7e61fdeca9cecebef54eb36c848517164514af16169e
+866444b2b0b7b55534c815cc2ec22d89cd1353800a8473100a4485852d924a6a
+412adc74e7ad1016ceed043267238c901716f633a812022998a4072267c4af02
+92127005c0f811b62830054935ce017b38bf0948cc5c09955f030a24617d9d46
+63371fd940b0827931cbfdf4956076ac018b592f72d45594a9b1f307f3261b1a
+084bc2ad50018b1900719ba6ba4ca325d0427d3f6161449486f981144cf3100e
+2a5f2a1ce8683e4ddf1b64275240c8438d98af0c729bbe07982b8a1c94201dc2
+b3174c9820bcc06201585ad81b25b64a2146384e3798290c05ad280a18c0a62e
+e898260c07fca80a24c076cc864b777131a00190cdfa3069035eccbc038c30e1
+3e88b46d16b6acc5380d6ac202511c392f4b789aa7b0b08718765990111606c2
+9e854c38e5191878fbe471e749b0112bb18902008dc473b2b2e8e72700000000
+49454e44ae426082
+"""),
+  'Tp2n3p08': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
+c60000000467414d41000186a031e8965f00000300504c544502ffff80ff05ff
+7f0703ff7f0180ff04ff00ffff06ff000880ff05ff7f07ffff06ff000804ff00
+0180ff02ffff03ff7f02ffff80ff0503ff7f0180ffff0008ff7f0704ff00ffff
+06ff000802ffffff7f0704ff0003ff7fffff0680ff050180ff04ff000180ffff
+0008ffff0603ff7f80ff05ff7f0702ffffff000880ff05ffff0603ff7f02ffff
+ff7f070180ff04ff00ffff06ff000880ff050180ffff7f0702ffff04ff0003ff
+7fff7f0704ff0003ff7f0180ffffff06ff000880ff0502ffffffff0603ff7fff
+7f0702ffff04ff000180ff80ff05ff0008ff7f07ffff0680ff0504ff00ff0008
+0180ff03ff7f02ffff02ffffffff0604ff0003ff7f0180ffff000880ff05ff7f
+0780ff05ff00080180ff02ffffff7f0703ff7fffff0604ff00ff7f07ff0008ff
+ff0680ff0504ff0002ffff0180ff03ff7fff0008ffff0680ff0504ff000180ff
+02ffff03ff7fff7f070180ff02ffff04ff00ffff06ff0008ff7f0780ff0503ff
+7fffff06ff0008ff7f0780ff0502ffff03ff7f0180ff04ff0002ffffff7f07ff
+ff0604ff0003ff7fff00080180ff80ff05ffff0603ff7f0180ffff000804ff00
+80ff0502ffffff7f0780ff05ffff0604ff000180ffff000802ffffff7f0703ff
+7fff0008ff7f070180ff03ff7f02ffff80ff05ffff0604ff00ff0008ffff0602
+ffff0180ff04ff0003ff7f80ff05ff7f070180ff04ff00ff7f0780ff0502ffff
+ff000803ff7fffff0602ffffff7f07ffff0680ff05ff000804ff0003ff7f0180
+ff02ffff0180ffff7f0703ff7fff000804ff0080ff05ffff0602ffff04ff00ff
+ff0603ff7fff7f070180ff80ff05ff000803ff7f0180ffff7f0702ffffff0008
+04ff00ffff0680ff0503ff7f0180ff04ff0080ff05ffff06ff000802ffffff7f
+0780ff05ff0008ff7f070180ff03ff7f04ff0002ffffffff0604ff00ff7f07ff
+000880ff05ffff060180ff02ffff03ff7f80ff05ffff0602ffff0180ff03ff7f
+04ff00ff7f07ff00080180ffff000880ff0502ffff04ff00ff7f0703ff7fffff
+06ff0008ffff0604ff00ff7f0780ff0502ffff03ff7f0180ffdeb83387000000
+f874524e53000000000000000008080808080808081010101010101010181818
+1818181818202020202020202029292929292929293131313131313131393939
+393939393941414141414141414a4a4a4a4a4a4a4a52525252525252525a5a5a
+5a5a5a5a5a62626262626262626a6a6a6a6a6a6a6a73737373737373737b7b7b
+7b7b7b7b7b83838383838383838b8b8b8b8b8b8b8b94949494949494949c9c9c
+9c9c9c9c9ca4a4a4a4a4a4a4a4acacacacacacacacb4b4b4b4b4b4b4b4bdbdbd
+bdbdbdbdbdc5c5c5c5c5c5c5c5cdcdcdcdcdcdcdcdd5d5d5d5d5d5d5d5dedede
+dededededee6e6e6e6e6e6e6e6eeeeeeeeeeeeeeeef6f6f6f6f6f6f6f6b98ac5
+ca0000012c49444154789c6360e7169150d230b475f7098d4ccc28a96ced9e32
+63c1da2d7b8e9fb97af3d1fb8f3f18e8a0808953544a4dd7c4c2c9233c2621bf
+b4aab17fdacce5ab36ee3a72eafaad87efbefea68702362e7159652d031b07cf
+c0b8a4cce28aa68e89f316aedfb4ffd0b92bf79fbcfcfe931e0a183904e55435
+8decdcbcc22292b3caaadb7b27cc5db67af3be63e72fdf78fce2d31f7a2860e5
+119356d037b374f10e8a4fc92eaa6fee99347fc9caad7b0f9ebd74f7c1db2fbf
+e8a180995f484645dbdccad12f38363dafbcb6a573faeca5ebb6ed3e7ce2c29d
+e76fbefda38702063e0149751d537b67ff80e8d4dcc29a86bea97316add9b0e3
+c0e96bf79ebdfafc971e0a587885e515f58cad5d7d43a2d2720aeadaba26cf5a
+bc62fbcea3272fde7efafac37f3a28000087c0fe101bc2f85f0000000049454e
+44ae426082
+"""),
+  'tbbn1g04': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
+290000000467414d41000186a031e8965f0000000274524e530007e8f7589b00
+000002624b47440000aa8d23320000013e49444154789c55d1cd4b024118c7f1
+efbe6419045b6a48a72d352808b435284f9187ae9b098627a1573a19945beba5
+e8129e8222af11d81e3a4545742de8ef6af6d5762e0fbf0fc33c33f36085cb76
+bc4204778771b867260683ee57e13f0c922df5c719c2b3b6c6c25b2382cea4b9
+9f7d4f244370746ac71f4ca88e0f173a6496749af47de8e44ba8f3bf9bdfa98a
+0faf857a7dd95c7dc8d7c67c782c99727997f41eb2e3c1e554152465bb00fe8e
+b692d190b718d159f4c0a45c4435915a243c58a7a4312a7a57913f05747594c6
+46169866c57101e4d4ce4d511423119c419183a3530cc63db88559ae28e7342a
+1e9c8122b71139b8872d6e913153224bc1f35b60e4445bd4004e20ed6682c759
+1d9873b3da0fbf50137dc5c9bde84fdb2ec8bde1189e0448b63584735993c209
+7a601bd2710caceba6158797285b7f2084a2f82c57c01a0000000049454e44ae
+426082
+"""),
+  'tbrn2c08': _dehex("""
+89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
+a30000000467414d41000186a031e8965f0000000674524e53007f007f007f8a
+33334f00000006624b474400ff0000000033277cf3000004d649444154789cad
+965f68537714c73fd912d640235e692f34d0406fa0c1663481045ab060065514
+56660a295831607df0a1488715167060840a1614e6431e9cb34fd2c00a762c85
+f6a10f816650c13b0cf40612e1822ddc4863bd628a8924d23d6464f9d3665dd9
+f7e977ce3dbff3cd3939bfdfef6bb87dfb364782dbed065ebe7cd93acc78b4ec
+a228debd7bb7bfbfbfbbbbfb7f261045311a8d261209405194274f9ea4d3e916
+f15f1c3eb5dd6e4fa5fecce526239184a2b0b8486f6f617171b1f5ae4311381c
+8e57af5e5dbd7a351088150a78bd389d44222c2f93cdfe66b7db8f4ee07038b6
+b6b6bebf766d7e7e7e60a06432313b4ba984c3c1c4049a46b95c5a58583822c1
+dbb76f27272733d1b9df853c3030c0f232562b9108cf9eb1b888d7cbf030abab
+31abd5fa1f08dc6ef7e7cf9f1f3f7e1c8944745d4f1400c62c001313acad21cb
+b8dd2c2c603271eb1640341aad4c6d331aa7e8c48913a150a861307ecc11e964
+74899919bc5e14e56fffc404f1388502f178dceff7ef4bf0a5cfe7abb533998c
+e5f9ea2f1dd88c180d64cb94412df3dd57e83a6b3b3c7a84c98420100c72fd3a
+636348bae726379fe69e8e8d8dbd79f3a6558b0607079796965256479b918085
+7b02db12712b6181950233023f3f647494ee6e2e5ea45864cce5b8a7fe3acffc
+3aebb22c2bd5d20e22d0757d7b7bbbbdbd3d94a313bed1b0aa3cd069838b163a
+8d4c59585f677292d0b84d9a995bd337def3fe6bbe5e6001989b9b6bfe27ea08
+36373781542ab56573248b4c5bc843ac4048c7ab21aa24ca00534c25482828a3
+8c9ee67475bbaaaab22cb722c8e57240a150301a8d219de94e44534d7d90e885
+87acb0e2c4f9800731629b6c5ee14a35a6b9887d2a0032994cb9cf15dbe59650
+ff7b46a04c9a749e7cc5112214266cc65c31354d5b5d5d3d90209bcd5616a552
+a95c2e87f2a659bd9ee01c2cd73964e438f129a6aa9e582c363838b80f81d7eb
+5555b56a2a8ad2d9d7affd0409f8015c208013fea00177b873831b0282c964f2
+783c1e8fa7582cee5f81a669b5e6eeeeaee58e8559b0c233d8843c7c0b963a82
+34e94b5cb2396d7d7d7db22c8ba258fb0afd43f0e2c58b919191ba9de9b4d425
+118329b0c3323c8709d02041b52b4ea7f39de75d2a934a2693c0a953a76a93d4
+5d157ebf7f6565a5542a553df97c5e10045dd731c130b86113cc300cbd489224
+08422a952a140a95788fc763b1d41558d7a2d7af5f5fb870a1d6a3aaaacd6603
+18802da84c59015bd2e6897b745d9765b99a1df0f97c0daf74e36deaf7fbcd66
+73ad2797cb89a2c839880188a2e8743a8bc5a22ccbba5e376466b3b9bdbdbd21
+6123413a9d0e0402b51e4dd3bababa788eb022b85caeb6b6364551b6b7b76942
+43f7f727007a7a7a04a1ee8065b3595fde2768423299ac1ec6669c3973e65004
+c0f8f878ad69341a33994ced2969c0d0d0502412f9f8f163f3a7fd654b474787
+288ad53e74757535df6215b85cae60302849d2410aecc037f9f2e5cbd5b5c160
+680eb0dbede170381c0e7ff8f0a185be3b906068684892a4ca7a6f6faff69328
+8ad3d3d3f7efdfdfdbdbfb57e96868a14d0d0643381c96242997cbe5f3794010
+84603078fcf8f1d6496bd14a3aba5c2ea7d369341a5555b5582c8140e0fcf9f3
+1b1b1b87cf4eeb0a8063c78e45a3d19e9e1ebfdfdf5a831e844655d18093274f
+9e3d7bf6d3a74f3b3b3b47c80efc05ff7af28fefb70d9b0000000049454e44ae
+426082
+"""),
+  'basn6a16': _dehex("""
+89504e470d0a1a0a0000000d494844520000002000000020100600000023eaa6
+b70000000467414d41000186a031e8965f00000d2249444154789cdd995f6c1c
+d775c67ff38fb34b724d2ee55a8e4b04a0ac87049100cab4dbd8c6528902cb4d
+10881620592e52d4325ac0905bc98a94025e71fd622cb5065ac98a0c283050c0
+728a00b6e542a1d126885cd3298928891d9a0444037e904434951d4b90b84b2f
+c9dde1fcebc33977a95555348f411e16dfce9d3b77ee77eebde77ce78c95a669
+0ad07c17009a13edd898b87dfb1fcb7d2b4d1bff217f33df80deb1e6267df0ff
+c1e6e6dfafdf1f5a7fd30f9aef66b6d546dd355bf02c40662e3307f9725a96c6
+744c3031f83782f171c148dbc3bf1774f5dad1e79d6f095a3f54d4fbec5234ef
+d9a2f8d73afe4f14f57ef4f42def7b44f19060f06b45bddf1c5534d77fd922be
+2973a15a82e648661c6e3240aa3612ead952b604bde57458894f29deaf133bac
+13d2766f5227a4a3b8cf08da7adfd6fbd6bd8a4fe9dbb43d35e3dfa3f844fbf8
+9119bf4f7144094fb56333abf8a86063ca106f94b3a3b512343765e60082097f
+1bb86ba72439a653519b09f5cee1ce61c897d37eedf5553580ae60f4af8af33a
+b14fd400b6a0f34535c0434afc0b3a9f07147527a5fa7ca218ff56c74d74dc3f
+155cfd3325fc278acf2ae1cb4a539f5f9937c457263b0bd51234c732a300cdd1
+cc1840f0aaff54db0e4874ed5a9b5d6d27d4bb36746d80de72baa877ff4b275a
+d7895ed1897ea4139b5143fcbb1a62560da1ed9662aaed895ec78a91c18795b8
+5e07ab4af8ba128e95e682e0728bf8f2e5ae815a091a53d902ac1920d8e05f06
+589de8d8d66680789f4e454fb9d9ec66cd857af796ee2d902fa73fd5bba775a2
+153580ae44705ed0d37647d15697cb8f14bfa3e3e8fdf8031d47af571503357c
+f30d25acedcbbf135c9a35c49766ba07ab255859e8ec03684e66860182dff8f7
+0304bff6ff1c20fc81b7afdd00a71475539a536e36bb5973a19e3b923b02bde5
+e4efd4003ac170eb2d13fe274157afedbd82d6fb3a9a1e85e4551d47cf7078f8
+9671fe4289ebf5f2bf08d63f37c4eb4773c55a0996efeefa0ca011671d8060ca
+2f0004c7fcc300e166ef0240f825efe3361f106d57d423d0723f7acacd66376b
+2ed47b7a7a7a205f4ef4ac4691e0aad9aa0d41cf13741c3580a506487574ddca
+61a8c403c1863ebfbcac3475168b2de28b8b3d77544bb05ce92a02aceced3c0d
+d0cc65ea371b201cf1c601c24dde1c4078cedbdeb60322f50126a019bf6edc9b
+39e566b39b3517eaf97c3e0fbde5e4491d45bd74537145d155b476aa0176e868
+c6abebf30dbd5e525c54ac8e18e2d56abeb756827a3d970358a97416019a6f64
+f60004fdfe1580d5c98e618070cc1b05887eee7e0d209a70db7d8063029889b4
+c620ead78d7b33a7dc6c76b3e6427ddddbebde867c393aa7845e5403e8ca794a
+d0d6fb897af5f03525fe5782f5e7046bdaef468bf88d1debc6ab25583cd17310
+6079b9ab0ba059c914018245bf076075b5a303200c3c1f209a733701444fbbaf
+00c4134ebb016c5d0b23614c243701cdf875e3decce9349bddacb9505fbf7dfd
+76e82d87736a00f5d2b5ffd4b7dce2719a4d25ae717ee153c1abef18e257cfad
+7fa45682da48ef38c052b53b0fd06864b300c151ff08c0ea431de701a287dd5f
+004497dc7b01a253ee3e80b8c7f91c20f967fb6fdb7c80ada7d8683723614c24
+3701cdf875e3decc29379bddacb950ef3fd47f08f2e5a61ea4aa2a3eb757cd55
+13345efcfa59c12b2f19e2578ef77fb75a82854ffbee01a83f977b11a031931d
+040802df07082b5e11207cc17b1e209a770700e2df0a83e409fb7580f827c230
+99b06fd901fb058d6835dacd481813c94d40337eddb83773cacd66376b2ed437
+bebcf165e82d2f4e4beb7f3fa6e652c2d7ee10bc78c010bfb87fe3c95a09ae9f
+bd732740bd2fb700d0f865f64180e059ff044018ca0ca28a5b04883f701e0088
+bfec7c0c909cb71f0448c6ec518074b375012079d9dedf66004bcfbc51eb2dd1
+aadacd481813c94d40337eddb83773cacd66376b2ed487868686205fbe7c49ef
+5605a73f34c4a7a787eeab96e0da81bb4e022c15ba27019a5b339300e16bf286
+a8eae601e25866907cdf3e0890acb36f00245fb57f05904e59c300e92561946e
+b2e600d209ab7d07f04d458dfb46ad1bd16ab49b913026929b8066fcba716fe6
+949bcd6ed65ca8ef7e7cf7e3d05b7e7c8f217ee6cdddbb6a25a856f37980e0c7
+fe4e80a82623c48193014846ec7180f4acf518409aca0cd28a5504e03b32c374
+de1a00608a0240faaa327a4b19fe946fb6f90054dbb5f2333d022db56eb4966a
+3723614c243701cdf8f556bea8a7dc6c76b3e66bd46584ddbbcebc0990cf4b0f
+ff4070520c282338a7e26700ec725202b01e4bcf0258963c6f1d4d8f0030cb20
+805549c520930c03584fa522b676f11600ffc03fde3e1b3489a9c9054c9aa23b
+c08856a3dd8c843191dc0434e3d78d7b33a75c36fb993761f7ae5a69f72ef97f
+e6ad336fed7e1c60e8bee96980bbdebbb60da07b7069062033d9dc0ae03d296f
+70ab511ec071640676252902d833c916007b3e1900b0a6d2028035968e025861
+ea01581369fb11488c34d18cbc95989afccca42baad65ba2d5683723614c24d7
+8066fcbab8b7e96918baaf5aaa56219f975fb50a43f7c9bde90fa73f1c1a02d8
+78f2e27e803b77ca08b90519315b6fe400fc1392097a9eccc0ad444500e70199
+a1331f0f00d8934901c07e5d526ceb87c2d07e2579badd005a2b31a5089391b7
+1253358049535a6add8856dd0146c298482e01ede27ed878b256ba7600ee3a09
+c18fc1df09fe01084ec25defc1b56db0f1a4f4bd78e0e2818d2f0334e7330300
+7df7c888b917e50dd9c1c60c80efcb0cbc63e1f700bce7c31700dccbd1060027
+8add9b0de06c8e2f00d84962b7d7030e2a61538331b98051f92631bd253f336a
+dd8856a3dd44c25c390efddfad96ae9f853b77c25201ba27c533b8bdf28b6ad0
+3d084b33d2e7fa59099e9901b8f2d29597fa0f01848f78e70082117f1ca07b76
+6910209b9519f895a008d031bbba05c09d8f06005c5b18b8fba25300cea6780e
+c03e911c6ccf06d507b48a4fa606634a114609de929f9934c5a87511ad57cfc1
+fa476aa5854fa1ef1e3910b905686e85cc24c40138198915f133d2d6dc2a7dea
+7df2ccc2a752faf2cec1d577aebeb37e3b4034eeee0008dff3be0e6b923773b4
+7904c0ef9119767cb4fa1500ef1361e08e452500f71561e84cc4ed3e20fab6a2
+c905f40cb76a3026bf3319b91ac2e46792a6dcd801ebc6aba5da08f48ecb81c8
+bd088d5f42f6417191de93908c803d0e76199292b485af41b60e8d9c3c537f0e
+8211f0c7211a077707dc18b931b2ee6d80a4d7ae024491ebc24d4a708ff70680
+7f25e807e8785f1878e322d6ddaf453f0770ff2dfa769b01423dbbad72a391b6
+5a7c3235985629423372494cab55c8f7d64a8b27a0e7202c55a13b0f8d19c80e
+4ae9ca3f015115dc3ca467c17a4c7ee95970ab10e5a54ff0ac3cd39881ee5958
+1a84f03df0be0e492fd855a8d6aa35d10b4962dbb0a604a3d3ee5e80a8eee600
+a24977f8660378bf0bbf00e01d0a8fb7f980f04b8aa6ce6aca8d5a7533c52753
+839152c4e222f4dc512dd5eb90cbc981e8ea12cf90cd8a8bf47d89159e2741d3
+7124f65b96fcd254dae258fa84a13c13043246a32129574787e49eae2b49b86d
+c3e2e78b9ff7f4002415bb08907c66df0d103b4e0c104db90500ff70700c203a
+ee1e82dba4c3e16e256c0acca6ceaae9afd1f612d7eb472157ac95962bd05594
+7dd1598466053245088e827f44628657942a825b84e4fb601f84b4025611aca3
+901e01bb024911dc0a4445f08e41f83df02b10142173149ab71baf027611ea95
+7a257704201d14cd9af4d90b00f194530088cb4e09c0df1c5c0088f7393f6833
+c0aa3ac156655de3bca9b34ab9716906ba07aba5e5bba1eb3358d90b9da7c533
+64f6888bf47b60f521e8380fe10be03d2feac17900927560df40f4e48f805960
+50328d648bf4893f9067c217a0631656b7c898c122847bc07b03a2d3e0ee85e4
+33b0ef867450c4fad2ecd26cf7168074c0ba0c904cdac300c9cfec4701924df6
+1cdca61e10685c6f7d52d0caba1498972f43d740adb4b2009d7d7220b20e3473
+90a943d00ffe959bb6eac3e0fe42ea49ee00c45f06e76329b1dabf127d690d80
+5581b408f63c2403e0cc433c00ee658836803b0fd100747c04ab5f917704fd10
+d5c1cd41ec801343d207f602a403605d86e5f9e5f9ae0d00e994556833806685
+c931fb709b0f08b4e869bea5c827859549e82c544b8d29c816a0390999613920
+7e610d5727a16318c2003c1fa24be0de2b32caf92224e7c17e5004b6350c4c01
+05601218066b0ad28224e149019c086257ca315102de2712903bde97b8144d82
+3b2c6ac52d403c054e019249b087f53d0558995a99ea946c70cc927458b3c1ff
+550f30050df988d4284376b4566a8e416654cc921985e037e0df0fc131f00f4b
+acf0c6211c036f14a239703741740adc7da227edd7e56b833d0ae92549b4d357
+25dfb49ed2ff63908e6adf27d6d0dda7638d4154d2778daca17f58e61297c129
+41f233b01f5dc3740cac51688c35c6b22580f48224fee9b83502569a66b629f1
+09f3713473413e2666e7fe6f6c6efefdfafda1f56f6e06f93496d9d67cb7366a
+9964b6f92e64b689196ec6c604646fd3fe4771ff1bf03f65d8ecc3addbb5f300
+00000049454e44ae426082
+"""),
+}
+
+def read_pam_header(infile):
+    """
+    Read (the rest of a) PAM header.  `infile` should be positioned
+    immediately after the initial 'P7' line (at the beginning of the
+    second line).  Returns are as for `read_pnm_header`.
+    """
+
+    # Unlike PBM, PGM, and PPM, we can read the header a line at a time.
+    header = dict()
+    while True:
+        l = infile.readline().strip()
+        if l == strtobytes('ENDHDR'):
+            break
+        if not l:
+            raise EOFError('PAM ended prematurely')
+        if l[0] == strtobytes('#'):
+            continue
+        l = l.split(None, 1)
+        if l[0] not in header:
+            header[l[0]] = l[1]
+        else:
+            header[l[0]] += strtobytes(' ') + l[1]
+
+    required = ['WIDTH', 'HEIGHT', 'DEPTH', 'MAXVAL']
+    required = [strtobytes(x) for x in required]
+    WIDTH,HEIGHT,DEPTH,MAXVAL = required
+    present = [x for x in required if x in header]
+    if len(present) != len(required):
+        raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL')
+    width = int(header[WIDTH])
+    height = int(header[HEIGHT])
+    depth = int(header[DEPTH])
+    maxval = int(header[MAXVAL])
+    if (width <= 0 or
+        height <= 0 or
+        depth <= 0 or
+        maxval <= 0):
+        raise Error(
+          'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers')
+    return 'P7', width, height, depth, maxval
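+# For reference, a minimal PAM header that read_pam_header accepts once the
+# leading 'P7' line has been consumed (an illustrative sketch, not taken
+# from any file in this change):
+#
+#   WIDTH 2
+#   HEIGHT 2
+#   DEPTH 1
+#   MAXVAL 255
+#   TUPLTYPE GRAYSCALE
+#   ENDHDR
+#
+# which returns ('P7', 2, 2, 1, 255).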
+
+def read_pnm_header(infile, supported=('P5','P6')):
+    """
+    Read a PNM header, returning (format,width,height,depth,maxval).
+    `width` and `height` are in pixels.  `depth` is the number of
+    channels in the image; for PBM and PGM it is synthesized as 1, for
+    PPM as 3; for PAM images it is read from the header.  `maxval` is
+    synthesized (as 1) for PBM images.
+    """
+
+    # Generally, see http://netpbm.sourceforge.net/doc/ppm.html
+    # and http://netpbm.sourceforge.net/doc/pam.html
+
+    supported = [strtobytes(x) for x in supported]
+
+    # Technically 'P7' must be followed by a newline, so by using
+    # rstrip() we are being liberal in what we accept.  I think this
+    # is acceptable.
+    type = infile.read(3).rstrip()
+    if type not in supported:
+        raise NotImplementedError('file format %s not supported' % type)
+    if type == strtobytes('P7'):
+        # PAM header parsing is completely different.
+        return read_pam_header(infile)
+    # Expected number of tokens in header (3 for the PBM types P1/P4,
+    # 4 for P5/P6).
+    expected = 4
+    pbm = ('P1', 'P4')
+    if type in pbm:
+        expected = 3
+    header = [type]
+
+    # We have to read the rest of the header byte by byte because the
+    # final whitespace character (immediately following the MAXVAL in
+    # the case of P6) may not be a newline.  Of course all PNM files in
+    # the wild use a newline at this point, so it's tempting to use
+    # readline; but it would be wrong.
+    def getc():
+        c = infile.read(1)
+        if not c:
+            raise Error('premature EOF reading PNM header')
+        return c
+
+    c = getc()
+    while True:
+        # Skip whitespace that precedes a token.
+        while c.isspace():
+            c = getc()
+        # Skip comments; a comment runs to the end of its line, so go
+        # round again to consume the newline and any further whitespace
+        # or comments before the next token.
+        if c == '#':
+            while c not in '\n\r':
+                c = getc()
+            continue
+        if not c.isdigit():
+            raise Error('unexpected character %s found in header' % c)
+        # According to the specification it is legal to have comments
+        # that appear in the middle of a token.
+        # This is bonkers; I've never seen it; and it's a bit awkward to
+        # code good lexers in Python (no goto).  So we break on such
+        # cases.
+        token = strtobytes('')
+        while c.isdigit():
+            token += c
+            c = getc()
+        # Slight hack.  All "tokens" are decimal integers, so convert
+        # them here.
+        header.append(int(token))
+        if len(header) == expected:
+            break
+    # Skip comments (again)
+    while c == '#':
+        while c not in '\n\r':
+            c = getc()
+    if not c.isspace():
+        raise Error('expected header to end with whitespace, not %s' % c)
+
+    if type in pbm:
+        # synthesize a MAXVAL
+        header.append(1)
+    depth = (1,3)[type == strtobytes('P6')]
+    return header[0], header[1], header[2], depth, header[3]
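+# A sketch of the expected behaviour (illustrative values, not from a test):
+# the PGM header 'P5 3 2 255\n' parses to ('P5', 3, 2, 1, 255) and the PPM
+# header 'P6 3 2 255\n' to ('P6', 3, 2, 3, 255); the depth of 1 or 3 is
+# synthesized from the type as described in the docstring.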
+
+def write_pnm(file, width, height, pixels, meta):
+    """Write a Netpbm PNM/PAM file."""
+
+    bitdepth = meta['bitdepth']
+    maxval = 2**bitdepth - 1
+    # Rudely, the number of image planes can be used to determine
+    # whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM).
+    planes = meta['planes']
+    # Can be an assert as long as we assume that pixels and meta came
+    # from a PNG file.
+    assert planes in (1,2,3,4)
+    if planes in (1,3):
+        if 1 == planes:
+            # PGM
+            # Could generate PBM if maxval is 1, but we don't (for one
+            # thing, we'd have to convert the data, not just blat it
+            # out).
+            fmt = 'P5'
+        else:
+            # PPM
+            fmt = 'P6'
+        file.write('%s %d %d %d\n' % (fmt, width, height, maxval))
+    if planes in (2,4):
+        # PAM
+        # See http://netpbm.sourceforge.net/doc/pam.html
+        if 2 == planes:
+            tupltype = 'GRAYSCALE_ALPHA'
+        else:
+            tupltype = 'RGB_ALPHA'
+        file.write('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n'
+                   'TUPLTYPE %s\nENDHDR\n' %
+                   (width, height, planes, maxval, tupltype))
+    # Values per row
+    vpr = planes * width
+    # struct format
+    fmt = '>%d' % vpr
+    if maxval > 0xff:
+        fmt = fmt + 'H'
+    else:
+        fmt = fmt + 'B'
+    for row in pixels:
+        file.write(struct.pack(fmt, *row))
+    file.flush()
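+# A quick sketch of the output (assumed example values): an 8-bit greyscale
+# image 3 pixels wide and 2 high is written as a PGM with header
+# 'P5 3 2 255\n'; an 8-bit RGBA image of the same size becomes a PAM whose
+# header declares WIDTH 3, HEIGHT 2, DEPTH 4, MAXVAL 255 and TUPLTYPE
+# RGB_ALPHA.  In both cases the rows follow packed big-endian, one byte
+# (or two, for 16-bit data) per value.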
+
+def color_triple(color):
+    """
+    Convert a command line colour value to a RGB triple of integers.
+    FIXME: Somewhere we need support for greyscale backgrounds etc.
+    """
+    if color.startswith('#') and len(color) == 4:
+        return (int(color[1], 16),
+                int(color[2], 16),
+                int(color[3], 16))
+    if color.startswith('#') and len(color) == 7:
+        return (int(color[1:3], 16),
+                int(color[3:5], 16),
+                int(color[5:7], 16))
+    elif color.startswith('#') and len(color) == 13:
+        return (int(color[1:5], 16),
+                int(color[5:9], 16),
+                int(color[9:13], 16))
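+# Example values (an illustrative sketch): color_triple('#fff') returns
+# (15, 15, 15), color_triple('#ff8000') returns (255, 128, 0), and the
+# 13-character form color_triple('#ffff80000000') returns (65535, 32768, 0).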
+
+def _add_common_options(parser):
+    """Call *parser.add_option* for each of the options that are
+    common between this PNG--PNM conversion tool and the gen
+    tool.
+    """
+    parser.add_option("-i", "--interlace",
+                      default=False, action="store_true",
+                      help="create an interlaced PNG file (Adam7)")
+    parser.add_option("-t", "--transparent",
+                      action="store", type="string", metavar="#RRGGBB",
+                      help="mark the specified colour as transparent")
+    parser.add_option("-b", "--background",
+                      action="store", type="string", metavar="#RRGGBB",
+                      help="save the specified background colour")
+    parser.add_option("-g", "--gamma",
+                      action="store", type="float", metavar="value",
+                      help="save the specified gamma value")
+    parser.add_option("-c", "--compression",
+                      action="store", type="int", metavar="level",
+                      help="zlib compression level (0-9)")
+    return parser
+
+def _main(argv):
+    """
+    Run the PNG encoder with options from the command line.
+    """
+
+    # Parse command line arguments
+    from optparse import OptionParser
+    import re
+    version = '%prog ' + re.sub(r'( ?\$|URL: |Rev:)', '', __version__)
+    parser = OptionParser(version=version)
+    parser.set_usage("%prog [options] [imagefile]")
+    parser.add_option('-r', '--read-png', default=False,
+                      action='store_true',
+                      help='Read PNG, write PNM')
+    parser.add_option("-a", "--alpha",
+                      action="store", type="string", metavar="pgmfile",
+                      help="alpha channel transparency (RGBA)")
+    _add_common_options(parser)
+
+    (options, args) = parser.parse_args(args=argv[1:])
+
+    # Convert options
+    if options.transparent is not None:
+        options.transparent = color_triple(options.transparent)
+    if options.background is not None:
+        options.background = color_triple(options.background)
+
+    # Prepare input and output files
+    if len(args) == 0:
+        infilename = '-'
+        infile = sys.stdin
+    elif len(args) == 1:
+        infilename = args[0]
+        infile = open(infilename, 'rb')
+    else:
+        parser.error("more than one input file")
+    outfile = sys.stdout
+    if sys.platform == "win32":
+        import msvcrt, os
+        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+
+    if options.read_png:
+        # Decode PNG and write PNM (PGM/PPM, or PAM when there is an
+        # alpha channel).
+        png = Reader(file=infile)
+        width,height,pixels,meta = png.asDirect()
+        write_pnm(outfile, width, height, pixels, meta)
+    else:
+        # Encode PNM to PNG
+        format, width, height, depth, maxval = \
+          read_pnm_header(infile, ('P5','P6','P7'))
+        # When it comes to the variety of input formats, we do something
+        # rather rude.  Observe that L, LA, RGB, RGBA are the 4 colour
+        # types supported by PNG and that they correspond to 1, 2, 3, 4
+        # channels respectively.  So we use the number of channels in
+        # the source image to determine which one we have.  We do not
+        # care about TUPLTYPE.
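+        # (For instance, a PAM with DEPTH 2 is treated below as greyscale
+        # plus alpha: greyscale=True and pamalpha=True.)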
+        greyscale = depth <= 2
+        pamalpha = depth in (2,4)
+        supported = map(lambda x: 2**x-1, range(1,17))
+        try:
+            mi = supported.index(maxval)
+        except ValueError:
+            raise NotImplementedError(
+              'your maxval (%s) not in supported list %s' %
+              (maxval, str(supported)))
+        bitdepth = mi+1
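+        # (For example, maxval 255 sits at index 7 of the supported list,
+        # giving bitdepth 8; maxval 65535 gives bitdepth 16.)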
+        writer = Writer(width, height,
+                        greyscale=greyscale,
+                        bitdepth=bitdepth,
+                        interlace=options.interlace,
+                        transparent=options.transparent,
+                        background=options.background,
+                        alpha=bool(pamalpha or options.alpha),
+                        gamma=options.gamma,
+                        compression=options.compression)
+        if options.alpha:
+            pgmfile = open(options.alpha, 'rb')
+            format, awidth, aheight, adepth, amaxval = \
+              read_pnm_header(pgmfile, ('P5',))
+            if amaxval != 255:
+                raise NotImplementedError(
+                  'maxval %s not supported for alpha channel' % amaxval)
+            if (awidth, aheight) != (width, height):
+                raise ValueError("alpha channel image size mismatch"
+                                 " (%s has %sx%s but %s has %sx%s)"
+                                 % (infilename, width, height,
+                                    options.alpha, awidth, aheight))
+            writer.convert_ppm_and_pgm(infile, pgmfile, outfile)
+        else:
+            writer.convert_pnm(infile, outfile)
+
+
+if __name__ == '__main__':
+    try:
+        _main(sys.argv)
+    except Error, e:
+        print >>sys.stderr, e
diff --git a/craftui/sim1.tgz b/craftui/sim1.tgz
index e1d1a97..b414105 100644
--- a/craftui/sim1.tgz
+++ b/craftui/sim1.tgz
Binary files differ
diff --git a/craftui/sim2.tgz b/craftui/sim2.tgz
index a69b101..fff9aae 100644
--- a/craftui/sim2.tgz
+++ b/craftui/sim2.tgz
Binary files differ
diff --git a/craftui/www/config.thtml b/craftui/www/config.thtml
index 88ed88f..3d2a3c9 100644
--- a/craftui/www/config.thtml
+++ b/craftui/www/config.thtml
@@ -3,6 +3,7 @@
   <meta content="text/html;charset=utf-8" http-equiv="Content-Type">
   <meta content="utf-8" http-equiv="encoding">
   <script src="static/jquery-2.1.4.min.js"></script>
+  <script src="static/dygraph-combined.js"></script>
   <link rel="stylesheet" type="text/css" href="static/craft.css">
   <link rel=icon href=static/favicon.ico>
   <link rel=stylesheet href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700&amp;lang=en">
@@ -22,7 +23,6 @@
       </nav>
     </section>
   </header>
-  <br>
   <div hidden>
     <input id=hidden_on_https value="{{hidden_on_https}}">
     <input id=hidden_on_peer value="{{hidden_on_peer}}">
@@ -31,6 +31,27 @@
     <input id=peer_arg_on_peer value="{{peer_arg_on_peer}}">
   </div>
   <div {{shown_on_peer}}><font color="red"><b>This is the Peer</b></font></div>
+  <div>
+    <table class="leds">
+      <tr>
+        <td width=50 align=center>Craft<br>
+          <img id="leds/Craft" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>ACS<br>
+          <img id="leds/ACS" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Switch<br>
+          <img id="leds/Switch" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Modem<br>
+          <img id="leds/Modem" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Radio<br>
+          <img id="leds/Radio" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>RSSI<br>
+          <img id="leds/RSSI" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>MSE<br>
+          <img id="leds/MSE" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Peer<br>
+          <img id="leds/Peer" width=20 height=20 src=/static/grey.gif></td>
+    </table>
+  </div>
   <div class="tabs">
     <div class="tab">
       <input type="radio" id="tab-1" name="tab-group-1" checked>
@@ -38,7 +59,7 @@
       <div class="content">
         <b>Running Configuration:</b><br>
         <textarea id=configuration cols=60 rows=30>...</textarea><br>
-        <input type=submit value=Apply onclick="CraftUI.config('password_admin', 0, 1)">
+        <input type=submit value=Apply onclick="craftUI.config('password_admin', 0, 1)">
       </div>
     </div>
 
@@ -60,7 +81,7 @@
               Current <b>Admin</b> Password: <input id=password_admin_admin type=password value=""><br>
               New Admin Password: <input id=password_admin_new type=password value=""><br>
               Confirm: <input id=password_admin_confirm type=password value=""><br>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('password_admin', 0, 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('password_admin', 0, 1)">
             <td>
               <span id=password_admin_result>...</span>
 
@@ -71,7 +92,7 @@
               Current <b>Admin</b> Password: <input id=password_guest_admin type=password value=""><br>
               New Guest Password: <input id=password_guest_new type=password value=""><br>
               Confirm: <input id=password_guest_confirm type=password value=""><br>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('password_guest', 0, 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('password_guest', 0, 1)">
             <td>
               <span id=password_guest_result>...</span>
 
@@ -97,10 +118,10 @@
             <td align=right><span id="platform/active_craft_inet">...</span>
             <td align=right>
               <span id="platform/craft_ipaddr">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('craft_ipaddr', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('craft_ipaddr', 1)">
             <td>
               <input id=craft_ipaddr type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('craft_ipaddr')">
+              <input type=submit value=Configure onclick="craftUI.config('craft_ipaddr')">
             <td>
               <span id=craft_ipaddr_result>...</span>
 
@@ -109,10 +130,10 @@
             <td align=right><span id="platform/active_link_inet">...</span>
             <td align=right>
               <span id="platform/link_ipaddr">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('link_ipaddr', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('link_ipaddr', 1)">
             <td>
               <input id=link_ipaddr type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('link_ipaddr')">
+              <input type=submit value=Configure onclick="craftUI.config('link_ipaddr')">
             <td>
               <span id=link_ipaddr_result>...</span>
 
@@ -121,10 +142,10 @@
             <td align=right>See Peer
             <td align=right>
               <span id="platform/peer_ipaddr">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('peer_ipaddr', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('peer_ipaddr', 1)">
             <td>
               <input id=peer_ipaddr type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('peer_ipaddr')">
+              <input type=submit value=Configure onclick="craftUI.config('peer_ipaddr')">
             <td>
               <span id=peer_ipaddr_result>...</span>
 
@@ -133,10 +154,10 @@
             <td align=right><span id="platform/active_inband_vlan">...</span>
             <td align=right>
               <span id="platform/vlan_inband">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('vlan_inband', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('vlan_inband', 1)">
             <td>
               <input id=vlan_inband type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('vlan_inband')">
+              <input type=submit value=Configure onclick="craftUI.config('vlan_inband')">
             <td>
               <span id=vlan_inband_result>...</span>
 
@@ -145,10 +166,10 @@
             <td align=right><span id="platform/active_ooband_vlan">...</span>
             <td align=right>
               <span id="platform/vlan_ooband">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('vlan_ooband', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('vlan_ooband', 1)">
             <td>
               <input id=vlan_ooband type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('vlan_ooband')">
+              <input type=submit value=Configure onclick="craftUI.config('vlan_ooband')">
             <td>
               <span id=vlan_ooband_result>...</span>
 
@@ -157,10 +178,10 @@
             <td align=right><span id="platform/active_link_vlan">...</span>
             <td align=right>
               <span id="platform/vlan_link">...</span>
-              <input type=submit value="Apply Now" onclick="CraftUI.config('vlan_peer', 1)">
+              <input type=submit value="Apply Now" onclick="craftUI.config('vlan_peer', 1)">
             <td>
               <input id=vlan_peer type=text value="">
-              <input type=submit value=Configure onclick="CraftUI.config('vlan_peer')">
+              <input type=submit value=Configure onclick="craftUI.config('vlan_peer')">
             <td>
               <span id=vlan_peer_result>...</span>
 
@@ -185,7 +206,7 @@
             <td align=right><span id="radio/hiTransceiver/pll/frequency">...</span>
             <td>
               <input id=freq_hi type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('freq_hi')">
+              <input type=submit value=Apply onclick="craftUI.config('freq_hi')">
             <td>
               <span id=freq_hi_result>...</span>
 
@@ -194,7 +215,7 @@
             <td align=right><span id="radio/loTransceiver/pll/frequency">...</span>
             <td>
               <input id=freq_lo type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('freq_lo')">
+              <input type=submit value=Apply onclick="craftUI.config('freq_lo')">
             <td>
               <span id=freq_lo_result>...</span>
 
@@ -203,7 +224,7 @@
             <td align=right><span id="radio/hiTransceiver/mode">...</span>
             <td>
               <input id=mode_hi type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('mode_hi')">
+              <input type=submit value=Apply onclick="craftUI.config('mode_hi')">
             <td>
               <span id=mode_hi_result>...</span>
 
@@ -212,7 +233,7 @@
             <td align=right><span id="modem/status/acmEngineRxSensorsEnabled">...</span>
             <td>
               <input id=acm_on type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('acm_on')">
+              <input type=submit value=Apply onclick="craftUI.config('acm_on')">
             <td>
               <span id=acm_on_result>...</span>
 
@@ -221,7 +242,7 @@
             <td align=right><span id="radio/tx/paPowerSet">...</span>
             <td>
               <input id=tx_powerlevel type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('tx_powerlevel')">
+              <input type=submit value=Apply onclick="craftUI.config('tx_powerlevel')">
             <td>
               <span id=tx_powerlevel_result>...</span>
 
@@ -230,7 +251,7 @@
             <td align=right><span id="radio/tx/vgaGain">...</span>
             <td>
               <input id=tx_gain type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('tx_gain')">
+              <input type=submit value=Apply onclick="craftUI.config('tx_gain')">
             <td>
               <span id=tx_gain_result>...</span>
 
@@ -239,7 +260,7 @@
             <td align=right><span id="radio/rx/agcDigitalGainIndex">...</span>
             <td>
               <input id=rx_gainindex type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('rx_gainindex')">
+              <input type=submit value=Apply onclick="craftUI.config('rx_gainindex')">
             <td>
               <span id=rx_gainindex_result>...</span>
 
@@ -248,11 +269,12 @@
             <td align=right><span id="radio/paLnaPowerEnabled">...</span>
             <td>
               <input id=palna_on type=text value="">
-              <input type=submit value=Apply onclick="CraftUI.config('palna_on')">
+              <input type=submit value=Apply onclick="craftUI.config('palna_on')">
             <td>
               <span id=palna_on_result>...</span>
 
         </table>
+        <div id="rsl-graph">...</div>
       </div>
     </div>
 
@@ -261,17 +283,21 @@
       <label for="tab-5">Reboot</label>
       <div class="content">
         <input hidden id=reboot type=text value="true">
-        <input type=submit value=Reboot onclick="CraftUI.config('reboot')">
+        <input type=submit value=Reboot onclick="craftUI.config('reboot')">
         <span class="values">
           <span id=reboot_result>...</span>
         </span>
         <br>
+        Note: Allow 60 seconds for the UI to return after reboot, and 200 seconds for the link to come up.
+        <br>
         <br>
         <input hidden id=factory_reset type=text value="true">
-        <input type=submit value="Factory Reset" onclick="CraftUI.config('factory_reset')">
+        <input type=submit value="Factory Reset" onclick="craftUI.config('factory_reset')">
         <span class="values">
           <span id=factory_reset_result>...</span>
         </span>
+        <br>
+        Warning: This will restore all defaults, including resetting the craft port address to <a href=http://192.168.1.1/>192.168.1.1</a>.
       </div>
     </div>
 
@@ -279,6 +305,7 @@
       <input type="radio" id="tab-6" name="tab-group-1">
       <label for="tab-6">Debug</label>
       <div class="content">
+        <b>Constellation:</b><span class="values"><a href="/rxslicer.png" target=_blank>rxslicer.png</a></span><br>
         <b>refreshCount:</b><span class="values" id="platform/refreshCount">...</span><br>
         <b>unhandled xml:</b><span class="values">
           <textarea id=unhandled cols=60 rows=30>...</textarea></span><br>
diff --git a/craftui/www/static/blue.gif b/craftui/www/static/blue.gif
new file mode 100644
index 0000000..3caecb9
--- /dev/null
+++ b/craftui/www/static/blue.gif
Binary files differ
diff --git a/craftui/www/static/craft.css b/craftui/www/static/craft.css
index 5c383ab..6887b60 100644
--- a/craftui/www/static/craft.css
+++ b/craftui/www/static/craft.css
@@ -1,6 +1,6 @@
 table, th, td {
   margin: 1px;
-  margin-bottom: 20px;
+  margin-bottom: 10px;
   border: 1px solid #ccc;
   font-size:12px;
 }
@@ -73,3 +73,8 @@
   z-index: 1;
   opacity: 1;
 }
+
+.leds {
+  background: #fff;
+  margin-bottom: 40px;
+}
diff --git a/craftui/www/static/craft.js b/craftui/www/static/craft.js
index 7b539ba..90a9bf9 100644
--- a/craftui/www/static/craft.js
+++ b/craftui/www/static/craft.js
@@ -7,25 +7,67 @@
     document.documentElement.classList.add('unsupported');
     return;
   }
-
-  // Initialize the info.
-  CraftUI.getInfo();
-
-  // Refresh data periodically.
-  window.setInterval(CraftUI.getInfo, 5000);
 };
 
-CraftUI.info = {checksum: 0};
-CraftUI.am_sending = false
+CraftUI.prototype.init = function() {
+  this.info = {checksum: 0};
+  this.am_sending = false
 
-CraftUI.updateField = function(key, val) {
+  // store history for some values
+  this.history = {
+    'radio/rx/rsl': { name: 'rsl-graph', count: 24, values: [] }
+  };
+
+  // Initialize the info.
+  this.getInfo();
+
+  // Refresh data periodically.
+  var f = function() {
+    this.ui.getInfo();
+  };
+  window.ui = this;
+  window.setInterval(f, 5000);
+};
+
+CraftUI.prototype.updateGraphs = function() {
+  for (var name in this.history) {
+    var h = this.history[name];
+    if (h.values.length == 0) {
+      continue;
+    }
+    if (!h.graph) {
+      h.graph = new Dygraph(document.getElementById(h.name), h.values, {
+        xlabel: 'Time',
+        ylabel: 'RSL',
+        labels: [ 'Date', 'RSL' ],
+      });
+    }
+    h.graph.updateOptions({ 'file': h.values });
+  }
+};
+
+CraftUI.prototype.updateField = function(key, val) {
+  // store history if requested
+  var h = this.history[key];
+  if (h) {
+    h.values.push([ui.date, val]);
+    if (h.values.length > h.count) {
+      h.values = h.values.slice(-h.count);
+    }
+  }
+  // find element, show on debug page if not used
   var el = document.getElementById(key);
   if (el == null) {
-    self.unhandled += key + '=' + val + '; ';
+    this.unhandled += key + '=' + val + '; ';
     return;
   }
-  el.innerHTML = ''; // Clear the field.
+  // For IMG objects, set image
+  if (el.src) {
+    el.src = '/static/' + val;
+    return;
+  }
   // For objects, create an unordered list and append the values as list items.
+  el.innerHTML = ''; // Clear the field.
   if (val && typeof val === 'object') {
     var ul = document.createElement('ul');
     for (key in val) {
@@ -48,44 +90,61 @@
   el.appendChild(document.createTextNode(val));
 };
 
-CraftUI.flattenAndUpdateFields = function(jsonmap, prefix) {
+CraftUI.prototype.flattenAndUpdateFields = function(jsonmap, prefix) {
   for (var key in jsonmap) {
     var val = jsonmap[key];
     if (typeof val !== 'object') {
-      CraftUI.updateField(prefix + key, jsonmap[key]);
+      this.updateField(prefix + key, jsonmap[key]);
     } else {
-      CraftUI.flattenAndUpdateFields(val, prefix + key + '/')
+      this.flattenAndUpdateFields(val, prefix + key + '/')
     }
   }
 };
 
-CraftUI.getInfo = function() {
+CraftUI.prototype.getInfo = function() {
   // Request info, set the connected status, and update the fields.
-  if (CraftUI.am_sending) {
+  if (this.am_sending) {
     return;
   }
   var peer_arg_on_peer = document.getElementById("peer_arg_on_peer").value;
   var xhr = new XMLHttpRequest();
+  xhr.timeout = 2000;
+  xhr.ui = this;
   xhr.onreadystatechange = function() {
-    self.unhandled = '';
-    if (xhr.readyState == 4 && xhr.status == 200) {
-      var list = JSON.parse(xhr.responseText);
-      CraftUI.flattenAndUpdateFields(list, '');
+    var ui = this.ui;
+    if (xhr.readyState != 4) {
+      return;
     }
-    CraftUI.updateField('unhandled', self.unhandled);
-    CraftUI.am_sending = false
+    ui.unhandled = '';
+    var led = 'red.gif';
+    if (xhr.status == 200) {
+      ui.date = new Date();
+      var list = JSON.parse(xhr.responseText);
+      ui.flattenAndUpdateFields(list, '');
+      led = 'green.gif';
+    } else {
+      var leds = ['ACS', 'Switch', 'Modem', 'Radio', 'RSSI', 'MSE', 'Peer'];
+      for (var i in leds) {
+        ui.updateField('leds/' + leds[i], 'grey.gif')
+      }
+    }
+    ui.updateField('unhandled', ui.unhandled);
+    ui.updateField('leds/Craft', led)
+    ui.updateGraphs();
+    ui.am_sending = false
   };
   xhr.open('get', '/content.json' + peer_arg_on_peer, true);
-  CraftUI.am_sending = true
+  this.am_sending = true
   xhr.send();
 };
 
-CraftUI.config = function(key, activate, is_password) {
+CraftUI.prototype.config = function(key, activate, is_password) {
   // POST as json
   var peer_arg_on_peer = document.getElementById("peer_arg_on_peer").value;
   var el = document.getElementById(key);
-  var xhr = new XMLHttpRequest();
   var action = "Configured";
+  var xhr = new XMLHttpRequest();
+  xhr.ui = this;
   xhr.open('post', '/content.json' + peer_arg_on_peer);
   xhr.setRequestHeader('Content-Type', 'application/json; charset=UTF-8');
   var data;
@@ -108,13 +167,14 @@
   var resultid = key + "_result"
   var el = document.getElementById(resultid);
   xhr.onload = function(e) {
+    var ui = this.ui;
     var json = JSON.parse(xhr.responseText);
     if (json.error == 0) {
       el.innerHTML = action + " successfully.";
     } else {
       el.innerHTML = "Error: " + json.errorstring;
     }
-    CraftUI.getInfo();
+    ui.getInfo();
   }
   xhr.onerror = function(e) {
     el.innerHTML = xhr.statusText + xhr.responseText;
@@ -123,4 +183,5 @@
   xhr.send(txt);
 };
 
-new CraftUI();
+var craftUI = new CraftUI();
+craftUI.init();
diff --git a/craftui/www/static/green.gif b/craftui/www/static/green.gif
new file mode 100644
index 0000000..2281692
--- /dev/null
+++ b/craftui/www/static/green.gif
Binary files differ
diff --git a/craftui/www/static/grey.gif b/craftui/www/static/grey.gif
new file mode 100644
index 0000000..34fe83e
--- /dev/null
+++ b/craftui/www/static/grey.gif
Binary files differ
diff --git a/craftui/www/static/red.gif b/craftui/www/static/red.gif
new file mode 100644
index 0000000..539c5cd
--- /dev/null
+++ b/craftui/www/static/red.gif
Binary files differ
diff --git a/craftui/www/static/yellow.gif b/craftui/www/static/yellow.gif
new file mode 100644
index 0000000..9b20598
--- /dev/null
+++ b/craftui/www/static/yellow.gif
Binary files differ
diff --git a/craftui/www/status.thtml b/craftui/www/status.thtml
index f2a4ee9..a6e37be 100644
--- a/craftui/www/status.thtml
+++ b/craftui/www/status.thtml
@@ -3,6 +3,7 @@
   <meta content="text/html;charset=utf-8" http-equiv="Content-Type">
   <meta content="utf-8" http-equiv="encoding">
   <script src="static/jquery-2.1.4.min.js"></script>
+  <script src="static/dygraph-combined.js"></script>
   <link rel="stylesheet" type="text/css" href="static/craft.css">
   <link rel=icon href=static/favicon.ico>
   <link rel=stylesheet href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700&amp;lang=en">
@@ -22,7 +23,6 @@
       </nav>
     </section>
   </header>
-  <br>
   <div hidden>
     <input id=hidden_on_https value="{{hidden_on_https}}">
     <input id=hidden_on_peer value="{{hidden_on_peer}}">
@@ -31,6 +31,27 @@
     <input id=peer_arg_on_peer value="{{peer_arg_on_peer}}">
   </div>
   <div {{shown_on_peer}}><font color="red"><b>This is the Peer</b></font></div>
+  <div>
+    <table class="leds">
+      <tr>
+        <td width=50 align=center>Craft<br>
+          <img id="leds/Craft" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>ACS<br>
+          <img id="leds/ACS" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Switch<br>
+          <img id="leds/Switch" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Modem<br>
+          <img id="leds/Modem" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Radio<br>
+          <img id="leds/Radio" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>RSSI<br>
+          <img id="leds/RSSI" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>MSE<br>
+          <img id="leds/MSE" width=20 height=20 src=/static/grey.gif></td>
+        <td width=50 align=center>Peer<br>
+          <img id="leds/Peer" width=20 height=20 src=/static/grey.gif></td>
+    </table>
+  </div>
   <div class="tabs">
     <div class="tab">
       <input type="radio" id="tab-1" name="tab-group-1" checked>
@@ -487,6 +508,7 @@
       <input type="radio" id="tab-6" name="tab-group-1">
       <label for="tab-6">Debug</label>
       <div class="content">
+        <b>Constellation:</b><span class="values"><a href="/rxslicer.png" target=_blank>rxslicer.png</a></span><br>
         <b>refreshCount:</b><span class="values" id="platform/refreshCount">...</span><br>
         <b>unhandled xml:</b><span class="values">
           <textarea id=unhandled cols=60 rows=30>...</textarea></span><br>
diff --git a/gpio-mailbox/TEST.gpio-mailbox b/gpio-mailbox/TEST.gpio-mailbox
index 3cd5dee..edc8f60 100644
--- a/gpio-mailbox/TEST.gpio-mailbox
+++ b/gpio-mailbox/TEST.gpio-mailbox
@@ -1,4 +1,4 @@
-rm -rf /tmp/gpio /tmp/led
+rm -rf /tmp/gpio /tmp/leds
 
 mkdir -p /tmp/gpio
 echo x5 0 1 0 2 0 0x0f > /tmp/gpio/leds
diff --git a/gpio-mailbox/broadcom.c b/gpio-mailbox/broadcom.c
index 575f197..a53e51e 100644
--- a/gpio-mailbox/broadcom.c
+++ b/gpio-mailbox/broadcom.c
@@ -614,6 +614,15 @@
   }
 }
 
+static void *mmap_(void* addr, size_t size, int prot, int flags, int fd,
+                   off_t offset) {
+#ifdef __ANDROID__
+  return mmap64(addr, size, prot, flags, fd, (off64_t)(uint64_t)(uint32_t)offset);
+#else
+  return mmap(addr, size, prot, flags, fd, offset);
+#endif
+}
+
 static int platform_init(struct platform_info* p) {
   platform_cleanup();
 
@@ -623,8 +632,8 @@
     return -1;
   }
   mmap_size = p->mmap_size;
-  mmap_addr = mmap(NULL, mmap_size, PROT_READ | PROT_WRITE, MAP_SHARED,
-                   mmap_fd, p->mmap_base);
+  mmap_addr = mmap_(NULL, mmap_size, PROT_READ | PROT_WRITE, MAP_SHARED,
+                    mmap_fd, p->mmap_base);
   if (mmap_addr == MAP_FAILED) {
     perror("mmap");
     platform_cleanup();
diff --git a/gpio-mailbox/gfch100.c b/gpio-mailbox/gfch100.c
index 73d97bd..2cf1f23 100644
--- a/gpio-mailbox/gfch100.c
+++ b/gpio-mailbox/gfch100.c
@@ -18,15 +18,13 @@
 #define GPIO_OUT                "out"
 
 /* GPIO_ACTIVITY LED is blue on Chimera. */
-#define GPIO_ACTIVITY           "30"
-#define GPIO_RED                "31"
+#define GPIO_ACTIVITY           "/led_activity"
+#define GPIO_RED                "/led_red"
 
-#define GPIO_BASE_DIR           "/sys/class/gpio"
-#define GPIO_EXPORT             GPIO_BASE_DIR "/export"
+#define GPIO_BASE_DIR           "/dev/gpio"
 
-#define GPIO_DIR(n)             GPIO_BASE_DIR "/gpio" n
+#define GPIO_DIR(n)             GPIO_BASE_DIR n
 
-#define GPIO_DIRECTION(dir)     dir "/direction"
 #define GPIO_VALUE(dir)         dir "/value"
 
 struct PinHandle_s {
@@ -38,9 +36,7 @@
 };
 
 struct sysgpio {
-  const char* export_value;
   const char* value_path;
-  const char* direction_path;
 };
 
 struct platform_info {
@@ -57,13 +53,9 @@
       .value_path = "/sys/class/hwmon/hwmon0/temp1_input",
     },
     .led_red = {
-      .export_value = GPIO_RED,
-      .direction_path = GPIO_DIRECTION(GPIO_DIR(GPIO_RED)),
       .value_path = GPIO_VALUE(GPIO_DIR(GPIO_RED)),
     },
     .led_activity = {
-      .export_value = GPIO_ACTIVITY,
-      .direction_path = GPIO_DIRECTION(GPIO_DIR(GPIO_ACTIVITY)),
       .value_path = GPIO_VALUE(GPIO_DIR(GPIO_ACTIVITY)),
     },
   }
@@ -89,16 +81,6 @@
     perror("calloc(PinHandle)");
     return NULL;
   }
-
-  // initialize leds to match boot values
-  write_file_string(GPIO_EXPORT, GPIO_RED);
-  write_file_string(platform->led_red.direction_path, GPIO_OUT);
-  write_file_string(platform->led_red.value_path, GPIO_OFF);
-
-  write_file_string(GPIO_EXPORT, GPIO_ACTIVITY);
-  write_file_string(platform->led_activity.direction_path, GPIO_OUT);
-  write_file_string(platform->led_activity.value_path, GPIO_ON);
-
   return handle;
 }
 
diff --git a/hnvram/hnvram_main.c b/hnvram/hnvram_main.c
index b850048..323d62e 100644
--- a/hnvram/hnvram_main.c
+++ b/hnvram/hnvram_main.c
@@ -83,6 +83,7 @@
   {"LASER_CHANNEL",        NVRAM_FIELD_LASER_CHANNEL,     HNVRAM_STRING},
   {"MAC_ADDR_PON",         NVRAM_FIELD_MAC_ADDR_PON,      HNVRAM_MAC},
   {"PRODUCTION_UNIT",      NVRAM_FIELD_PRODUCTION_UNIT,   HNVRAM_STRING},
+  {"BOOT_TARGET",          NVRAM_FIELD_BOOT_TARGET,       HNVRAM_STRING},
 };
 
 const hnvram_field_t* get_nvram_field(const char* name) {
diff --git a/jsonpoll/jsonpoll.py b/jsonpoll/jsonpoll.py
index 11f9c64..cc38875 100755
--- a/jsonpoll/jsonpoll.py
+++ b/jsonpoll/jsonpoll.py
@@ -87,9 +87,8 @@
   def WriteToStderr(self, msg, is_json=False):
     """Write a message to stderr."""
     if is_json:
-      json_data = json.loads(msg)
       flat_data = []
-      self._FlatObject('', json_data, flat_data)
+      self._FlatObject('', msg, flat_data)
       # Make the json easier to parse from the logs.
       for s in flat_data:
         sys.stderr.write('%s\n' % s)
@@ -118,7 +117,7 @@
                                os.path.dirname(output_file))
             continue
           tmpfile = fd.name
-          fd.write(response)
+          fd.write(json.dumps(response))
           fd.flush()
           os.fsync(fd.fileno())
           try:
@@ -131,11 +130,22 @@
         if os.path.exists(tmpfile):
           os.unlink(tmpfile)
 
+  def ParseJSONFromResponse(self, response):
+    try:
+      json_resp = json.loads(response)
+    except UnicodeDecodeError as ex:
+      self.WriteToStderr('Non-UTF8 character in HTTP response: %s\n' % ex)
+      return None
+    except ValueError as ex:
+      self.WriteToStderr('Failed to parse JSON from HTTP response: %s\n' % ex)
+      return None
+    return json_resp
+
   def GetHttpResponse(self, url):
     """Creates a request and retrieves the response from a web server."""
     try:
       handle = urllib2.urlopen(url, timeout=self._SOCKET_TIMEOUT_SECS)
-      response = handle.read()
+      response = self.ParseJSONFromResponse(handle.read())
     except socket.timeout as ex:
       self.WriteToStderr('Connection to %s timed out after %d seconds: %s\n'
                          % (url, self._SOCKET_TIMEOUT_SECS, ex))
@@ -143,9 +153,11 @@
     except urllib2.URLError as ex:
       self.WriteToStderr('Connection to %s failed: %s\n' % (url, ex.reason))
       return None
+
     # Write the response to stderr so it will be uploaded with the other system
     # log files. This will allow turbogrinder to alert on the radio subsystem.
-    self.WriteToStderr(response, is_json=True)
+    if response is not None:
+      self.WriteToStderr(response, is_json=True)
     return response
 
   def CreateDirs(self, dir_to_create):
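
Note on the parsing change above: ParseJSONFromResponse() relies on Python 2's json.loads() raising ValueError for empty or malformed input and UnicodeDecodeError for byte strings that are not valid UTF-8 (UnicodeDecodeError is itself a ValueError subclass, so it must be caught first to produce the more specific message). A minimal standalone sketch of that behavior, assuming Python 2 as used in jsonpoll.py; the helper name is illustrative:

    import json

    def parse_or_none(raw):
      # json.loads() raises ValueError on empty or malformed JSON and
      # UnicodeDecodeError (a ValueError subclass) on non-UTF-8 bytes.
      try:
        return json.loads(raw)
      except UnicodeDecodeError:
        return None  # wrong encoding, e.g. UTF-16 bytes
      except ValueError:
        return None  # empty or truncated JSON

    assert parse_or_none('') is None
    assert parse_or_none('{ "key" : "') is None
    assert parse_or_none('{"key": "value"}') == {'key': 'value'}
    assert parse_or_none('{"key": "' + u'\u20ac'.encode('utf-16') + '"}') is None
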
diff --git a/jsonpoll/jsonpoll_test.py b/jsonpoll/jsonpoll_test.py
index f4f0240..1ccd764 100644
--- a/jsonpoll/jsonpoll_test.py
+++ b/jsonpoll/jsonpoll_test.py
@@ -56,7 +56,7 @@
     self.get_response_called = True
     if self.generate_empty_response:
       return None
-    return json.dumps(JSON_RESPONSE)
+    return self.ParseJSONFromResponse(self.json_response)
 
 
 class JsonPollTest(unittest.TestCase):
@@ -73,6 +73,7 @@
   def setUp(self):
     self.CreateTempFile()
     self.poller = FakeJsonPoll('fakehost.blah', 31337, 1)
+    self.poller.json_response = json.dumps(JSON_RESPONSE)
     self.poller.error_count = 0
     self.poller.generate_empty_response = False
 
@@ -90,7 +91,7 @@
     # equivalent JSON representation we wrote out from the mock.
     with open(self.output_file, 'r') as f:
       output = ''.join(line.rstrip() for line in f)
-      self.assertEqual(json.dumps(JSON_RESPONSE), output)
+      self.assertEqual(JSON_RESPONSE, json.loads(output))
 
   def testRequestStatsFailureToCreateDirOutput(self):
     self.poller.paths_to_statfiles = {'fake/url': '/root/cannotwrite'}
@@ -107,7 +108,7 @@
   def testCachedRequestStats(self):
     # Set the "last_response" as our mock output. This should mean we do not
     # write anything to the output file.
-    self.poller.last_response = json.dumps(JSON_RESPONSE)
+    self.poller.last_response = JSON_RESPONSE
 
     # Create a fake entry in the paths_to_stats map.
     self.poller.paths_to_statfiles = {'fake/url': self.output_file}
@@ -127,5 +128,39 @@
     want = ['base/key1=1', 'base/key2/key3=3', 'base/key2/key4=4']
     self.assertEqual(got.sort(), want.sort())
 
+  def testJSONParsing(self):
+    # { "key": "value" }
+    start_json = ' { "key" : "'
+    euro = u'\u20AC'
+    end_json = '" }'
+
+    # Test for empty JSON
+    self.poller.json_response = ''
+    self.assertEquals(self.poller.GetHttpResponse('fake/url'), None)
+
+    # Test for broken JSON
+    self.poller.json_response = start_json
+    self.assertEquals(self.poller.GetHttpResponse('fake/url'), None)
+    self.poller.json_response = end_json
+    self.assertEquals(self.poller.GetHttpResponse('fake/url'), None)
+    self.poller.json_response = start_json + end_json + end_json
+    self.assertEquals(self.poller.GetHttpResponse('fake/url'), None)
+
+    # The json library (dumps/loads) assumes byte strings are UTF-8.
+    # We need to fail gracefully when the wrong encoding is given.
+
+    # Normal ascii
+    incoming_json = start_json + 'ascii-value' + end_json
+    self.poller.json_response = incoming_json
+    self.assertNotEquals(self.poller.GetHttpResponse('fake/url'), None)
+
+    # Unicode utf-8: '\xE2 \x82 \xAC' == euro_sign
+    self.poller.json_response = start_json + euro.encode('utf-8') + end_json
+    self.assertNotEquals(self.poller.GetHttpResponse('fake/url'), None)
+
+    # Unicode utf-16: '\x20\xAC' == euro_sign, should fail
+    self.poller.json_response = start_json + euro.encode('utf-16') + end_json
+    self.assertEquals(self.poller.GetHttpResponse('fake/url'), None)
+
 if __name__ == '__main__':
   unittest.main()
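
The switch above from comparing serialized strings to comparing parsed objects avoids depending on dict key order: json.dumps() does not sort keys unless sort_keys=True, so two equal dicts may serialize differently, while structural comparison after json.loads() is order-insensitive. A small illustration with made-up data:

    import json

    data = {'key1': 1, 'key2': {'key3': 3, 'key4': 4}}
    # A dumps/loads round trip preserves structure, so comparing parsed
    # objects is robust even if the serialized key order differs.
    assert json.loads(json.dumps(data)) == data
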
diff --git a/ledpattern/Makefile b/ledpattern/Makefile
index 705a398..ad3456d 100644
--- a/ledpattern/Makefile
+++ b/ledpattern/Makefile
@@ -1,7 +1,7 @@
 default:
 
-PREFIX=/
-BINDIR=$(DESTDIR)$(PREFIX)/bin
+ETCDIR=$(DESTDIR)/etc
+BINDIR=$(DESTDIR)/bin
 PYTHON?=python
 
 all:
@@ -9,6 +9,9 @@
 install:
 	mkdir -p $(BINDIR)
 	cp ledpattern.py $(BINDIR)/ledpattern
+	cp ledtapcode.sh $(BINDIR)/ledtapcode
+	cp ledpatterns $(ETCDIR)/ledpatterns
+	chmod +x $(BINDIR)/ledtapcode
 
 install-libs:
 	@echo "No libs to install."
diff --git a/ledpattern/ledpatterns b/ledpattern/ledpatterns
new file mode 100644
index 0000000..2e0ab63
--- /dev/null
+++ b/ledpattern/ledpatterns
@@ -0,0 +1,12 @@
+HALTED,P,R
+NO_LASER_CHANNEL,P,P
+SET_LASER_FAILED,P,R,R
+LOSLOF_ALARM,P,R,B
+OTHER_ALARM,P,R,P
+GPON_INITIAL,P,B,R
+GPON_STANDBY,P,B,P
+GPON_SERIAL,P,P,R
+GPON_RANGING,P,P,B
+WAIT_ACS,P,B,B
+ALL_OK,P,B,B,B
+UNKNOWN_ERROR,P,R,R,R
diff --git a/ledpattern/ledtapcode.sh b/ledpattern/ledtapcode.sh
new file mode 100755
index 0000000..6841f2f
--- /dev/null
+++ b/ledpattern/ledtapcode.sh
@@ -0,0 +1,107 @@
+#!/bin/sh
+
+. /etc/utils.sh
+
+LEDPATTERN="ledpattern /etc/ledpatterns"
+SYSFS_GPON_PATH="/sys/devices/platform/gpon"
+MONITOR_PATH="/tmp/gpio/ledcontrol"
+LASER_STATUS_FILE="/tmp/laser_i2c_status"
+ALARM_GPON_FILE="$SYSFS_GPON_PATH/info/alarmGpon"
+GPON_INFO_FILE="$SYSFS_GPON_PATH/info/infoGpon"
+HALTED_FILE="$MONITOR_PATH/halted"
+HW_FAILURE="$MONITOR_PATH/hardware_failure"
+LASER_CHANNEL_FILE="$SYSFS_GPON_PATH/misc/laserChannel"
+ACS_FILE="$MONITOR_PATH/acsconnected"
+
+PlayPatternAndExit()
+{
+  state="$1"
+  # ledpattern takes care of all the LED management and state selection.
+  result="$($LEDPATTERN $state)"
+  if [ "$?" -ne 0 ]; then
+    echo "Failed to display pattern $state: $result"
+    exit 1
+  fi
+  exit 0
+}
+
+if [ ! -f "$ALARM_GPON_FILE" ]; then
+  echo "$ALARM_GPON_FILE does not exist"
+  PlayPatternAndExit UNKNOWN_ERROR
+fi
+
+if [ ! -f "$GPON_INFO_FILE" ]; then
+  echo "$GPON_INFO_FILE does not exist"
+  PlayPatternAndExit UNKNOWN_ERROR
+fi
+
+if [ ! -f "$LASER_CHANNEL_FILE" ]; then
+  echo "$LASER_CHANNEL_FILE does not exist"
+  PlayPatternAndExit UNKNOWN_ERROR
+fi
+
+# It is a valid state that there may not be a LASER_STATUS_FILE yet.
+if [ -f "$LASER_STATUS_FILE" ]; then
+  laser_status=$(cat "$LASER_STATUS_FILE")
+  if [ "$laser_status" -ne 0 ]; then
+    echo "Playing SET_LASER_FAILED pattern"
+    PlayPatternAndExit SET_LASER_FAILED
+  fi
+fi
+
+if [ -f "$HW_FAILURE" ]; then
+  echo "Playing HALTED pattern on HW_FAILURE"
+  PlayPatternAndExit HALTED
+fi
+
+if [ -f "$HALTED_FILE" ]; then
+  echo "Playing HALTED pattern on HALTED_FILE"
+  PlayPatternAndExit HALTED
+fi
+
+# Chop the table headers off the output using tail, otherwise grep gets
+# confused later.
+alarm_info=$(cat "$ALARM_GPON_FILE" | tail -n+7)
+los_output=$(echo "$alarm_info" | grep "LOS" | grep "ON")
+lof_output=$(echo "$alarm_info" | grep "LOF" | grep "ON")
+if [ -n "$los_output" ] || [ -n "$lof_output" ]; then
+  echo "Playing LOSLOF_ALARM pattern"
+  PlayPatternAndExit LOSLOF_ALARM
+fi
+other_alarm=$(echo "$alarm_info" | grep "ON")
+if [ -n "$other_alarm" ]; then
+  echo "Playing OTHER_ALARM pattern"
+  PlayPatternAndExit OTHER_ALARM
+fi
+
+gpon_info=$(cat "$GPON_INFO_FILE" | grep "ONU STATE")
+if contains "$gpon_info" "INITIAL"; then
+  echo "Playing GPON_INITIAL pattern"
+  PlayPatternAndExit GPON_INITIAL
+elif contains "$gpon_info" "STANDBY"; then
+  echo "Playing GPON_STANDBY pattern"
+  PlayPatternAndExit GPON_STANDBY
+elif contains "$gpon_info" "SERIAL"; then
+  echo "Playing GPON_SERIAL pattern"
+  PlayPatternAndExit GPON_SERIAL
+elif contains "$gpon_info" "RANGING"; then
+  echo "Playing GPON_RANGING pattern"
+  PlayPatternAndExit GPON_RANGING
+fi
+
+laser_channel=$(cat "$LASER_CHANNEL_FILE")
+if [ ! -f "$ACS_FILE" ] && [ "$laser_channel" -eq "-1" ]; then
+  echo "Playing NO_LASER_CHANNEL pattern"
+  PlayPatternAndExit NO_LASER_CHANNEL
+elif [ ! -f "$ACS_FILE" ] && [ "$laser_channel" -ne "-1" ]; then
+  echo "Playing WAIT_ACS pattern"
+  PlayPatternAndExit WAIT_ACS
+elif [ -f "$ACS_FILE" ] && [ "$laser_channel" -ne "-1" ]; then
+  echo "Playing ALL_OK pattern"
+  PlayPatternAndExit ALL_OK
+else
+  # If we get all the way here and nothing triggered on the way then this really
+  # is an unknown error...
+  echo "Nothing triggered? Playing UNKNOWN_ERROR pattern..."
+  PlayPatternAndExit UNKNOWN_ERROR
+fi
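
For reference, ledtapcode.sh walks its checks in a fixed priority order (after bailing out to UNKNOWN_ERROR when a required sysfs file is missing): laser I2C status, halted/hardware-failure markers, LOS/LOF alarms, any other GPON alarm, the ONU state, and finally the ACS-connected and laser-channel combination. A hedged Python sketch of the same decision ladder, purely illustrative and not code from this repository:

    def pick_pattern(laser_status, hw_failure, halted, los_or_lof, other_alarm,
                     onu_state, acs_connected, laser_channel):
      # Mirrors the order of the checks in ledtapcode.sh.
      if laser_status not in (None, 0):
        return 'SET_LASER_FAILED'
      if hw_failure or halted:
        return 'HALTED'
      if los_or_lof:
        return 'LOSLOF_ALARM'
      if other_alarm:
        return 'OTHER_ALARM'
      if onu_state in ('INITIAL', 'STANDBY', 'SERIAL', 'RANGING'):
        return 'GPON_' + onu_state
      if not acs_connected and laser_channel == -1:
        return 'NO_LASER_CHANNEL'
      if not acs_connected:
        return 'WAIT_ACS'
      if laser_channel != -1:
        return 'ALL_OK'
      return 'UNKNOWN_ERROR'
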
diff --git a/taxonomy/dhcp.py b/taxonomy/dhcp.py
index 0354ebd..26ecb82 100644
--- a/taxonomy/dhcp.py
+++ b/taxonomy/dhcp.py
@@ -59,6 +59,7 @@
     '1,3,6,15,119,95,252,44,46,47': ['ipodtouch1'],
 
     '252,3,42,15,6,1,12': ['lgtv', 'tizen'],
+    '252,3,42,6,1,12': ['tizen'],
 
     '1,3,6,15,119,95,252,44,46,101': ['macos'],
     '1,3,6,15,119,95,252,44,46': ['macos'],
@@ -74,8 +75,9 @@
 
     '1,28,2,3,15,6,12': ['tivo'],
 
-    '1,3,6,12,15,28,42': ['viziotv', 'wemo', 'directv'],
+    '1,3,6,12,15,28,42': ['viziotv', 'wemo', 'directv', 'samsungtv'],
     '1,3,6,12,15,28,40,41,42': ['viziotv', 'kindle'],
+    '1,3,6,12,15,17,23,28,29,31,33,40,41,42': ['viziotv'],
 
     '1,3,6,15,28,33': ['wii'],
     '1,3,6,15': ['wii', 'xbox'],
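
The dhcp.py entries above key on the device's DHCP parameter request list (option 55), joined with commas, and map it to a list of candidate device names. A hypothetical lookup sketch; the dict and function names are illustrative, not the module's actual API:

    SIGNATURES = {
        '252,3,42,15,6,1,12': ['lgtv', 'tizen'],
        '252,3,42,6,1,12': ['tizen'],
        '1,3,6,12,15,17,23,28,29,31,33,40,41,42': ['viziotv'],
    }

    def candidates(param_request_list):
      key = ','.join(str(opt) for opt in param_request_list)
      return SIGNATURES.get(key, [])

    print candidates([252, 3, 42, 6, 1, 12])  # ['tizen']
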
diff --git a/taxonomy/ethernet.py b/taxonomy/ethernet.py
index 00be466..85a4522 100644
--- a/taxonomy/ethernet.py
+++ b/taxonomy/ethernet.py
@@ -51,6 +51,8 @@
     'd8:50:e6': ['asus'],
     'f8:32:e4': ['asus'],
 
+    '58:67:1a': ['barnes&noble'],
+
     '30:8c:fb': ['dropcam'],
 
     '00:1a:11': ['google'],
@@ -245,6 +247,7 @@
     'b8:57:d8': ['samsung'],
     'b8:5a:73': ['samsung'],
     'b8:5e:7b': ['samsung'],
+    'bc:14:85': ['samsung'],
     'bc:20:a4': ['samsung'],
     'bc:72:b1': ['samsung'],
     'bc:8c:cd': ['samsung'],
@@ -279,8 +282,12 @@
     '58:48:22': ['sony'],
     'b4:52:7e': ['sony'],
 
+    '10:08:c1': ['toshiba'],
+
     'a4:8d:3b': ['vizio'],
 
+    'b4:79:a7': ['wink'],
+
     '00:24:e4': ['withings'],
 
     '64:cc:2e': ['xiaomi'],
diff --git a/taxonomy/pcaptest.py b/taxonomy/pcaptest.py
index 2b864e1..2b5d90f 100644
--- a/taxonomy/pcaptest.py
+++ b/taxonomy/pcaptest.py
@@ -45,13 +45,22 @@
   ('', './testdata/pcaps/Samsung Exhibit 2.4GHz.pcap'),
   ('', './testdata/pcaps/Samsung Fascinate 2.4GHz.pcap'),
   ('', './testdata/pcaps/Samsung Galaxy Tab 2 2.4GHz.pcap'),
+  ('', './testdata/pcaps/Samsung Galaxy 4G 2.4GHz SGH-T959V.pcap'),
   ('', './testdata/pcaps/Samsung Infuse 5GHz.pcap'),
   ('', './testdata/pcaps/Samsung Vibrant 2.4GHz.pcap'),
+  ('', './testdata/pcaps/Sony Ericsson Xperia X10 2.4GHz.pcap'),
 
   # Names where the identified species doesn't exactly match the filename,
   # usually because multiple devices are too similar to distinguish. We name
   # the file for the specific device which was captured, and add an entry
   # here for the best identification which we can manage.
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle 4th gen 2.4GHz 9023.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle 4th gen 2.4GHz B00E.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle Paperwhite 2012 2.4GHz B024.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle Touch 2.4GHz Broadcast Probe B011.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle Touch 2.4GHz Specific Probe B011.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle Voyage 2.4GHz B013.pcap'),
+  ('Amazon Kindle', './testdata/pcaps/Amazon Kindle Voyage 2.4GHz B054.pcap'),
   ('iPad 1st or 2nd gen', './testdata/pcaps/iPad 1st gen 5GHz.pcap'),
   ('iPad 1st or 2nd gen', './testdata/pcaps/iPad 2nd gen 5GHz.pcap'),
   ('iPad 4th gen or Air 1st gen', './testdata/pcaps/iPad (4th gen) 5GHz.pcap'),
diff --git a/taxonomy/testdata/dhcp.leases b/taxonomy/testdata/dhcp.leases
index 9bdc396..ab83a43 100644
--- a/taxonomy/testdata/dhcp.leases
+++ b/taxonomy/testdata/dhcp.leases
@@ -56,3 +56,16 @@
 1432237016 a4:8d:3b:00:00:00 192.168.42.45 VizioSmartTV
 1432237016 00:11:d9:00:00:00 192.168.42.46 TiVoBOLT
 1432237016 ac:3a:7a:00:00:00 192.168.42.47 Roku3-4230
+1432237016 d4:63:fe:00:00:00 192.168.42.48 LGSmartTV
+1432237016 bc:14:85:00:00:00 192.168.42.49 SamsungTizenTV
+1432237016 78:bd:bc:00:00:00 192.168.42.50 SamsungTizenTV
+1432237016 54:88:0e:00:00:00 192.168.42.51 SamsungLED75TV
+1432237016 bc:30:7d:00:00:00 192.168.42.52 PanasonicTV
+1432237016 60:12:8b:00:00:00 192.168.42.53 CanonPixma
+1432237016 88:87:17:00:00:00 192.168.42.54 CanonPixma
+1432237016 cc:95:d7:00:00:00 192.168.42.55 VizioTV
+1432237016 c0:f2:fb:00:00:00 192.168.42.56 iPaadMini3
+1432237016 04:52:f3:00:00:00 192.168.42.57 iPaadMini4
+1432237016 a4:d1:d2:00:00:00 192.168.42.58 iPaadOldiOS
+1432237016 70:48:0f:00:00:00 192.168.42.59 iPadPro12_9
+1432237016 6c:c2:17:00:00:00 192.168.42.60 HPPrinter
diff --git a/taxonomy/testdata/dhcp.signatures b/taxonomy/testdata/dhcp.signatures
index e9ef842..1fcfa61 100644
--- a/taxonomy/testdata/dhcp.signatures
+++ b/taxonomy/testdata/dhcp.signatures
@@ -48,3 +48,16 @@
 a4:8d:3b:00:00:00 1,3,6,12,15,28,42
 00:11:d9:00:00:00 1,28,2,3,15,6,12
 ac:3a:7a:00:00:00 1,3,6,15,12
+d4:63:fe:00:00:00 252,3,42,15,6,1,12
+bc:14:85:00:00:00 252,3,42,6,1,12
+78:bd:bc:00:00:00 252,3,42,6,1,12
+54:88:0e:00:00:00 1,3,6,12,15,28,42
+bc:30:7d:00:00:00 58,59,6,15,51,54,1,3
+60:12:8b:00:00:00 1,3,6,15,44,47
+88:87:17:00:00:00 1,3,6,15,44,47
+cc:95:d7:00:00:00 1,3,6,12,15,17,23,28,29,31,33,40,41,42
+c0:f2:fb:00:00:00 1,3,6,15,119,252
+04:52:f3:00:00:00 1,3,6,15,119,252
+a4:d1:d2:00:00:00 1,3,6,15,119,252
+70:48:0f:00:00:00 1,3,6,15,119,252
+6c:c2:17:00:00:00 6,3,1,15,66,67,13,44,12,81,252
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle 4th gen 2.4GHz 9023.pcap b/taxonomy/testdata/pcaps/Amazon Kindle 4th gen 2.4GHz 9023.pcap
new file mode 100644
index 0000000..7e26437
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Amazon Kindle 4th gen 2.4GHz 9023.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz 4th gen B00E.pcap b/taxonomy/testdata/pcaps/Amazon Kindle 4th gen 2.4GHz B00E.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz 4th gen B00E.pcap
rename to taxonomy/testdata/pcaps/Amazon Kindle 4th gen 2.4GHz B00E.pcap
Binary files differ
diff --git "a/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz 5V98LN GFRG2x0.pcap" "b/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz 5V98LN GFRG2x0.pcap"
new file mode 100644
index 0000000..d768c82
--- /dev/null
+++ "b/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz 5V98LN GFRG2x0.pcap"
Binary files differ
diff --git "a/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz Broadcast Probe 5V98LN.pcap" "b/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz Broadcast Probe 5V98LN.pcap"
new file mode 100644
index 0000000..c595fbb
--- /dev/null
+++ "b/taxonomy/testdata/pcaps/Amazon Kindle Fire 7\" \0502015 edition\051 2.4GHz Broadcast Probe 5V98LN.pcap"
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Paperwhite B024.pcap b/taxonomy/testdata/pcaps/Amazon Kindle Paperwhite 2012 2.4GHz B024.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Paperwhite B024.pcap
rename to taxonomy/testdata/pcaps/Amazon Kindle Paperwhite 2012 2.4GHz B024.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Broadcast Probe B011.pcap b/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Broadcast Probe B011.pcap
new file mode 100644
index 0000000..677d9e8
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Broadcast Probe B011.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Specific Probe B011.pcap b/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Specific Probe B011.pcap
new file mode 100644
index 0000000..75cb5d0
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Amazon Kindle Touch 2.4GHz Specific Probe B011.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Voyage B013.pcap b/taxonomy/testdata/pcaps/Amazon Kindle Voyage 2.4GHz B013.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Voyage B013.pcap
rename to taxonomy/testdata/pcaps/Amazon Kindle Voyage 2.4GHz B013.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Voyage B054.pcap b/taxonomy/testdata/pcaps/Amazon Kindle Voyage 2.4GHz B054.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Amazon Kindle Voyage, Paperwhite, or 4th gen 2.4GHz Voyage B054.pcap
rename to taxonomy/testdata/pcaps/Amazon Kindle Voyage 2.4GHz B054.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Barnes & Noble Nook Color 2.4GHz BNRV200.pcap b/taxonomy/testdata/pcaps/Barnes & Noble Nook Color 2.4GHz BNRV200.pcap
new file mode 100644
index 0000000..eee871c
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Barnes & Noble Nook Color 2.4GHz BNRV200.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Large Broadcast Probe MX492.pcap b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Large Broadcast Probe MX492.pcap
new file mode 100644
index 0000000..b9fcd4b
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Large Broadcast Probe MX492.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Canon Printer 2.4GHz MX410.pcap b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz MX410.pcap
new file mode 100644
index 0000000..514eac4
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz MX410.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Specific Probe MX492.pcap b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Specific Probe MX492.pcap
new file mode 100644
index 0000000..a361567
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Specific Probe MX492.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Very Large Broadcast Probe MX492.pcap b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Very Large Broadcast Probe MX492.pcap
new file mode 100644
index 0000000..12b33a1
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Canon Printer 2.4GHz Very Large Broadcast Probe MX492.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/HP Printer 2.4GHz OfficeJet Pro 8610.pcap b/taxonomy/testdata/pcaps/HP Printer 2.4GHz OfficeJet Pro 8610.pcap
new file mode 100644
index 0000000..c62092f
--- /dev/null
+++ b/taxonomy/testdata/pcaps/HP Printer 2.4GHz OfficeJet Pro 8610.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Broadcast Probe 55UH7700-UB.pcap b/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Broadcast Probe 55UH7700-UB.pcap
new file mode 100644
index 0000000..e25b5cb
--- /dev/null
+++ b/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Broadcast Probe 55UH7700-UB.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Specific Probe 55UH7700-UB.pcap b/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Specific Probe 55UH7700-UB.pcap
new file mode 100644
index 0000000..e6cf5bd
--- /dev/null
+++ b/taxonomy/testdata/pcaps/LG Smart TV 2.4GHz Specific Probe 55UH7700-UB.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/LG Smart TV 5GHz Broadcast Probe 55UH7700-UB.pcap b/taxonomy/testdata/pcaps/LG Smart TV 5GHz Broadcast Probe 55UH7700-UB.pcap
new file mode 100644
index 0000000..a0eb75d
--- /dev/null
+++ b/taxonomy/testdata/pcaps/LG Smart TV 5GHz Broadcast Probe 55UH7700-UB.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/LG Smart TV 5GHz Specific Probe 55UH7700-UB.pcap b/taxonomy/testdata/pcaps/LG Smart TV 5GHz Specific Probe 55UH7700-UB.pcap
new file mode 100644
index 0000000..f576a6c
--- /dev/null
+++ b/taxonomy/testdata/pcaps/LG Smart TV 5GHz Specific Probe 55UH7700-UB.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Broadcast Probe htcap 01ad.pcap b/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Broadcast Probe htcap 01ad.pcap
new file mode 100644
index 0000000..e871e93
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Broadcast Probe htcap 01ad.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Specific Probe htcap 01ad.pcap b/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Specific Probe htcap 01ad.pcap
new file mode 100644
index 0000000..85c56e0
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Nexus 5X 2.4GHz Specific Probe htcap 01ad.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Nexus 5X 5GHz Broadcast Probe htcap 01ad.pcap b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Broadcast Probe htcap 01ad.pcap
new file mode 100644
index 0000000..9a44d6b
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Broadcast Probe htcap 01ad.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Nexus 5X 5GHz Small Specific Probe.pcap b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Small Specific Probe.pcap
new file mode 100644
index 0000000..f9cfce7
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Small Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Nexus 5X 5GHz Specific Probe htcap 01ad.pcap b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Specific Probe htcap 01ad.pcap
new file mode 100644
index 0000000..aef7521
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Nexus 5X 5GHz Specific Probe htcap 01ad.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Broadcast Probe.pcap
new file mode 100644
index 0000000..25d831a
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Specific Probe.pcap b/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Specific Probe.pcap
new file mode 100644
index 0000000..6a64e36
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Panasonic TV 2.4GHz TC-58AX800U Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Broadcast Probe.pcap
new file mode 100644
index 0000000..023077a
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Specific Probe.pcap b/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Specific Probe.pcap
new file mode 100644
index 0000000..562e8a2
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Panasonic TV 5GHz TC-58AX800U Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Galaxy 4G 2.4GHz SGH-T959V.pcap b/taxonomy/testdata/pcaps/Samsung Galaxy 4G 2.4GHz SGH-T959V.pcap
new file mode 100644
index 0000000..9a28122
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Galaxy 4G 2.4GHz SGH-T959V.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Broadcast Probe.pcap
new file mode 100644
index 0000000..4a958c2
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Specific Probe.pcap
new file mode 100644
index 0000000..9c0ea3d
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz LED75 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Broadcast Probe.pcap
new file mode 100644
index 0000000..c84bb4e
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Specific Probe.pcap
new file mode 100644
index 0000000..17b1931
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN40JU6500 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Broadcast Probe.pcap
new file mode 100644
index 0000000..bae308d
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Specific Probe.pcap
new file mode 100644
index 0000000..eaaae3e
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN55JS9000 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN60F6300AF.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz.pcap
rename to taxonomy/testdata/pcaps/Samsung Smart TV 2.4GHz UN60F6300AF.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Broadcast Probe.pcap
new file mode 100644
index 0000000..d3556c2
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Specific Probe.pcap
new file mode 100644
index 0000000..19675e7
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz LED75 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Broadcast Probe.pcap
new file mode 100644
index 0000000..6ea8b14
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Specific Probe.pcap
new file mode 100644
index 0000000..1f5a8e5
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN40JU6500 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Broadcast Probe.pcap
new file mode 100644
index 0000000..f2a4ab1
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Specific Probe.pcap
new file mode 100644
index 0000000..3cbf5eb
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN46ES7100F Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Broadcast Probe.pcap
new file mode 100644
index 0000000..8d7cd18
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Specific Probe.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Specific Probe.pcap
new file mode 100644
index 0000000..2f941b5
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN55JS9000 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz.pcap b/taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN60F6300AF.pcap
similarity index 100%
rename from taxonomy/testdata/pcaps/Samsung Smart TV 5GHz.pcap
rename to taxonomy/testdata/pcaps/Samsung Smart TV 5GHz UN60F6300AF.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-49X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-49X850B.pcap
new file mode 100644
index 0000000..0284c95
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-49X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-55X850C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-55X850C.pcap
new file mode 100644
index 0000000..a3fcae1
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Broadcast Probe XBR-55X850C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-49X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-49X850B.pcap
new file mode 100644
index 0000000..976e3bb
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-49X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-55X850C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-55X850C.pcap
new file mode 100644
index 0000000..7a91a9b
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2.4GHz Specific Probe XBR-55X850C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Broadcast Probe XBR-55X900C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Broadcast Probe XBR-55X900C.pcap
new file mode 100644
index 0000000..017f6e1
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Broadcast Probe XBR-55X900C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Specific Probe XBR-55X900C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Specific Probe XBR-55X900C.pcap
new file mode 100644
index 0000000..423fb96
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 2.4GHz Specific Probe XBR-55X900C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Broadcast Probe XBR-55X900C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Broadcast Probe XBR-55X900C.pcap
new file mode 100644
index 0000000..45fc9d9
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Broadcast Probe XBR-55X900C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Specific Probe XBR-55X900C.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Specific Probe XBR-55X900C.pcap
new file mode 100644
index 0000000..925cda6
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 2015 model 5GHz Specific Probe XBR-55X900C.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-49X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-49X850B.pcap
new file mode 100644
index 0000000..2dfaf35
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-49X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-55X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-55X850B.pcap
new file mode 100644
index 0000000..7c961dd
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Broadcast Probe XBR-55X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-49X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-49X850B.pcap
new file mode 100644
index 0000000..aa4ec4d
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-49X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-55X850B.pcap b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-55X850B.pcap
new file mode 100644
index 0000000..3068a62
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Bravia TV 5GHz Specific Probe XBR-55X850B.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Sony Ericsson Xperia X10 2.4GHz.pcap b/taxonomy/testdata/pcaps/Sony Ericsson Xperia X10 2.4GHz.pcap
new file mode 100644
index 0000000..9dee0c4
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Sony Ericsson Xperia X10 2.4GHz.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Broadcast Probe 40L3400U.pcap b/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Broadcast Probe 40L3400U.pcap
new file mode 100644
index 0000000..a5fc0e7
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Broadcast Probe 40L3400U.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Specific Probe 40L3400U.pcap b/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Specific Probe 40L3400U.pcap
new file mode 100644
index 0000000..926c99e
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Toshiba Smart TV 2.4GHz Specific Probe 40L3400U.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Broadcast Probe 40L3400U.pcap b/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Broadcast Probe 40L3400U.pcap
new file mode 100644
index 0000000..625f35a
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Broadcast Probe 40L3400U.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Specific Probe 40L3400U.pcap b/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Specific Probe 40L3400U.pcap
new file mode 100644
index 0000000..7da522f
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Toshiba Smart TV 5GHz Specific Probe 40L3400U.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Large Broadcast Probe P602ui-B3.pcap b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Large Broadcast Probe P602ui-B3.pcap
new file mode 100644
index 0000000..62f1748
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Large Broadcast Probe P602ui-B3.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Small Broadcast Probe P602ui-B3.pcap b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Small Broadcast Probe P602ui-B3.pcap
new file mode 100644
index 0000000..513b9b6
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Small Broadcast Probe P602ui-B3.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Specific Probe P602ui-B3.pcap b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Specific Probe P602ui-B3.pcap
new file mode 100644
index 0000000..e662f82
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Vizio Smart TV 2.4GHz Specific Probe P602ui-B3.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Broadcast Probe.pcap
new file mode 100644
index 0000000..d278248
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Specific Probe.pcap b/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Specific Probe.pcap
new file mode 100644
index 0000000..b1784dc
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Vizio Smart TV 5GHz P602ui-B3 Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Broadcast Probe.pcap
new file mode 100644
index 0000000..bc78522
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Specific Probe.pcap b/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Specific Probe.pcap
new file mode 100644
index 0000000..1d98058
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Wink Hub 2.4GHz Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Broadcast Probe.pcap
new file mode 100644
index 0000000..1bf9860
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Specific Probe.pcap b/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Specific Probe.pcap
new file mode 100644
index 0000000..247e5da
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Xiaomi Mi 5 2.4GHz Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Broadcast Probe.pcap b/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Broadcast Probe.pcap
new file mode 100644
index 0000000..afd7217
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Specific Probe.pcap b/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Specific Probe.pcap
new file mode 100644
index 0000000..20c9e07
--- /dev/null
+++ b/taxonomy/testdata/pcaps/Xiaomi Mi 5 5GHz Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Broadcast Probe.pcap b/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Broadcast Probe.pcap
new file mode 100644
index 0000000..03be132
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Specific Probe.pcap b/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Specific Probe.pcap
new file mode 100644
index 0000000..0398616
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad 1st or 2nd gen 5GHz older iOS Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad Mini 3rd gen 2.4GHz MH392LL.pcap b/taxonomy/testdata/pcaps/iPad Mini 3rd gen 2.4GHz MH392LL.pcap
new file mode 100644
index 0000000..5c2b5b7
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad Mini 3rd gen 2.4GHz MH392LL.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad Mini 3rd gen 5GHz MH392LL.pcap b/taxonomy/testdata/pcaps/iPad Mini 3rd gen 5GHz MH392LL.pcap
new file mode 100644
index 0000000..7ad8491
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad Mini 3rd gen 5GHz MH392LL.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Broadcast Probe.pcap b/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Broadcast Probe.pcap
new file mode 100644
index 0000000..29b07ca
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Broadcast Probe.pcap
Binary files differ
diff --git a/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Specific Probe.pcap b/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Specific Probe.pcap
new file mode 100644
index 0000000..9801c46
--- /dev/null
+++ b/taxonomy/testdata/pcaps/iPad Mini 4th gen 5GHz MK6L2LL Specific Probe.pcap
Binary files differ
diff --git a/taxonomy/wifi.py b/taxonomy/wifi.py
index aa9a0cb..6b48061 100644
--- a/taxonomy/wifi.py
+++ b/taxonomy/wifi.py
@@ -74,7 +74,7 @@
     'wifi4|probe:0,1,50|assoc:0,1,50,221(0050f2,2)|oui:amazon':
         ('Amazon Kindle', 'Keyboard 3', '2.4GHz'),
     'wifi4|probe:0,1,50,45,htcap:002c,htagg:01,htmcs:000000ff|assoc:0,1,50,45,48,221(0050f2,2),htcap:002c,htagg:01,htmcs:000000ff|oui:amazon':
-        ('Amazon Kindle', 'Voyage, Paperwhite, or 4th gen', '2.4GHz'),
+        ('Amazon Kindle', '', '2.4GHz'),
 
     'wifi4|probe:0,1,50,3,45,221(0050f2,8),htcap:1130,htagg:18,htmcs:000000ff|assoc:0,1,50,48,45,221(0050f2,2),htcap:1130,htagg:18,htmcs:000000ff|oui:amazon':
         ('Amazon Kindle', 'Fire 7" (2011 edition)', '2.4GHz'),
@@ -121,6 +121,9 @@
     'wifi4|probe:0,1,50,3,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:0020,htagg:1a,htmcs:000000ff,extcap:00000804|assoc:0,1,48,50,45,70,221(001018,2),221(00904c,51),221(0050f2,2),htcap:0020,htagg:1a,htmcs:000000ff|os:ios':
         ('Apple Watch', '', '2.4GHz'),
 
+    'wifi4|probe:0,1,50,45,htcap:1030,htagg:18,htmcs:000000ff|assoc:0,1,50,46,48,45,221(0050f2,2),htcap:1030,htagg:18,htmcs:000000ff|oui:barnes&noble':
+        ('Barnes & Noble Nook', 'Color', '2.4GHz'),
+
     'wifi4|probe:0,1,50,221(0050f2,4)|assoc:0,1,50,45,221(0050f2,2),48,htcap:000c,htagg:1b,htmcs:000000ff|os:wemo':
         ('Belkin WeMo', 'Switch', '2.4GHz'),
 
@@ -136,10 +139,17 @@
     'wifi4|probe:0,1,50,45,3,221(001018,2),221(00904c,51),htcap:112c,htagg:19,htmcs:000000ff|assoc:0,1,48,50,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:112c,htagg:19,htmcs:000000ff|os:brotherprinter':
         ('Brother Printer', '', '2.4GHz'),
 
+    # MX410, probably others
     'wifi4|probe:0,1,3,45,50,htcap:007e,htagg:00,htmcs:000000ff|assoc:0,1,45,48,50,221(0050f2,2),htcap:000c,htagg:1b,htmcs:000000ff|os:canonprinter':
         ('Canon Printer', '', '2.4GHz'),
+    # MX492, probably others
     'wifi4|probe:0,1,3,45,50,htcap:007e,htagg:00,htmcs:000000ff|assoc:0,1,48,50,221(0050f2,2),45,htcap:000c,htagg:1b,htmcs:000000ff|os:canonprinter':
         ('Canon Printer', '', '2.4GHz'),
+    # MX492 has been seen to send these massive Probe packets. Likely other models, too.
+    'wifi4|probe:64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,127,11,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,extcap:|assoc:0,1,48,50,221(0050f2,2),45,htcap:000c,htagg:1b,htmcs:000000ff|os:canonprinter':
+        ('Canon Printer', '', '2.4GHz'),
+    'wifi4|probe:0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0|assoc:0,1,48,50,221(0050f2,2),45,htcap:000c,htagg:1b,htmcs:000000ff|os:canonprinter':
+        ('Canon Printer', '', '2.4GHz'),
 
     'wifi4|probe:0,1,45,191,htcap:11e2,htagg:17,htmcs:0000ffff,vhtcap:038071a0,vhtrxmcs:0000fffa,vhttxmcs:0000fffa|assoc:0,1,48,45,127,191,221(0050f2,2),htcap:11e6,htagg:17,htmcs:0000ffff,vhtcap:038001a0,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,extcap:0000000000000040|os:chromeos':
         ('Chromebook', 'Pixel 2', '5GHz'),
@@ -268,6 +278,8 @@
 
     'wifi4|probe:0,1,45,221(001018,2),221(00904c,51),htcap:080c,htagg:1b,htmcs:000000ff|assoc:0,1,33,36,48,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:080c,htagg:1b,htmcs:000000ff,txpow:1008|os:ios':
         ('iPad', '1st or 2nd gen', '5GHz'),
+    'wifi4|probe:0,1,45,221(001018,2),htcap:080c,htagg:1b,htmcs:000000ff|assoc:0,1,33,36,48,45,221(001018,2),221(00904c,51),htcap:080c,htagg:1b,htmcs:000000ff,txpow:1008|os:ios':
+        ('iPad', '1st or 2nd gen', '5GHz'),
     'wifi4|probe:0,1,45,221(001018,2),221(00904c,51),htcap:0800,htagg:1b,htmcs:000000ff|assoc:0,1,33,36,48,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:0800,htagg:1b,htmcs:000000ff,txpow:1008|os:ios':
         ('iPad', '1st or 2nd gen', '5GHz'),
     'wifi4|probe:0,1,50,45,221(001018,2),221(00904c,51),htcap:180c,htagg:1b,htmcs:000000ff|assoc:0,1,33,36,48,50,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:180c,htagg:1b,htmcs:000000ff,txpow:1008|os:ios':
@@ -334,6 +346,20 @@
     'wifi4|probe:0,1,50,3,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:01bc,htagg:1b,htmcs:0000ffff,extcap:00000804|assoc:0,1,33,36,48,50,45,70,221(001018,2),221(00904c,51),221(0050f2,2),htcap:01bc,htagg:1b,htmcs:0000ffff,txpow:1603|os:ios':
         ('iPad Mini', '2nd gen', '2.4GHz'),
 
+    'wifi4|probe:0,1,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:01fe,htagg:1b,htmcs:0000ffff,extcap:00000804|assoc:0,1,33,36,48,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:01fe,htagg:1b,htmcs:0000ffff,txpow:e001|os:ios':
+        ('iPad Mini', '3rd gen', '5GHz'),
+    'wifi4|probe:0,1,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:01fe,htagg:1b,htmcs:0000ffff,extcap:00000804|assoc:0,1,33,36,48,45,70,221(001018,2),221(00904c,51),221(0050f2,2),htcap:01fe,htagg:1b,htmcs:0000ffff,txpow:e001|os:ios':
+        ('iPad Mini', '3rd gen', '5GHz'),
+    'wifi4|probe:0,1,50,3,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:01bc,htagg:1b,htmcs:0000ffff,extcap:00000804|assoc:0,1,33,36,48,50,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:01bc,htagg:1b,htmcs:0000ffff,txpow:1201|os:ios':
+        ('iPad Mini', '3rd gen', '2.4GHz'),
+    'wifi4|probe:0,1,50,3,45,127,107,221(001018,2),221(00904c,51),221(0050f2,8),htcap:01bc,htagg:1b,htmcs:0000ffff,extcap:00000804|assoc:0,1,33,36,48,50,45,70,221(001018,2),221(00904c,51),221(0050f2,2),htcap:01bc,htagg:1b,htmcs:0000ffff,txpow:1201|os:ios':
+        ('iPad Mini', '3rd gen', '2.4GHz'),
+
+    'wifi4|probe:0,1,45,127,107,191,221(0050f2,8),221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,extcap:0400088400000040|assoc:0,1,33,36,45,127,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e002,extcap:0400000000000040|os:ios':
+        ('iPad Mini', '4th gen', '5GHz'),
+    'wifi4|probe:0,1,45,127,107,191,221(0050f2,8),221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,extcap:0400088400000040|assoc:0,1,33,36,48,70,45,127,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e002,extcap:0400000000000040|os:ios':
+        ('iPad Mini', '4th gen', '5GHz'),
+
     'wifi4|probe:0,1,3,50|assoc:0,1,48,50|os:ios':
         ('iPhone 2', '', '2.4GHz'),
 
@@ -501,8 +527,16 @@
     'wifi4|probe:0,1,3,45,221(0050f2,8),221(0050f2,4),221(506f9a,9),htcap:016e,htagg:03,htmcs:000000ff,wps:LG_V400|assoc:0,1,33,36,48,70,45,221(0050f2,2),127,htcap:016e,htagg:03,htmcs:000000ff,txpow:170d,extcap:00000a0200000000':
         ('LG Pad', 'v400', '5GHz'),
 
+    'wifi4|probe:0,1,45,191,221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa|assoc:0,1,33,36,48,45,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e009|os:lgtv':
+        ('LG Smart TV', '', '5GHz'),
+    'wifi4|probe:0,1,45,191,221(0050f2,4),221(506f9a,9),221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,wps:_|assoc:0,1,33,36,48,45,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e009|os:lgtv':
+        ('LG Smart TV', '', '5GHz'),
+    'wifi4|probe:0,1,50,3,45,127,221(0050f2,4),221(506f9a,9),221(001018,2),htcap:002d,htagg:17,htmcs:0000ffff,extcap:0000000000000040,wps:_|assoc:0,1,50,33,36,48,45,221(001018,2),221(0050f2,2),htcap:002d,htagg:17,htmcs:0000ffff,txpow:1209|os:lgtv':
+        ('LG Smart TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,3,45,127,221(001018,2),221(00904c,51),htcap:11ac,htagg:16,htmcs:0000ffff,extcap:0000000000000040|assoc:0,1,33,36,48,50,45,127,221(001018,2),221(0050f2,2),htcap:11ac,htagg:16,htmcs:0000ffff,txpow:140a,extcap:0000000000000040|os:lgtv':
         ('LG Smart TV', '', '2.4GHz'),
+    'wifi4|probe:0,1,50,3,45,221(0050f2,4),221(506f9a,9),221(001018,2),htcap:002d,htagg:17,htmcs:0000ffff,wps:_|assoc:0,1,50,33,36,48,45,221(001018,2),221(0050f2,2),htcap:002d,htagg:17,htmcs:0000ffff,txpow:1209|os:lgtv':
+        ('LG Smart TV', '', '2.4GHz'),
 
     'wifi4|probe:0,1,50,3,45,221(0050f2,8),221(0050f2,4),221(506f9a,9),htcap:012c,htagg:03,htmcs:000000ff,wps:LGLS660|assoc:0,1,50,48,45,221(0050f2,2),htcap:012c,htagg:03,htmcs:000000ff':
         ('LG Tribute', '', '2.4GHz'),
@@ -607,6 +641,8 @@
         ('Nexus 5X', '', '5GHz'),
     'wifi4|probe:0,1,127,45,191,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:338061b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a020100004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:339071b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,txpow:1e08,extcap:000000000000004080|oui:lg':
         ('Nexus 5X', '', '5GHz'),
+    'wifi4|probe:0,1,127,45,191,htcap:01ad,htagg:03,htmcs:0000ffff,vhtcap:338061b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a020100004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:339071b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,txpow:1e08,extcap:000000000000004080|oui:lg':
+        ('Nexus 5X', '', '5GHz'),
     'wifi4|probe:0,1,127,extcap:00000a020100004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:339071b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,txpow:1e08,extcap:000000000000004080|oui:lg':
         ('Nexus 5X', '', '5GHz'),
     'wifi4|probe:0,1,45,221(0050f2,8),191,127,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:339071b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:000000000000004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:339071b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,txpow:1e08,extcap:000000000000004080|oui:lg':
@@ -619,6 +655,8 @@
         ('Nexus 5X', '', '2.4GHz'),
     'wifi4|probe:0,1,50,127,45,191,htcap:01ef,htagg:03,htmcs:0000ffff,vhtcap:338061b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a020100004080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:01ad,htagg:03,htmcs:0000ffff,txpow:1e08,extcap:000000000000000080|oui:lg':
         ('Nexus 5X', '', '2.4GHz'),
+    'wifi4|probe:0,1,50,127,45,191,htcap:01ad,htagg:03,htmcs:0000ffff,vhtcap:338061b2,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a020100004080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:01ad,htagg:03,htmcs:0000ffff,txpow:1e08,extcap:000000000000000080|oui:lg':
+        ('Nexus 5X', '', '2.4GHz'),
     'wifi4|probe:0,1,50,127,extcap:00000a020100004080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:01ad,htagg:03,htmcs:0000ffff,txpow:1e08,extcap:000000000000000080|oui:lg':
         ('Nexus 5X', '', '2.4GHz'),
     'wifi4|probe:0,1,50,3,45,221(0050f2,8),127,htcap:01ad,htagg:03,htmcs:0000ffff,extcap:000000000000000080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:01ad,htagg:03,htmcs:0000ffff,txpow:1e08,extcap:000000000000000080|oui:lg':
@@ -741,6 +779,10 @@
     'wifi4|probe:0,1,50,3,45,221(0050f2,8),htcap:012c,htagg:03,htmcs:000000ff|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:012c,htagg:03,htmcs:000000ff,txpow:170d,extcap:00000a0200000000|oui:oneplus':
         ('Oneplus', 'X', '2.4GHz'),
 
+    'wifi4|probe:0,1,45,221(0050f2,4),htcap:11ee,htagg:02,htmcs:0000ffff,wps:WPS_STA|assoc:0,1,33,36,48,221(0050f2,2),45,127,htcap:11ee,htagg:02,htmcs:0000ffff,txpow:0b00,extcap:01|os:panasonictv':
+        ('Panasonic TV', '', '5GHz'),
+    'wifi4|probe:0,1,50,45,221(0050f2,4),htcap:01ac,htagg:02,htmcs:0000ffff,wps:WPS_STA|assoc:0,1,50,221(0050f2,2),45,127,htcap:01ac,htagg:02,htmcs:0000ffff,extcap:01|os:panasonictv':
+        ('Panasonic TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,48|assoc:0,1,33,36,50,221(0050f2,2),45,221(00037f,1),221(00037f,4),48,htcap:1004,htagg:1b,htmcs:0000ffff,txpow:0f0f|os:panasonictv':
         ('Panasonic TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,45,221(0050f2,4),htcap:01ad,htagg:02,htmcs:0000ffff,wps:WPS_SUPPLICANT_STATION|assoc:0,1,50,45,48,221(0050f2,2),htcap:01ad,htagg:02,htmcs:0000ffff|os:panasonictv':
@@ -1005,12 +1047,30 @@
 
     'wifi4|probe:0,1,45,htcap:11ee,htagg:02,htmcs:0000ffff|assoc:0,1,45,127,33,36,48,221(0050f2,2),htcap:11ee,htagg:02,htmcs:0000ffff,txpow:1100,extcap:01|os:samsungtv':
         ('Samsung Smart TV', '', '5GHz'),
+    # UN55JS9000, probably more
+    'wifi4|probe:0,1,45,127,191,221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,extcap:0000000000000040|assoc:0,1,33,36,48,45,127,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e009,extcap:0000000000000040|os:tizen':
+        ('Samsung Smart TV', '', '5GHz'),
+    # LED75
+    'wifi4|probe:0,1,45,221(002d25,32),htcap:11ee,htagg:02,htmcs:0000ffff|assoc:0,1,33,36,221(0050f2,2),45,127,htcap:11ee,htagg:02,htmcs:0000ffff,txpow:0e00,extcap:01|os:samsungtv':
+        ('Samsung Smart TV', '', '5GHz'),
+    # UN46ES7100F
+    'wifi4|probe:0,1,45,221(002d25,32),htcap:11ee,htagg:02,htmcs:0000ffff|assoc:0,1,33,36,48,221(0050f2,2),45,127,htcap:11ee,htagg:02,htmcs:0000ffff,txpow:0e00,extcap:01|os:samsungtv':
+        ('Samsung Smart TV', '', '5GHz'),
+    # UN40JU6500
+    'wifi4|probe:0,1,45,191,221(001018,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa|assoc:0,1,33,36,48,45,191,221(001018,2),221(0050f2,2),htcap:006f,htagg:17,htmcs:0000ffff,vhtcap:0f815832,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e009|os:tizen':
+        ('Samsung Smart TV', '', '5GHz'),
     'wifi4|probe:0,1,50,45,htcap:01ac,htagg:02,htmcs:0000ffff|assoc:0,1,50,45,127,48,221(0050f2,2),htcap:01ac,htagg:02,htmcs:0000ffff,extcap:01|os:samsungtv':
         ('Samsung Smart TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,45,221(002d25,32),htcap:01ac,htagg:02,htmcs:0000ffff|assoc:0,1,50,48,221(0050f2,2),45,127,htcap:01ac,htagg:02,htmcs:0000ffff,extcap:01|os:samsungtv':
         ('Samsung Smart TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,45,htcap:0120,htagg:02,htmcs:000000ff|assoc:0,1,50,48,221(0050f2,2),45,127,htcap:0120,htagg:02,htmcs:000000ff,extcap:01|os:samsungtv':
         ('Samsung Smart TV', '', '2.4GHz'),
+    # UN40JU6500, probably more
+    'wifi4|probe:0,1,50,3,45,127,221(001018,2),htcap:002d,htagg:17,htmcs:0000ffff,extcap:0000000000000040|assoc:0,1,50,33,36,48,45,221(001018,2),221(0050f2,2),htcap:002d,htagg:17,htmcs:0000ffff,txpow:1209|os:tizen':
+        ('Samsung Smart TV', '', '2.4GHz'),
+    # UN40JU6500, probably more
+    'wifi4|probe:0,1,50,3,45,221(001018,2),htcap:002d,htagg:17,htmcs:0000ffff|assoc:0,1,50,33,36,48,45,221(001018,2),221(0050f2,2),htcap:002d,htagg:17,htmcs:0000ffff,txpow:1209|os:tizen':
+        ('Samsung Smart TV', '', '2.4GHz'),
 
     'wifi4|probe:0,1,50,3,45,htcap:11ef,htagg:1b,htmcs:0000ffff|assoc:0,1,50,48,45,221(0050f2,2),htcap:11ef,htagg:1b,htmcs:0000ffff|oui:sling':
         ('Slingbox', '500', '2.4GHz'),
@@ -1021,6 +1081,12 @@
         ('Sony Bravia TV', '2015 model', '5GHz'),
     'wifi4|probe:0,1,221(0050f2,4),221(506f9a,10),221(506f9a,9),wps:BRAVIA_2015|assoc:0,1,45,127,221(000c43,6),221(0050f2,2),48,127,htcap:01ef,htagg:13,htmcs:0000ffff,extcap:00000a02':
         ('Sony Bravia TV', '2015 model', '5GHz'),
+    'wifi4|probe:0,1,45,191,221(0050f2,4),221(506f9a,10),221(506f9a,9),htcap:11ef,htagg:13,htmcs:0000ffff,vhtcap:31c139b0,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,wps:BRAVIA_4K_2015|assoc:0,1,45,191,127,221(000c43,6),221(0050f2,2),48,127,htcap:006f,htagg:13,htmcs:0000ffff,vhtcap:31c001b0,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a02':
+        ('Sony Bravia TV', '2015 model', '5GHz'),
+    'wifi4|probe:0,1,45,221(0050f2,4),htcap:11ee,htagg:02,htmcs:0000ffff,wps:Sony_BRAVIA|assoc:0,1,33,36,48,221(0050f2,2),45,127,htcap:11ee,htagg:02,htmcs:0000ffff,txpow:0c00,extcap:01':
+        ('Sony Bravia TV', '', '5GHz'),
+    'wifi4|probe:0,1,221(0050f2,4),221(506f9a,10),221(506f9a,9),wps:BRAVIA_4K_2015|assoc:0,1,45,191,127,221(000c43,6),221(0050f2,2),48,127,htcap:006f,htagg:13,htmcs:0000ffff,vhtcap:31c001b0,vhtrxmcs:030cfffa,vhttxmcs:030cfffa,extcap:00000a02':
+        ('Sony Bravia TV', '2015 model', '5GHz'),
     'wifi4|probe:0,1,50,45,221(0050f2,4),221(506f9a,10),221(506f9a,9),htcap:01ad,htagg:02,htmcs:0000ffff,wps:Sony_BRAVIA|assoc:0,1,50,45,127,48,221(0050f2,2),htcap:01ad,htagg:02,htmcs:0000ffff,extcap:01':
         ('Sony Bravia TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,45,221(0050f2,4),htcap:01ac,htagg:02,htmcs:0000ffff,wps:Sony_BRAVIA|assoc:0,1,50,48,221(0050f2,2),45,127,htcap:01ac,htagg:02,htmcs:0000ffff,extcap:01':
@@ -1033,6 +1099,8 @@
         ('Sony Bravia TV', '2015 model', '2.4GHz'),
     'wifi4|probe:0,1,50,221(0050f2,4),221(506f9a,10),221(506f9a,9),wps:BRAVIA_2015|assoc:0,1,50,45,127,221(000c43,6),221(0050f2,2),48,127,htcap:01ad,htagg:13,htmcs:0000ffff,extcap:00000a02':
         ('Sony Bravia TV', '2015 model', '2.4GHz'),
+    'wifi4|probe:0,1,50,45,127,221(0050f2,4),221(506f9a,10),221(506f9a,9),htcap:11ef,htagg:13,htmcs:0000ffff,extcap:00,wps:BRAVIA_4K_2015|assoc:0,1,50,45,127,221(000c43,6),221(0050f2,2),48,127,htcap:008c,htagg:13,htmcs:0000ffff,extcap:00000a02':
+        ('Sony Bravia TV', '2015 model', '2.4GHz'),
 
     'wifi4|probe:0,1,3,45,221(0050f2,8),191,htcap:016e,htagg:03,htmcs:000000ff,vhtcap:31800120,vhtrxmcs:0000fffc,vhttxmcs:0000fffc|assoc:0,1,33,36,48,70,45,221(0050f2,2),127,htcap:012c,htagg:03,htmcs:000000ff|oui:sony':
         ('Sony Xperia', 'Z Ultra', '5GHz'),
@@ -1077,6 +1145,21 @@
     'wifi4|probe:0,1,50,221(001018,2)|assoc:0,1,48,50,221(001018,2)|os:tivo':
         ('TiVo', 'Series3 or Series4', '2.4GHz'),
 
+    'wifi4|probe:0,1,50,45,221(0050f2,4),htcap:106e,htagg:13,htmcs:0000ffff,wps:Ralink_Wireless_Linux_Client|assoc:0,1,45,127,221(000c43,6),221(0050f2,2),48,htcap:000e,htagg:13,htmcs:0000ffff,extcap:00|oui:toshiba':
+        ('Toshiba Smart TV', '', '5GHz'),
+    'wifi4|probe:0,1,221(0050f2,4),wps:Ralink_Wireless_Linux_Client|assoc:0,1,45,127,221(000c43,6),221(0050f2,2),48,htcap:000e,htagg:13,htmcs:0000ffff,extcap:00|oui:toshiba':
+        ('Toshiba Smart TV', '', '5GHz'),
+    'wifi4|probe:0,1,50,221(0050f2,4),wps:Ralink_Wireless_Linux_Client|assoc:0,1,50,45,127,221(000c43,6),221(0050f2,2),48,htcap:000c,htagg:13,htmcs:0000ffff,extcap:01|oui:toshiba':
+        ('Toshiba Smart TV', '', '2.4GHz'),
+    'wifi4|probe:0,1,50,45,127,221(0050f2,4),htcap:106e,htagg:13,htmcs:0000ffff,extcap:00,wps:Ralink_Wireless_Linux_Client|assoc:0,1,50,45,127,221(000c43,6),221(0050f2,2),48,htcap:000c,htagg:13,htmcs:0000ffff,extcap:01|oui:toshiba':
+        ('Toshiba Smart TV', '', '2.4GHz'),
+
+    # P602ui-B3
+    'wifi4|probe:0,1,50,221(0050f2,4),wps:_|assoc:0,33,36,1,48,221(0050f2,2),45,127,htcap:106e,htagg:1f,htmcs:0000ffff,txpow:150d,extcap:0000000000000000|os:viziotv':
+        ('Vizio Smart TV', '', '5GHz'),
+    # P602ui-B3
+    'wifi4|probe:0,1,50|assoc:0,33,36,1,48,221(0050f2,2),45,127,htcap:106e,htagg:1f,htmcs:0000ffff,txpow:150d,extcap:0000000000000000|os:viziotv':
+        ('Vizio Smart TV', '', '5GHz'),
     'wifi4|probe:0,1,45,191,221(0050f2,4),221(506f9a,9),221(001018,2),htcap:01ef,htagg:17,htmcs:0000ffff,vhtcap:0f8159b2,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,wps:_|assoc:0,1,33,36,48,45,191,221(001018,2),221(0050f2,2),htcap:01ef,htagg:17,htmcs:0000ffff,vhtcap:0f8159b2,vhtrxmcs:0000fffa,vhttxmcs:0000fffa,txpow:e002|oui:vizio':
         ('Vizio SmartCast TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,221(0050f2,4),wps:Ralink_Wireless_Linux_Client|assoc:0,1,50,45,127,221(000c43,6),221(0050f2,2),48,htcap:000c,htagg:12,htmcs:000000ff,extcap:01000000|os:viziotv':
@@ -1091,6 +1174,12 @@
         ('Vizio Smart TV', '', '2.4GHz'),
     'wifi4|probe:0,1,50,48|assoc:0,1,50,221(0050f2,2),45,51,127,48,htcap:012c,htagg:1b,htmcs:000000ff,extcap:01|os:viziotv':
         ('Vizio Smart TV', '', '2.4GHz'),
+    # P602ui-B3
+    'wifi4|probe:0,1,50,221(0050f2,4),wps:_|assoc:0,1,50,48,221(0050f2,2),45,127,htcap:122c,htagg:1f,htmcs:0000ffff,extcap:0000000000000000|os:viziotv':
+        ('Vizio Smart TV', '', '2.4GHz'),
+    # P602ui-B3
+    'wifi4|probe:0,1,50|assoc:0,1,50,48,221(0050f2,2),45,127,htcap:122c,htagg:1f,htmcs:0000ffff,extcap:0000000000000000|os:viziotv':
+        ('Vizio Smart TV', '', '2.4GHz'),
 
     'wifi4|probe:0,1,3,45,221(0050f2,8),htcap:016e,htagg:03,htmcs:000000ff|assoc:0,1,33,36,45,221(0050f2,2),htcap:016e,htagg:03,htmcs:000000ff,txpow:110d|oui:vizio':
         ('Vizio Tablet', 'XR6P', '5GHz'),
@@ -1103,6 +1192,9 @@
     'wifi4|probe:0,1,50,45,3,221(00904c,51),htcap:100c,htagg:19,htmcs:000000ff|assoc:0,1,48,50,45,221(00904c,51),221(0050f2,2),htcap:100c,htagg:19,htmcs:000000ff|os:wii':
         ('Wii-U', '', '2.4GHz'),
 
+    'wifi4|probe:0,1,50,45,3,221(001018,2),221(00904c,51),htcap:1020,htagg:1a,htmcs:000000ff|assoc:0,1,33,36,48,50,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:1020,htagg:1a,htmcs:000000ff,txpow:1009|oui:wink':
+        ('Wink Hub', '', '2.4GHz'),
+
     'wifi4|probe:0,1,50,45,3,221(001018,2),221(00904c,51),htcap:110c,htagg:19,htmcs:000000ff|assoc:0,1,48,50,45,221(001018,2),221(00904c,51),221(0050f2,2),htcap:110c,htagg:19,htmcs:000000ff|oui:withings':
         ('Withings Scale', '', '2.4GHz'),
 
@@ -1135,6 +1227,15 @@
     'wifi4|probe:0,1,50,3,45,221(0050f2,8),htcap:012c,htagg:03,htmcs:000000ff|assoc:0,1,50,33,48,70,45,221(0050f2,2),htcap:012c,htagg:03,htmcs:000000ff,txpow:170d|oui:xiaomi':
         ('Xiaomi Redmi', '3', '2.4GHz'),
 
+    'wifi4|probe:0,1,45,221(0050f2,8),191,127,htcap:016f,htagg:1f,htmcs:000000ff,vhtcap:33907132,vhtrxmcs:0186fffe,vhttxmcs:0186fffe,extcap:040000000000004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:016f,htagg:1f,htmcs:000000ff,vhtcap:33907132,vhtrxmcs:0186fffe,vhttxmcs:0186fffe,txpow:1408,extcap:040000000000004080|oui:xiaomi':
+        ('Xiaomi Mi', '5', '5GHz'),
+    'wifi4|probe:0,1,45,191,3,127,htcap:016f,htagg:df,htmcs:000000ff,vhtcap:33800132,vhtrxmcs:0186fffe,vhttxmcs:0186fffe,extcap:04000a020100004080|assoc:0,1,33,36,48,70,45,221(0050f2,2),191,127,htcap:016f,htagg:1f,htmcs:000000ff,vhtcap:33907132,vhtrxmcs:0186fffe,vhttxmcs:0186fffe,txpow:1408,extcap:040000000000004080|oui:xiaomi':
+        ('Xiaomi Mi', '5', '5GHz'),
+    'wifi4|probe:0,1,50,45,191,3,127,htcap:016f,htagg:df,htmcs:000000ff,vhtcap:33800132,vhtrxmcs:0186fffe,vhttxmcs:0186fffe,extcap:04000a020100004080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:012d,htagg:1f,htmcs:000000ff,txpow:1408,extcap:040000000000000080|oui:xiaomi':
+        ('Xiaomi Mi', '5', '2.4GHz'),
+    'wifi4|probe:0,1,50,3,45,221(0050f2,8),127,htcap:012d,htagg:1f,htmcs:000000ff,extcap:040000000000000080|assoc:0,1,50,33,48,70,45,221(0050f2,2),127,htcap:012d,htagg:1f,htmcs:000000ff,txpow:1408,extcap:040000000000000080|oui:xiaomi':
+        ('Xiaomi Mi', '5', '2.4GHz'),
+
     'wifi4|probe:0,1,50,221(0050f2,4),221(506f9a,9),wps:Z820|assoc:0,1,50,45,48,127,221(0050f2,2),htcap:1172,htagg:03,htmcs:000000ff,extcap:01':
         ('ZTE Obsidian', '', '2.4GHz'),
 }
diff --git a/waveguide/clientinfo.py b/waveguide/clientinfo.py
index 28cd643..8173084 100644
--- a/waveguide/clientinfo.py
+++ b/waveguide/clientinfo.py
@@ -30,6 +30,36 @@
   try:
     with open(os.path.join(FINGERPRINTS_DIR, mac)) as f:
       signature = f.read()
-      return ';'.join(taxonomy.identify_wifi_device(signature, mac))
+      (genus, species, perf) = taxonomy.identify_wifi_device(signature, mac)
+
+      # Preserve older output format of chipset;model;performance. We no
+      # longer track chipsets, but we output the leading ';' separator to
+      # maintain compatibility with the format.
+      #
+      # For example, in the old code:
+      # unknown: SHA:c1...7b;Unknown;802.11n n:2,w:40
+      # known:   BCM4329;iPad (1st/2nd gen);802.11n n:1,w:20
+      #
+      # In the current code, in the unknown case:
+      # genus = 'SHA:c1...7b', species = 'Unknown', perf = '802.11n n:2,w:40'
+      # SHA:c1...7b;Unknown;802.11n n:2,w:40
+      #
+      # In the current code, known, with species information:
+      # genus = 'iPad', species = '(1st/2nd gen)', perf = '802.11n n:1,w:20'
+      # ;iPad (1st/2nd gen);802.11n n:1,w:20
+      #
+      # In the current code, known, no specific species:
+      # genus = 'Samsung Galaxy S6', species = '', perf = '802.11ac n:2,w:80'
+      # ;Samsung Galaxy S6;802.11ac n:2,w:80
+      # We don't want an extra space at the end of the model, so we must be
+      # careful when joining an empty species; a naive join would produce:
+      # ;Samsung Galaxy S6 ;802.11ac n:2,w:80
+
+      if genus.startswith('SHA:'):
+        return genus + ';' + species + ';' + perf
+      elif species:
+        return ';' + genus + ' ' + species + ';' + perf
+      else:
+        return ';' + genus + ';' + perf
   except IOError:
     return None
diff --git a/waveguide/log.py b/waveguide/log.py
index af05667..cf9ccb6 100644
--- a/waveguide/log.py
+++ b/waveguide/log.py
@@ -17,15 +17,11 @@
 """Helper functions for logging."""
 
 import errno
-import hmac
 import os
-import struct
 import sys
-import helpers
 
 
 LOGLEVEL = 0
-ANONYMIZE = True
 STATUS_DIR = None
 
 
@@ -47,62 +43,6 @@
     Log(s, *args)
 
 
-SOFT = 'AEIOUY' 'V'
-HARD = 'BCDFGHJKLMNPQRSTVWXYZ' 'AEIOU'
-
-
-def Trigraph(num):
-  """Given a value from 0..4095, encode it as a cons+vowel+cons sequence."""
-  ns = len(SOFT)
-  nh = len(HARD)
-  assert nh * ns * nh >= 4096
-  c3 = num % nh
-  c2 = (num / nh) % ns
-  c1 = num / nh / ns
-  return HARD[c1] + SOFT[c2] + HARD[c3]
-
-
-def WordFromBinary(s):
-  """Encode a binary blob into a string of pronounceable syllables."""
-  out = []
-  while s:
-    part = s[:3]
-    s = s[3:]
-    while len(part) < 4:
-      part = '\0' + part
-    bits = struct.unpack('!I', part)[0]
-    out += [(bits >> 12) & 0xfff,
-            (bits >> 0)  & 0xfff]
-  return ''.join(Trigraph(i) for i in out)
-
-
-# Note(apenwarr): There are a few ways to do this.  I elected to go with
-# short human-usable strings (allowing for the small possibility of
-# collisions) since the log messages will probably be "mostly" used by
-# humans.
-#
-# An alternative would be to use "format preserving encryption" (basically
-# a secure 1:1 mapping of unencrypted to anonymized, in the same number of
-# bits) and then produce longer "words" with no possibility of collision.
-# But with our current WordFromBinary() implementation, that would be
-# 12 characters long, which is kind of inconvenient and we probably don't
-# need that level of care.  Inside waveguide we use the real MAC addresses
-# so collisions won't cause a real problem.
-#
-# TODO(apenwarr): consider not anonymizing the OUI.
-#   That way we could see any behaviour differences between vendors.
-#   Sadly, that might make it too easy to brute force a MAC address back out;
-#   the remaining 3 bytes have too little entropy.
-#
-def AnonymizeMAC(consensus_key, macbin):
-  """Anonymize a binary MAC address using the given key."""
-  assert len(macbin) == 6
-  if consensus_key and ANONYMIZE:
-    return WordFromBinary(hmac.new(consensus_key, macbin).digest())[:6]
-  else:
-    return helpers.DecodeMAC(macbin)
-
-
 def WriteEventFile(name):
   """Create a file in STATUS_DIR if it does not already exist.
 
diff --git a/waveguide/log_test.py b/waveguide/log_test.py
deleted file mode 100755
index fabc09f..0000000
--- a/waveguide/log_test.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/python
-import log
-from wvtest import wvtest
-
-
-@wvtest.wvtest
-def AnonTest():
-  m1 = '\x01\x02\x03\x04\x05\x06'
-  m2 = '\x31\x32\x33\x34\x35\x36'
-
-  s1 = log.AnonymizeMAC(None, m1)
-  s2 = log.AnonymizeMAC(None, m2)
-  a1a = log.AnonymizeMAC('key', m1)
-  a2a = log.AnonymizeMAC('key', m2)
-  a1b = log.AnonymizeMAC('key2', m1)
-  a2b = log.AnonymizeMAC('key2', m2)
-
-  # make sure they're printable strings
-  wvtest.WVPASSEQ(s1, str(s1))
-  wvtest.WVPASSEQ(a1a, str(a1a))
-  wvtest.WVPASSEQ(a1b, str(a1b))
-
-  # and reasonably sized
-  wvtest.WVPASSLE(len(a1a), 8)
-
-  # and change when the key or MAC changes
-  wvtest.WVPASSNE(s1, s2)
-  wvtest.WVPASSNE(a1a, a1b)
-  wvtest.WVPASSNE(a2a, a2b)
-  wvtest.WVPASSNE(a1a, a2a)
-  wvtest.WVPASSNE(a1b, a2b)
-
-
-if __name__ == '__main__':
-  wvtest.wvtest_main()
diff --git a/waveguide/waveguide.py b/waveguide/waveguide.py
index 87324c3..594f83f 100755
--- a/waveguide/waveguide.py
+++ b/waveguide/waveguide.py
@@ -58,8 +58,7 @@
 tx-interval=      Seconds between state transmits (0 to disable) [15]
 autochan-interval= Seconds between autochannel decisions (0 to disable) [300]
 print-interval=   Seconds between state printouts to log (0 to disable) [16]
-D,debug           Increase (non-anonymized!) debug output level
-no-anonymize      Don't anonymize MAC addresses in logs
+D,debug           Increase debug output level
 status-dir=       Directory to store status information [/tmp/waveguide]
 watch-pid=        Shut down if the given process pid disappears
 auto-disable-threshold=  Shut down if >= RSSI received from other AP [-30]
@@ -240,11 +239,8 @@
   def Filename(self, suffix):
     return os.path.join(opt.status_dir, '%s.%s' % (self.vdevname, suffix))
 
-  def AnonymizeMAC(self, mac):
-    return log.AnonymizeMAC(consensus_key, mac)
-
   def _LogPrefix(self):
-    return '%s(%s): ' % (self.vdevname, self.AnonymizeMAC(self.mac))
+    return '%s(%s): ' % (self.vdevname, helpers.DecodeMAC(self.mac))
 
   def Log(self, s, *args):
     log.Log(self._LogPrefix() + s, *args)
@@ -291,7 +287,7 @@
       self.Debug('ignoring peer due to key mismatch')
       return 0
     if p.me.mac not in self.peer_list:
-      self.Log('added a peer: %s', self.AnonymizeMAC(p.me.mac))
+      self.Log('added a peer: %s', helpers.DecodeMAC(p.me.mac))
     self.peer_list[p.me.mac] = p
     self.MaybeAutoDisable()
     return 1
@@ -445,7 +441,7 @@
       return None
     for peer in sorted(self.peer_list.values(), key=lambda p: p.me.mac):
       self.Debug('considering auto disable: peer=%s',
-                 self.AnonymizeMAC(peer.me.mac))
+                 helpers.DecodeMAC(peer.me.mac))
       if peer.me.mac not in self.bss_list:
         self.Debug('--> peer no match')
       else:
@@ -478,11 +474,11 @@
     """Writes/removes the auto-disable file based on ShouldAutoDisable()."""
     ad = self.ShouldAutoDisable()
     if ad and self.auto_disabled != ad:
-      self.Log('auto-disabling because of %s', self.AnonymizeMAC(ad))
+      self.Log('auto-disabling because of %s', helpers.DecodeMAC(ad))
       helpers.WriteFileAtomic(self.Filename('disabled'), helpers.DecodeMAC(ad))
     elif self.auto_disabled and not ad:
       self.Log('auto-enabling because %s disappeared',
-               self.AnonymizeMAC(self.auto_disabled))
+               helpers.DecodeMAC(self.auto_disabled))
       helpers.Unlink(self.Filename('disabled'))
     self.auto_disabled = ad
 
@@ -960,17 +956,12 @@
       helpers.WriteFileAtomic(os.path.join(WIFIBLASTER_DIR, g.group()),
                               '%d %s' % (time.time(), line))
 
-  def _AnonymizeResult(self, line):
-    def Repl(match):
-      return log.AnonymizeMAC(consensus_key, helpers.EncodeMAC(match.group()))
-    return re.sub(MACADDR_REGEX, Repl, line)
-
   def _HandleResults(self, errcode, stdout, stderr):
     """Callback for 'wifiblaster' results."""
     log.Debug('wifiblaster err:%r stdout:%r stderr:%r', errcode, stdout[:70],
               stderr)
     for line in stdout.splitlines():
-      log.Log('wifiblaster: %s' % self._AnonymizeResult(line))
+      log.Log('wifiblaster: %s' % line)
       self._SaveResult(line)
 
   def _StrToBool(self, s):
@@ -1090,7 +1081,6 @@
   if opt.watch_pid and opt.watch_pid <= 1:
     o.fatal('--watch-pid must be empty or > 1')
   log.LOGLEVEL = opt.debug
-  log.ANONYMIZE = opt.anonymize
   log.STATUS_DIR = opt.status_dir
 
   try:
@@ -1232,11 +1222,11 @@
         self_signals[m.mac] = bss_signal
         peer_data[m.mac] = seen_peers
         log.Log('%s: APs=%-4d peer-APs=%s stations=%s',
-                m.AnonymizeMAC(p.me.mac), len(p.seen_bss),
-                ','.join('%s(%d)' % (m.AnonymizeMAC(i.mac), i.rssi)
+                helpers.DecodeMAC(p.me.mac), len(p.seen_bss),
+                ','.join('%s(%d)' % (helpers.DecodeMAC(i.mac), i.rssi)
                          for i in sorted(seen_bss_peers,
                                          key=lambda i: -i.rssi)),
-                ','.join('%s(%d)' % (m.AnonymizeMAC(i.mac), i.rssi)
+                ','.join('%s(%d)' % (helpers.DecodeMAC(i.mac), i.rssi)
                          for i in sorted(p.assoc,
                                          key=lambda i: -i.rssi)))
 
@@ -1251,7 +1241,7 @@
       can2G_count = can5G_count = 0
       for m in managers:
         for assoc in m.assoc_list.itervalues():
-          anon = m.AnonymizeMAC(assoc.mac)
+          station = helpers.DecodeMAC(assoc.mac)
           if log_sta_band_capabilities:
             if assoc.can5G:
               can5G_count += 1
@@ -1259,11 +1249,10 @@
             else:
               can2G_count += 1
               capability = '2.4'
-            log.Log('Connected station %s supports %s GHz', anon, capability)
-          station = helpers.DecodeMAC(assoc.mac)
+            log.Log('Connected station %s supports %s GHz', station, capability)
           species = clientinfo.taxonomize(station)
           if species:
-            log.Log('Connected station %s taxonomy: %s' % (anon, species))
+            log.Log('Connected station %s taxonomy: %s', station, species)
       if log_sta_band_capabilities:
         log.Log('Connected stations: total %d, 5 GHz %d, 2.4 GHz %d',
                 can5G_count + can2G_count, can5G_count, can2G_count)
diff --git a/waveguide/wifiblaster_controller_test.py b/waveguide/wifiblaster_controller_test.py
index 9e300f2..12ff480 100755
--- a/waveguide/wifiblaster_controller_test.py
+++ b/waveguide/wifiblaster_controller_test.py
@@ -71,13 +71,6 @@
     stdout = ('version=1 mac=11:11:11:11:11:11 throughput=10000000 '
               'samples=5000000,15000000\n'
               'malformed 11:11:11:11:11:11 but has macs 11:11:11:11:11:11\n')
-
-    result = wc._AnonymizeResult(stdout)
-    expected = ('version=1 mac=CYAFVU throughput=10000000 '
-                'samples=5000000,15000000\n'
-                'malformed CYAFVU but has macs CYAFVU\n')
-    wvtest.WVPASSEQ(result, expected)
-
     expected = [('version=1 mac=11:11:11:11:11:11 throughput=10000000 '
                  'samples=5000000,15000000'),
                 'malformed 11:11:11:11:11:11 but has macs 11:11:11:11:11:11']
diff --git a/wifi/autochannel.py b/wifi/autochannel.py
index 51b4d00..c669c9a 100644
--- a/wifi/autochannel.py
+++ b/wifi/autochannel.py
@@ -65,6 +65,12 @@
                      % (band, autotype, width))
 
 
+def get_all_frequencies(band):
+  """Get all 802.11 frequencies for the given band."""
+  return get_permitted_frequencies(band, 'OVERLAP' if band == '2.4' else 'ANY',
+                                   '20').split()
+
+
 def scan(interface, band, autotype, width):
   """Do an autochannel scan and return the recommended channel.
 
diff --git a/wifi/autochannel_test.py b/wifi/autochannel_test.py
index a53725f..7198cb5 100755
--- a/wifi/autochannel_test.py
+++ b/wifi/autochannel_test.py
@@ -22,5 +22,17 @@
     wvtest.WVEXCEPT(ValueError, autochannel.get_permitted_frequencies, *case)
 
 
+@wvtest.wvtest
+def get_all_frequencies_test():
+  wvtest.WVPASSEQ(['2412', '2417', '2422', '2427', '2432', '2437', '2442',
+                   '2447', '2452', '2457', '2462'],
+                  autochannel.get_all_frequencies('2.4'))
+
+  wvtest.WVPASSEQ(['5180', '5200', '5220', '5240', '5745', '5765', '5785',
+                   '5805', '5825', '5260', '5280', '5300', '5320', '5500',
+                   '5520', '5540', '5560', '5580', '5660', '5680', '5700'],
+                  autochannel.get_all_frequencies('5'))
+
+
 if __name__ == '__main__':
   wvtest.wvtest_main()
diff --git a/wifi/configs.py b/wifi/configs.py
index 3377a08..743fe10 100644
--- a/wifi/configs.py
+++ b/wifi/configs.py
@@ -6,6 +6,8 @@
 
 import Crypto.Protocol.KDF
 
+# pylint: disable=g-bad-import-order
+import autochannel
 import experiment
 import utils
 
@@ -373,10 +375,13 @@
                                utils.validate_and_sanitize_bssid(opt.bssid))
   network_block = make_network_block(network_block_lines)
 
+  freq_list = ','.join(autochannel.get_all_frequencies(opt.band))
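+  # freq_list= restricts wpa_supplicant scanning to frequencies in the
+  # requested band.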
+
   lines = [
       'ctrl_interface=/var/run/wpa_supplicant',
       'ap_scan=1',
       'autoscan=exponential:1:30',
+      'freq_list=' + freq_list,
       network_block
   ]
   return '\n'.join(lines)
diff --git a/wifi/configs_test.py b/wifi/configs_test.py
index ab5d6c7..64e05c6 100755
--- a/wifi/configs_test.py
+++ b/wifi/configs_test.py
@@ -10,27 +10,36 @@
 from wvtest import wvtest
 
 
+_FREQ_LIST = {
+    '2.4': '2412,2417,2422,2427,2432,2437,2442,2447,2452,2457,2462',
+    '5': ('5180,5200,5220,5240,5745,5765,5785,5805,5825,5260,5280,5300,5320,'
+          '5500,5520,5540,5560,5580,5660,5680,5700'),
+}
+
+
 _WPA_SUPPLICANT_CONFIG = """ctrl_interface=/var/run/wpa_supplicant
 ap_scan=1
 autoscan=exponential:1:30
-network={
+freq_list={freq_list}
+network={{
 \tssid="some ssid"
 \t#psk="some passphrase"
 \tpsk=41821f7ca3ea5d85beea7644ed7e0fefebd654177fa06c26fbdfdc3c599a317f
 \tscan_ssid=1
-}
+}}
 """
 
 _WPA_SUPPLICANT_CONFIG_BSSID = """ctrl_interface=/var/run/wpa_supplicant
 ap_scan=1
 autoscan=exponential:1:30
-network={
+freq_list={freq_list}
+network={{
 \tssid="some ssid"
 \t#psk="some passphrase"
 \tpsk=41821f7ca3ea5d85beea7644ed7e0fefebd654177fa06c26fbdfdc3c599a317f
 \tscan_ssid=1
 \tbssid=12:34:56:78:90:ab
-}
+}}
 """
 
 # pylint: disable=g-backslash-continuation
@@ -38,12 +47,13 @@
 """ctrl_interface=/var/run/wpa_supplicant
 ap_scan=1
 autoscan=exponential:1:30
-network={
+freq_list={freq_list}
+network={{
 \tssid="some ssid"
 \tkey_mgmt=NONE
 \tscan_ssid=1
 \tbssid=12:34:56:78:90:ab
-}
+}}
 """
 
 
@@ -54,24 +64,30 @@
         "Can't test generate_wpa_supplicant_config without wpa_passphrase.")
     return
 
-  opt = FakeOptDict()
-  config = configs.generate_wpa_supplicant_config(
-      'some ssid', 'some passphrase', opt)
-  wvtest.WVPASSEQ(_WPA_SUPPLICANT_CONFIG, config)
+  for band in ('2.4', '5'):
+    opt = FakeOptDict()
+    opt.band = band
+    got = configs.generate_wpa_supplicant_config(
+        'some ssid', 'some passphrase', opt)
+    want = _WPA_SUPPLICANT_CONFIG.format(freq_list=_FREQ_LIST[band])
+    wvtest.WVPASSEQ(want, got)
 
-  opt.bssid = 'TotallyNotValid'
-  wvtest.WVEXCEPT(utils.BinWifiException,
-                  configs.generate_wpa_supplicant_config,
-                  'some ssid', 'some passphrase', opt)
+    opt.bssid = 'TotallyNotValid'
+    wvtest.WVEXCEPT(utils.BinWifiException,
+                    configs.generate_wpa_supplicant_config,
+                    'some ssid', 'some passphrase', opt)
 
-  opt.bssid = '12:34:56:78:90:Ab'
-  config = configs.generate_wpa_supplicant_config(
-      'some ssid', 'some passphrase', opt)
-  wvtest.WVPASSEQ(_WPA_SUPPLICANT_CONFIG_BSSID, config)
+    opt.bssid = '12:34:56:78:90:Ab'
+    got = configs.generate_wpa_supplicant_config(
+        'some ssid', 'some passphrase', opt)
+    want = _WPA_SUPPLICANT_CONFIG_BSSID.format(freq_list=_FREQ_LIST[band])
+    wvtest.WVPASSEQ(want, got)
 
-  config = configs.generate_wpa_supplicant_config(
-      'some ssid', None, opt)
-  wvtest.WVPASSEQ(_WPA_SUPPLICANT_CONFIG_BSSID_UNSECURED, config)
+    got = configs.generate_wpa_supplicant_config(
+        'some ssid', None, opt)
+    want = _WPA_SUPPLICANT_CONFIG_BSSID_UNSECURED.format(
+        freq_list=_FREQ_LIST[band])
+    wvtest.WVPASSEQ(want, got)
 
 
 _PHY_INFO = """Wiphy phy0
diff --git a/wifi/quantenna.py b/wifi/quantenna.py
index 39dfabf..1408574 100755
--- a/wifi/quantenna.py
+++ b/wifi/quantenna.py
@@ -50,8 +50,8 @@
   return None, None, None, None
 
 
-def _set_link_state(hif, state):
-  subprocess.check_output(['ip', 'link', 'set', 'dev', hif, state])
+def _ifplugd_action(hif, state):
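+  """Run the ifplugd action script to bring hif 'up' or 'down'."""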
+  subprocess.check_output(['/etc/ifplugd/ifplugd.action', hif, state])
 
 
 def _parse_scan_result(line):
@@ -145,7 +145,7 @@
     _qcsapi('vlan_config', 'pcie0', 'trunk', vlan)
 
     _qcsapi('block_bss', lif, 0)
-    _set_link_state(hif, 'up')
+    _ifplugd_action(hif, 'up')
   except:
     stop_ap_wifi(opt)
     raise
@@ -188,7 +188,7 @@
     _qcsapi('vlan_config', 'pcie0', 'enable')
     _qcsapi('vlan_config', 'pcie0', 'trunk', vlan)
 
-    _set_link_state(hif, 'up')
+    _ifplugd_action(hif, 'up')
   except:
     stop_client_wifi(opt)
     raise
@@ -207,7 +207,7 @@
   except subprocess.CalledProcessError:
     pass
 
-  _set_link_state(hif, 'down')
+  _ifplugd_action(hif, 'down')
 
   return True
 
@@ -223,7 +223,7 @@
   except subprocess.CalledProcessError:
     pass
 
-  _set_link_state(hif, 'down')
+  _ifplugd_action(hif, 'down')
 
   return True
 
diff --git a/wifi/wifi.py b/wifi/wifi.py
index b0ef7f9..8797633 100755
--- a/wifi/wifi.py
+++ b/wifi/wifi.py
@@ -542,9 +542,20 @@
       ('hostapd_cli', '-i', interface, 'status'), no_stdout=True) == 0
 
 
-def _is_wpa_supplicant_running(interface):
+def _wpa_cli(program, interface, command):
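+  """Run a wpa_cli-style command against interface; return True on success."""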
   return utils.subprocess_quiet(
-      ('wpa_cli', '-i', interface, 'status'), no_stdout=True) == 0
+      (program, '-i', interface, command), no_stdout=True) == 0
+
+
+def _is_wpa_supplicant_running(interface):
+  return _wpa_cli('wpa_cli', interface, 'status')
+
+
+def _reconfigure_wpa_supplicant(interface):
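+  """Reload a running wpa_supplicant's config and wait for it to associate."""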
+  if not _wpa_cli('wpa_cli', interface, 'reconfigure'):
+    return False
+
+  return _wait_for_wpa_supplicant_to_associate(interface)
 
 
 def _hostapd_debug_options():
@@ -653,6 +664,38 @@
         return None
 
 
+def _wait_for_wpa_supplicant_to_associate(interface):
+  """Wait for wpa_supplicant to associate.
+
+  If it does not associate within a certain period of time, terminate it.
+
+  Args:
+    interface: The interface on which wpa_supplicant is running.
+
+  Raises:
+    BinWifiException: if wpa_supplicant fails to associate and also cannot
+    be stopped to clean up after the failure.
+
+  Returns:
+    Whether wpa_supplicant associated within the timeout.
+  """
+  utils.log('Waiting for wpa_supplicant to connect')
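+  # Poll every 100ms, for roughly 10 seconds in total.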
+  for _ in xrange(100):
+    if _get_wpa_state(interface) == 'COMPLETED':
+      utils.log('ok')
+      return True
+    sys.stderr.write('.')
+    time.sleep(0.1)
+
+  utils.log('wpa_supplicant did not connect.')
+  if not _stop_wpa_supplicant(interface):
+    raise utils.BinWifiException(
+        "Couldn't stop wpa_supplicant after it failed to connect.  "
+        "Consider killing it manually.")
+
+  return False
+
+
 def _start_wpa_supplicant(interface, config_filename):
   """Starts a babysat wpa_supplicant.
 
@@ -704,21 +747,7 @@
   else:
     return False
 
-  utils.log('Waiting for wpa_supplicant to connect')
-  for _ in xrange(100):
-    if _get_wpa_state(interface) == 'COMPLETED':
-      utils.log('ok')
-      return True
-    sys.stderr.write('.')
-    time.sleep(0.1)
-
-  utils.log('wpa_supplicant did not connect.')
-  if not _stop_wpa_supplicant(interface):
-    raise utils.BinWifiException(
-        "Couldn't stop wpa_supplicant after it failed to connect.  "
-        "Consider killing it manually.")
-
-  return False
+  return _wait_for_wpa_supplicant_to_associate(interface)
 
 
 def _maybe_restart_hostapd(interface, config, opt):
@@ -777,8 +806,7 @@
 def _restart_hostapd(band):
   """Restart hostapd from previous options.
 
-  Only used by _maybe_restart_wpa_supplicant, to restart hostapd after stopping
-  it.
+  Only used by _set_wpa_supplicant_config, to restart hostapd after stopping it.
 
   Args:
     band: The band on which to restart hostapd.
@@ -797,7 +825,7 @@
   _run(argv)
 
 
-def _maybe_restart_wpa_supplicant(interface, config, opt):
+def _set_wpa_supplicant_config(interface, config, opt):
   """Starts or restarts wpa_supplicant unless doing so would be a no-op.
 
   The no-op case (i.e. wpa_supplicant is already running with an equivalent
@@ -826,11 +854,12 @@
   except IOError:
     pass
 
-  if not _is_wpa_supplicant_running(interface):
+  already_running = _is_wpa_supplicant_running(interface)
+  if not already_running:
     utils.log('wpa_supplicant not running yet, starting.')
   elif current_config != config:
     # TODO(rofrankel): Consider using wpa_cli reconfigure here.
-    utils.log('wpa_supplicant config changed, restarting.')
+    utils.log('wpa_supplicant config changed, reconfiguring.')
   elif opt.force_restart:
     utils.log('Forced restart requested.')
     forced = True
@@ -838,12 +867,12 @@
     utils.log('wpa_supplicant-%s already configured and running', interface)
     return True
 
-  if not _stop_wpa_supplicant(interface):
-    raise utils.BinWifiException("Couldn't stop wpa_supplicant")
-
   if not forced:
     utils.atomic_write(tmp_config_filename, config)
 
+  # TODO(rofrankel): Consider removing all the restart hostapd stuff when
+  # b/30140131 is resolved.  hostapd seems to keep working without being
+  # restarted, at least on Camaro.
   restart_hostapd = False
   ap_interface = iw.find_interface_from_band(band, iw.INTERFACE_TYPE.ap,
                                              opt.interface_suffix)
@@ -852,13 +881,15 @@
     opt_without_persist = options.OptDict({})
     opt_without_persist.persist = False
     opt_without_persist.band = opt.band
-    # Code review: Will AP and client always have the same suffix?
     opt_without_persist.interface_suffix = opt.interface_suffix
     if not stop_ap_wifi(opt_without_persist):
       raise utils.BinWifiException(
           "Couldn't stop hostapd to start wpa_supplicant.")
 
-  if not _start_wpa_supplicant(interface, tmp_config_filename):
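+  # If wpa_supplicant is already up, have it re-read the new config in place
+  # rather than stopping and restarting it.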
+  if already_running:
+    if not _reconfigure_wpa_supplicant(interface):
+      raise utils.BinWifiException('Failed to reconfigure wpa_supplicant.')
+  elif not _start_wpa_supplicant(interface, tmp_config_filename):
     raise utils.BinWifiException(
         'wpa_supplicant failed to start.  Look at wpa_supplicant logs for '
         'details.')
@@ -934,7 +965,7 @@
           ('ip', 'link', 'set', interface, 'address', mac_address))
 
   wpa_config = configs.generate_wpa_supplicant_config(opt.ssid, psk, opt)
-  if not _maybe_restart_wpa_supplicant(interface, wpa_config, opt):
+  if not _set_wpa_supplicant_config(interface, wpa_config, opt):
     return False
 
   return True