Code drop of WebRTC r1538

- The corresponding libjingle version is r115 per
http://code.google.com/p/webrtc/source/browse/trunk/peerconnection/DEPS?r=1739

Change-Id: I8f25660c25dce038880afaa68357161a61d51da4
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..c3d75d6
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,6 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc.
+Mozilla Foundation
+Ben Strong <bstrong@gmail.com>
\ No newline at end of file
diff --git a/All.target.mk b/All.target.mk
new file mode 100644
index 0000000..dfd3e13
--- /dev/null
+++ b/All.target.mk
@@ -0,0 +1,18 @@
+# This file is generated by gyp; do not edit.
+
+TOOLSET := target
+TARGET := All
+### Rules for final target.
+$(obj).target/All.stamp: TOOLSET := $(TOOLSET)
+$(obj).target/All.stamp: $(builddir)/peerconnection_server $(builddir)/peerconnection_client $(obj).target/src/common_audio/libsignal_processing.a $(obj).target/src/common_audio/libresampler.a $(obj).target/src/common_audio/libvad.a $(builddir)/signal_processing_unittests $(builddir)/resampler_unittests $(builddir)/vad_unittests $(obj).target/src/common_video/libwebrtc_libyuv.a $(obj).target/src/common_video/libwebrtc_jpeg.a $(builddir)/libyuv_unittests $(builddir)/jpeg_test $(obj).target/src/modules/libCNG.a $(obj).target/src/modules/libG711.a $(obj).target/src/modules/libG722.a $(obj).target/src/modules/libiLBC.a $(obj).target/src/modules/libiSAC.a $(obj).target/src/modules/libiSACFix.a $(obj).target/src/modules/libPCM16B.a $(obj).target/src/modules/libaudio_coding_module.a $(obj).target/src/modules/libNetEq.a $(obj).target/src/modules/libaudio_conference_mixer.a $(obj).target/src/modules/libaudio_device.a $(obj).target/src/modules/libaudio_processing.a $(obj).target/src/modules/libaec.a $(obj).target/src/modules/libaec_sse2.a $(obj).target/src/modules/libaecm.a $(obj).target/src/modules/libagc.a $(obj).target/src/modules/libns.a $(obj).target/src/modules/libns_fix.a $(obj).target/src/modules/libapm_util.a $(obj).target/src/modules/libmedia_file.a $(obj).target/src/modules/libudp_transport.a $(obj).target/src/modules/libwebrtc_utility.a $(obj).target/src/modules/libwebrtc_i420.a $(obj).target/src/modules/libwebrtc_vp8.a $(obj).target/src/modules/libwebrtc_video_coding.a $(obj).target/src/modules/libvideo_capture_module.a $(obj).target/src/modules/libvideo_processing.a $(obj).target/src/modules/libvideo_processing_sse2.a $(obj).target/src/modules/libvideo_render_module.a $(obj).target/src/modules/librtp_rtcp.a $(builddir)/iSACtest $(builddir)/iSACAPITest $(builddir)/iSACSwitchSampRateTest $(builddir)/iSACFixtest $(builddir)/audioproc_unittest $(obj).target/src/modules/libaudioproc_unittest_proto.a $(builddir)/rtp_rtcp_unittests $(builddir)/test_bwe $(builddir)/test_fec $(builddir)/test_rtp_rtcp_api $(builddir)/video_coding_test $(builddir)/video_coding_unittests $(builddir)/video_processing_unittests $(builddir)/audioproc $(builddir)/unpack_aecdump $(obj).target/src/modules/libvideo_codecs_test_framework.a $(builddir)/video_codecs_test_framework_unittests $(builddir)/video_codecs_test_framework_integrationtests $(builddir)/video_quality_measurement $(builddir)/cng_unittests $(builddir)/g711_unittests $(builddir)/g711_test $(builddir)/g722_unittests $(builddir)/G722Test $(builddir)/iLBCtest $(builddir)/pcm16b_unittests $(builddir)/audio_coding_module_test $(builddir)/audio_coding_unittests $(builddir)/neteq_unittests $(builddir)/NetEqRTPplay $(builddir)/RTPencode $(builddir)/RTPjitter $(builddir)/RTPanalyze $(builddir)/RTPchange $(builddir)/RTPtimeshift $(builddir)/RTPcat $(obj).target/src/modules/libNetEqTestTools.a $(builddir)/audio_conference_mixer_unittests $(builddir)/audio_device_test_api $(builddir)/audio_device_test_func $(obj).target/src/modules/libaudioproc_debug_proto.a $(builddir)/media_file_unittests $(builddir)/udp_transport_unittests $(builddir)/webrtc_utility_unittests $(obj).target/src/modules/libtest_framework.a $(builddir)/vp8_test $(builddir)/vp8_unittests $(builddir)/video_capture_module_test $(builddir)/video_render_module_test $(obj).target/src/system_wrappers/source/libsystem_wrappers.a $(builddir)/system_wrappers_unittests $(obj).target/src/video_engine/libvideo_engine_core.a $(builddir)/vie_auto_test $(builddir)/video_engine_core_unittests $(obj).target/src/voice_engine/libvoice_engine_core.a $(builddir)/voe_auto_test $(builddir)/voe_cmd_test $(builddir)/voice_engine_unittests $(obj).target/test/libmetrics.a $(builddir)/metrics_unittests $(obj).target/test/libtest_support.a $(obj).target/test/libtest_support_main.a $(builddir)/test_support_unittests FORCE_DO_CMD
+	$(call do_cmd,touch)
+
+all_deps += $(obj).target/All.stamp
+# Add target alias
+.PHONY: All
+All: $(obj).target/All.stamp
+
+# Add target alias to "all" target.
+.PHONY: all
+all: All
+
diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000..b626394
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1,167 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+MY_WEBRTC_ROOT_PATH := $(call my-dir)
+
+# voice
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/resampler/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/signal_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_audio/vad/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/neteq/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/cng/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/g711/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/g722/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/pcm16b/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/ilbc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/iSAC/fix/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/codecs/iSAC/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_coding/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_conference_mixer/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_device/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/aec/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/aecm/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/agc/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/ns/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/audio_processing/utility/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/media_file/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/rtp_rtcp/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/udp_transport/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/utility/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/system_wrappers/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/source/Android.mk
+
+# video
+include $(MY_WEBRTC_ROOT_PATH)/src/common_video/jpeg/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/common_video/libyuv/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_capture/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/codecs/i420/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/codecs/vp8/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_coding/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_processing/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/modules/video_render/main/source/Android.mk
+include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/Android.mk
+
+# third party
+include $(MY_WEBRTC_ROOT_PATH)/libvpx.mk
+
+# build .so
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+include $(LOCAL_PATH)/../../external/webrtc/android-webrtc.mk
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc_audio_preprocessing
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_spl \
+    libwebrtc_resampler \
+    libwebrtc_apm \
+    libwebrtc_apm_utility \
+    libwebrtc_vad \
+    libwebrtc_ns \
+    libwebrtc_agc \
+    libwebrtc_aec \
+    libwebrtc_aecm \
+    libwebrtc_system_wrappers
+
+# Add Neon libraries.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+LOCAL_WHOLE_STATIC_LIBRARIES += \
+    libwebrtc_aecm_neon \
+    libwebrtc_ns_neon
+endif
+
+LOCAL_STATIC_LIBRARIES := \
+    libprotobuf-cpp-2.3.0-lite
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport
+
+LOCAL_PRELINK_MODULE := false
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_SHARED_LIBRARY)
+
+###
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_ARM_MODE := arm
+LOCAL_MODULE := libwebrtc
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_WHOLE_STATIC_LIBRARIES := \
+    libwebrtc_system_wrappers \
+    libwebrtc_audio_device \
+    libwebrtc_pcm16b \
+    libwebrtc_cng \
+    libwebrtc_audio_coding \
+    libwebrtc_rtp_rtcp \
+    libwebrtc_media_file \
+    libwebrtc_udp_transport \
+    libwebrtc_utility \
+    libwebrtc_neteq \
+    libwebrtc_audio_conference_mixer \
+    libwebrtc_isac \
+    libwebrtc_ilbc \
+    libwebrtc_isacfix \
+    libwebrtc_g722 \
+    libwebrtc_g711 \
+    libwebrtc_voe_core \
+    libwebrtc_video_render \
+    libwebrtc_video_capture \
+    libwebrtc_i420 \
+    libwebrtc_video_coding \
+    libwebrtc_video_processing \
+    libwebrtc_vp8 \
+    libwebrtc_vie_core \
+    libwebrtc_yuv \
+    libwebrtc_jpeg \
+    libwebrtc_vpx
+
+# Add Neon libraries.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+LOCAL_WHOLE_STATIC_LIBRARIES += \
+    libwebrtc_isacfix_neon
+endif
+
+LOCAL_STATIC_LIBRARIES := \
+    libyuv_static
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libdl \
+    libstlport \
+    libjpeg \
+    libGLESv2 \
+    libOpenSLES \
+    libwebrtc_audio_preprocessing
+
+LOCAL_PRELINK_MODULE := false
+
+ifndef NDK_ROOT
+include external/stlport/libstlport.mk
+endif
+include $(BUILD_SHARED_LIBRARY)
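+# Note (editorial, not part of the generated build): the rules above produce
+# two shared libraries: libwebrtc_audio_preprocessing (the audio-processing
+# stack) and libwebrtc, which lists libwebrtc_audio_preprocessing in
+# LOCAL_SHARED_LIBRARIES and therefore depends on it at load time.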
+
+# test apps (test-only); each of them sets LOCAL_MODULE_TAGS := tests
+# voice engine test apps
+include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/test/cmd_test/Android.mk
+#include $(MY_WEBRTC_ROOT_PATH)/src/voice_engine/main/test/auto_test/Android.mk
+# video engine test apps
+include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/main/test/android_test/Android.mk
+#include $(MY_WEBRTC_ROOT_PATH)/src/video_engine/test/auto_test/android/Android.mk
diff --git a/DEPS b/DEPS
new file mode 100644
index 0000000..31fdfd0
--- /dev/null
+++ b/DEPS
@@ -0,0 +1,131 @@
+vars = {
+  # Use this googlecode_url variable only if there is an internal mirror for it.
+  # If you do not know, use the full path while defining your new deps entry.
+  "googlecode_url": "http://%s.googlecode.com/svn",
+  "chromium_trunk" : "http://src.chromium.org/svn/trunk",
+  "chromium_revision": "114939",
+  "libjingle_revision": "101",
+
+  # External resources like video and audio files used for testing purposes.
+  # Downloaded on demand when needed.
+  "webrtc_resources_revision": "6",
+}
+
+# NOTE: Prefer revision numbers to tags for svn deps.
+deps = {
+  "trunk/build":
+    Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"),
+
+  "trunk/testing":
+    Var("chromium_trunk") + "/src/testing@" + Var("chromium_revision"),
+
+  "trunk/testing/gtest":
+    (Var("googlecode_url") % "googletest") + "/trunk@573",
+
+  "trunk/testing/gmock":
+    (Var("googlecode_url") % "googlemock") + "/trunk@386",
+
+  "trunk/tools/gyp":
+    (Var("googlecode_url") % "gyp") + "/trunk@1107",
+
+  # Needed by build/common.gypi.
+  "trunk/tools/win/supalink":
+    Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"),
+
+  "trunk/tools/clang/scripts":
+    Var("chromium_trunk") + "/src/tools/clang/scripts@" + Var("chromium_revision"),
+
+  "trunk/tools/python":
+    Var("chromium_trunk") + "/src/tools/python@" + Var("chromium_revision"),
+
+  "trunk/tools/valgrind":
+    Var("chromium_trunk") + "/src/tools/valgrind@" + Var("chromium_revision"),
+
+  "trunk/third_party/protobuf/":
+    Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"),
+
+  "trunk/third_party/libvpx/source/libvpx":
+    "http://git.chromium.org/webm/libvpx.git@e479379a",
+
+  "trunk/third_party/libjpeg_turbo/":
+    Var("chromium_trunk") + "/deps/third_party/libjpeg_turbo@95800",
+
+  "trunk/third_party/libjpeg/":
+    Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"),
+
+  "trunk/third_party/libsrtp/":
+    Var("chromium_trunk") + "/deps/third_party/libsrtp@115467",
+
+  "trunk/third_party/yasm/":
+    Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"),
+
+  "trunk/third_party/expat/":
+    Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"),
+
+  "trunk/third_party/libjingle/":
+    Var("chromium_trunk") + "/src/third_party/libjingle@" + Var("chromium_revision"),
+
+  "trunk/third_party/google-gflags/src":
+    (Var("googlecode_url") % "google-gflags") + "/trunk/src@45",
+
+  "trunk/third_party/libjingle/source":
+    (Var("googlecode_url") % "libjingle") + "/trunk@" + Var("libjingle_revision"),
+
+  "trunk/third_party/yasm/source/patched-yasm":
+    Var("chromium_trunk") + "/deps/third_party/yasm/patched-yasm@73761",
+    
+  # Used by libjpeg-turbo
+  "trunk/third_party/yasm/binaries":
+    Var("chromium_trunk") + "/deps/third_party/yasm/binaries@74228",
+
+  "trunk/third_party/jsoncpp/":
+    "http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@246",
+
+  "trunk/third_party/libyuv":
+    (Var("googlecode_url") % "libyuv") + "/trunk@121",
+
+  # Used by tools/coverage/dashboard and tools/python_charts
+  "trunk/third_party/google-visualization-python":
+    (Var("googlecode_url") % "google-visualization-python") + "/trunk@15",
+
+  # Used by tools/coverage
+  "trunk/third_party/oauth2":
+    "https://github.com/simplegeo/python-oauth2.git@a83f4a297336b631e75cba102910c19231518159"
+}
+
+deps_os = {
+  "win": {
+    "trunk/third_party/cygwin/":
+      Var("chromium_trunk") + "/deps/third_party/cygwin@66844",
+  }
+}
+
+hooks = [
+  {
+    # Create a supplement.gypi file under trunk/.  This file will be picked up
+    # by gyp and we use it to set Chromium-related variables (inside_chromium_build)
+    # to 0 and enable the standalone build.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/create_supplement_gypi.py", "trunk/src/supplement.gypi"],
+  },
+  {
+    # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes
+    # zero seconds to run. If something changed, it downloads a prebuilt clang.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/clang/scripts/update.py", "--mac-only"],
+  },
+  {
+    # Download test resources, i.e. video and audio files. If the latest
+    # version is already downloaded, this takes zero seconds to run.
+    # If a newer version or no current download exists, it will download
+    # the resources and extract them.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/resources/update.py"],
+  },
+  {
+    # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+    "pattern": ".",
+    "action": ["python", "trunk/build/gyp_chromium", "--depth=trunk", "trunk/webrtc.gyp"],
+  },
+]
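+
+# Note: the hooks above are run by gclient (on "gclient sync" or via
+# "gclient runhooks"); each "action" is the command line to execute, so the
+# last hook regenerates the GYP-based build files for webrtc.gyp.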
+
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..dd4a345
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+Refer to src/LICENSE.
diff --git a/LICENSE_THIRD_PARTY b/LICENSE_THIRD_PARTY
new file mode 100644
index 0000000..d47c055
--- /dev/null
+++ b/LICENSE_THIRD_PARTY
@@ -0,0 +1 @@
+Refer to src/LICENSE_THIRD_PARTY.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..da80d9e
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,1073 @@
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := .
+
+# The name of the builddir.
+builddir_name ?= out
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+  quiet=
+else
+  quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= Debug
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+
+
+# C++ apps need to be linked with g++.
+#
+# Note: flock is used to serialize linking. Linking is a memory-intensive
+# process, so running parallel links can often lead to thrashing.  To disable
+# the serialization, override LINK via an environment variable as follows:
+#
+#   export LINK=g++
+#
+# This will allow make to invoke N linker processes as specified in -jN.
+LINK ?= flock $(builddir)/linker.lock $(CXX)
+
+CC.target ?= $(CC)
+CFLAGS.target ?= $(CFLAGS)
+CXX.target ?= $(CXX)
+CXXFLAGS.target ?= $(CXXFLAGS)
+LINK.target ?= $(LINK)
+LDFLAGS.target ?= $(LDFLAGS) 
+AR.target ?= $(AR)
+ARFLAGS.target ?= crsT
+
+# N.B.: the logic of which commands to run should match the computation done
+# in gyp's make.py where ARFLAGS.host etc. is computed.
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= gcc
+CFLAGS.host ?=
+CXX.host ?= g++
+CXXFLAGS.host ?=
+LINK.host ?= g++
+LDFLAGS.host ?=
+AR.host ?= ar
+ARFLAGS.host := crsT
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),?,$1)
+unreplace_spaces = $(subst ?,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
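+# Illustrative expansion (example path only): a space survives $(dir) because
+# it is temporarily mapped to '?', e.g.
+#   $(call dirx,out/Debug/some dir/foo.o)
+#     replace_spaces -> out/Debug/some?dir/foo.o
+#     dir            -> out/Debug/some?dir/
+#     result         -> out/Debug/some dir/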
+
+# Flags to make gcc output dependency info.  Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = -MMD -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+#   foobar.o: DEP1 DEP2
+# into
+#   path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+#   foobar.o: DEP1 DEP2 \
+#               DEP3
+# to
+#   DEP1:
+#   DEP2:
+#   DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters.
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+  grep -v '^$$'                             |\
+  sed -e 1d -e 's|$$|:|'                     \
+    >> $(depfile)
+rm $(depfile).raw
+endef
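+
+# Worked example (using the placeholder names from the comment above): if the
+# raw dep file contains
+#   foobar.o: DEP1 DEP2
+# the two sed passes append to $(depfile)
+#   path/to/foobar.o: DEP1 DEP2
+#   DEP1:
+#   DEP2:
+# so a later-deleted DEP1 is treated as a phony rule rather than a fatal error.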
+
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
+
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+
+# We support two kinds of shared objects (.so):
+# 1) shared_library, which is just bundling together many dependent libraries
+# into a link line.
+# 2) loadable_module, which is generating a module intended for dlopen().
+#
+# They differ only slightly:
+# In the former case, we want to package all dependent code into the .so.
+# In the latter case, we want to package just the API exposed by the
+# outermost module.
+# This means shared_library uses --whole-archive, while loadable_module doesn't.
+# (Note that --whole-archive is incompatible with the --start-group used in
+# normal linking.)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+
+
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always
+# use single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
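+# Illustrative expansions (hypothetical inputs, shown only to make the helpers
+# above concrete):
+#   escape_quotes: it's             -> it'\''s
+#   escape_vars:   $LD_LIBRARY_PATH -> $$LD_LIBRARY_PATH
+#   exact_echo:    hi               -> printf '%s\n' 'hi'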
+
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command.  Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
+#                       $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain ? instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
+                       $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+#   $? -- new prerequisites
+#   $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
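+
+# Illustrative case: if the .d file logged "cmd_... := gcc -O2 -c foo.c" but
+# the current command expands to "gcc -O0 -c foo.c", neither string is a
+# substring of the other, both $(subst ...) calls leave non-empty text, and
+# command_changed is therefore non-empty, forcing the rule to re-run.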
+
+# Helper that executes all postbuilds, and deletes the output file when done
+# if any of the postbuilds failed.
+define do_postbuilds
+  @E=0;\
+  for p in $(POSTBUILDS); do\
+    eval $$p;\
+    F=$$?;\
+    if [ $$F -ne 0 ]; then\
+      E=$$F;\
+    fi;\
+  done;\
+  if [ $$E -ne 0 ]; then\
+    rm -rf "$@";\
+    exit $$E;\
+  fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
+# spaces already and dirx strips the ? characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+  @$(call exact_echo,  $($(quiet)cmd_$(1)))
+  @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+  $(if $(findstring flock,$(word 1,$(cmd_$1))),
+    @$(cmd_$(1))
+    @echo "  $(quiet_cmd_$(1)): Finished",
+    @$(cmd_$(1))
+  )
+  @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+  @$(if $(2),$(fixup_dep))
+  $(if $(and $(3), $(POSTBUILDS)),
+    $(call do_postbuilds)
+  )
+)
+endef
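+
+# Example use (see the suffix rules below): a C compile rule is written as
+#   @$(call do_cmd,cc,1)
+# which echoes and runs cmd_cc only when the command line or a prerequisite
+# changed, and performs the dependency munging because argument 2 is non-empty.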
+
+# Declare the "all" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: all
+all:
+
+# Use FORCE_DO_CMD to force a target to run.  Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+TOOLSET := host
+# Suffix rules, putting all outputs into $(obj).
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+# Try building from generated source, too.
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+TOOLSET := target
+# Suffix rules, putting all outputs into $(obj).
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+# Try building from generated source, too.
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
+	@$(call do_cmd,cc,1)
+
+
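+# The conditionals below include each generated sub-makefile unless its path
+# starts with a prefix listed in NO_LOAD (the "^"-joined findstring acts as a
+# prefix match).  Example invocations (illustrative only):
+#   make NO_LOAD=testing               # skip the gtest/gmock makefiles
+#   make NO_LOAD="testing third_party" # skip several trees at once
+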
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,All.target.mk)))),)
+  include All.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,peerconnection/peerconnection_client.target.mk)))),)
+  include peerconnection/peerconnection_client.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,peerconnection/peerconnection_server.target.mk)))),)
+  include peerconnection/peerconnection_server.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/resampler.target.mk)))),)
+  include src/common_audio/resampler.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/resampler_unittests.target.mk)))),)
+  include src/common_audio/resampler_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/signal_processing.target.mk)))),)
+  include src/common_audio/signal_processing.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/signal_processing_unittests.target.mk)))),)
+  include src/common_audio/signal_processing_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/vad.target.mk)))),)
+  include src/common_audio/vad.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_audio/vad_unittests.target.mk)))),)
+  include src/common_audio/vad_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_video/jpeg_test.target.mk)))),)
+  include src/common_video/jpeg_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_video/libyuv_unittests.target.mk)))),)
+  include src/common_video/libyuv_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_video/webrtc_jpeg.target.mk)))),)
+  include src/common_video/webrtc_jpeg.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/common_video/webrtc_libyuv.target.mk)))),)
+  include src/common_video/webrtc_libyuv.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/CNG.target.mk)))),)
+  include src/modules/CNG.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/G711.target.mk)))),)
+  include src/modules/G711.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/G722.target.mk)))),)
+  include src/modules/G722.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/G722Test.target.mk)))),)
+  include src/modules/G722Test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/NetEq.target.mk)))),)
+  include src/modules/NetEq.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/NetEqRTPplay.target.mk)))),)
+  include src/modules/NetEqRTPplay.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/NetEqTestTools.target.mk)))),)
+  include src/modules/NetEqTestTools.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/PCM16B.target.mk)))),)
+  include src/modules/PCM16B.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPanalyze.target.mk)))),)
+  include src/modules/RTPanalyze.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPcat.target.mk)))),)
+  include src/modules/RTPcat.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPchange.target.mk)))),)
+  include src/modules/RTPchange.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPencode.target.mk)))),)
+  include src/modules/RTPencode.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPjitter.target.mk)))),)
+  include src/modules/RTPjitter.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/RTPtimeshift.target.mk)))),)
+  include src/modules/RTPtimeshift.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/aec.target.mk)))),)
+  include src/modules/aec.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/aec_sse2.target.mk)))),)
+  include src/modules/aec_sse2.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/aecm.target.mk)))),)
+  include src/modules/aecm.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/agc.target.mk)))),)
+  include src/modules/agc.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/apm_util.target.mk)))),)
+  include src/modules/apm_util.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_coding_module.target.mk)))),)
+  include src/modules/audio_coding_module.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_coding_module_test.target.mk)))),)
+  include src/modules/audio_coding_module_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_coding_unittests.target.mk)))),)
+  include src/modules/audio_coding_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_conference_mixer.target.mk)))),)
+  include src/modules/audio_conference_mixer.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_conference_mixer_unittests.target.mk)))),)
+  include src/modules/audio_conference_mixer_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_device.target.mk)))),)
+  include src/modules/audio_device.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_device_test_api.target.mk)))),)
+  include src/modules/audio_device_test_api.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_device_test_func.target.mk)))),)
+  include src/modules/audio_device_test_func.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audio_processing.target.mk)))),)
+  include src/modules/audio_processing.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audioproc.target.mk)))),)
+  include src/modules/audioproc.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audioproc_debug_proto.target.mk)))),)
+  include src/modules/audioproc_debug_proto.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audioproc_unittest.target.mk)))),)
+  include src/modules/audioproc_unittest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/audioproc_unittest_proto.target.mk)))),)
+  include src/modules/audioproc_unittest_proto.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/cng_unittests.target.mk)))),)
+  include src/modules/cng_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/g711_test.target.mk)))),)
+  include src/modules/g711_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/g711_unittests.target.mk)))),)
+  include src/modules/g711_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/g722_unittests.target.mk)))),)
+  include src/modules/g722_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iLBC.target.mk)))),)
+  include src/modules/iLBC.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iLBCtest.target.mk)))),)
+  include src/modules/iLBCtest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSAC.target.mk)))),)
+  include src/modules/iSAC.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSACAPITest.target.mk)))),)
+  include src/modules/iSACAPITest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSACFix.target.mk)))),)
+  include src/modules/iSACFix.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSACFixtest.target.mk)))),)
+  include src/modules/iSACFixtest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSACSwitchSampRateTest.target.mk)))),)
+  include src/modules/iSACSwitchSampRateTest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/iSACtest.target.mk)))),)
+  include src/modules/iSACtest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/media_file.target.mk)))),)
+  include src/modules/media_file.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/media_file_unittests.target.mk)))),)
+  include src/modules/media_file_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/neteq_unittests.target.mk)))),)
+  include src/modules/neteq_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/ns.target.mk)))),)
+  include src/modules/ns.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/ns_fix.target.mk)))),)
+  include src/modules/ns_fix.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/pcm16b_unittests.target.mk)))),)
+  include src/modules/pcm16b_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/rtp_rtcp.target.mk)))),)
+  include src/modules/rtp_rtcp.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/rtp_rtcp_unittests.target.mk)))),)
+  include src/modules/rtp_rtcp_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/test_bwe.target.mk)))),)
+  include src/modules/test_bwe.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/test_fec.target.mk)))),)
+  include src/modules/test_fec.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/test_framework.target.mk)))),)
+  include src/modules/test_framework.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/test_rtp_rtcp_api.target.mk)))),)
+  include src/modules/test_rtp_rtcp_api.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/udp_transport.target.mk)))),)
+  include src/modules/udp_transport.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/udp_transport_unittests.target.mk)))),)
+  include src/modules/udp_transport_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/unpack_aecdump.target.mk)))),)
+  include src/modules/unpack_aecdump.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_capture_module.target.mk)))),)
+  include src/modules/video_capture_module.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_capture_module_test.target.mk)))),)
+  include src/modules/video_capture_module_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_codecs_test_framework.target.mk)))),)
+  include src/modules/video_codecs_test_framework.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_codecs_test_framework_integrationtests.target.mk)))),)
+  include src/modules/video_codecs_test_framework_integrationtests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_codecs_test_framework_unittests.target.mk)))),)
+  include src/modules/video_codecs_test_framework_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_coding_test.target.mk)))),)
+  include src/modules/video_coding_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_coding_unittests.target.mk)))),)
+  include src/modules/video_coding_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_processing.target.mk)))),)
+  include src/modules/video_processing.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_processing_sse2.target.mk)))),)
+  include src/modules/video_processing_sse2.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_processing_unittests.target.mk)))),)
+  include src/modules/video_processing_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_quality_measurement.target.mk)))),)
+  include src/modules/video_quality_measurement.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_render_module.target.mk)))),)
+  include src/modules/video_render_module.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/video_render_module_test.target.mk)))),)
+  include src/modules/video_render_module_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/vp8_test.target.mk)))),)
+  include src/modules/vp8_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/vp8_unittests.target.mk)))),)
+  include src/modules/vp8_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/webrtc_i420.target.mk)))),)
+  include src/modules/webrtc_i420.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/webrtc_utility.target.mk)))),)
+  include src/modules/webrtc_utility.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/webrtc_utility_unittests.target.mk)))),)
+  include src/modules/webrtc_utility_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/webrtc_video_coding.target.mk)))),)
+  include src/modules/webrtc_video_coding.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/modules/webrtc_vp8.target.mk)))),)
+  include src/modules/webrtc_vp8.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/system_wrappers/source/system_wrappers.target.mk)))),)
+  include src/system_wrappers/source/system_wrappers.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/system_wrappers/source/system_wrappers_unittests.target.mk)))),)
+  include src/system_wrappers/source/system_wrappers_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/video_engine/video_engine_core.target.mk)))),)
+  include src/video_engine/video_engine_core.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/video_engine/video_engine_core_unittests.target.mk)))),)
+  include src/video_engine/video_engine_core_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/video_engine/vie_auto_test.target.mk)))),)
+  include src/video_engine/vie_auto_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/voice_engine/voe_auto_test.target.mk)))),)
+  include src/voice_engine/voe_auto_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/voice_engine/voe_cmd_test.target.mk)))),)
+  include src/voice_engine/voe_cmd_test.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/voice_engine/voice_engine_core.target.mk)))),)
+  include src/voice_engine/voice_engine_core.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,src/voice_engine/voice_engine_unittests.target.mk)))),)
+  include src/voice_engine/voice_engine_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,test/metrics.target.mk)))),)
+  include test/metrics.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,test/metrics_unittests.target.mk)))),)
+  include test/metrics_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,test/test_support.target.mk)))),)
+  include test/test_support.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,test/test_support_main.target.mk)))),)
+  include test/test_support_main.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,test/test_support_unittests.target.mk)))),)
+  include test/test_support_unittests.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gmock.target.mk)))),)
+  include testing/gmock.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gmock_main.target.mk)))),)
+  include testing/gmock_main.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gtest.target.mk)))),)
+  include testing/gtest.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gtest_main.target.mk)))),)
+  include testing/gtest_main.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gtest_prod.host.mk)))),)
+  include testing/gtest_prod.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,testing/gtest_prod.target.mk)))),)
+  include testing/gtest_prod.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/expat/expat.target.mk)))),)
+  include third_party/expat/expat.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/google-gflags/google-gflags.target.mk)))),)
+  include third_party/google-gflags/google-gflags.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/libjpeg_turbo/libjpeg.target.mk)))),)
+  include third_party/libjpeg_turbo/libjpeg.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/libsrtp/libsrtp.target.mk)))),)
+  include third_party/libsrtp/libsrtp.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/libvpx/libvpx.target.mk)))),)
+  include third_party/libvpx/libvpx.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/libyuv/libyuv.target.mk)))),)
+  include third_party/libyuv/libyuv.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/protobuf_full_do_not_use.host.mk)))),)
+  include third_party/protobuf/protobuf_full_do_not_use.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/protobuf_full_do_not_use.target.mk)))),)
+  include third_party/protobuf/protobuf_full_do_not_use.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/protobuf_lite.host.mk)))),)
+  include third_party/protobuf/protobuf_lite.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/protobuf_lite.target.mk)))),)
+  include third_party/protobuf/protobuf_lite.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/protoc.host.mk)))),)
+  include third_party/protobuf/protoc.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/protobuf/py_proto.target.mk)))),)
+  include third_party/protobuf/py_proto.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/config_sources.host.mk)))),)
+  include third_party/yasm/config_sources.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/generate_files.host.mk)))),)
+  include third_party/yasm/generate_files.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genmacro.host.mk)))),)
+  include third_party/yasm/genmacro.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genmodule.host.mk)))),)
+  include third_party/yasm/genmodule.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genperf.host.mk)))),)
+  include third_party/yasm/genperf.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genperf_libs.host.mk)))),)
+  include third_party/yasm/genperf_libs.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genstring.host.mk)))),)
+  include third_party/yasm/genstring.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/genversion.host.mk)))),)
+  include third_party/yasm/genversion.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/re2c.host.mk)))),)
+  include third_party/yasm/re2c.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party/yasm/yasm.host.mk)))),)
+  include third_party/yasm/yasm.host.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party_mods/libjingle/jsoncpp.target.mk)))),)
+  include third_party_mods/libjingle/jsoncpp.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party_mods/libjingle/libjingle.target.mk)))),)
+  include third_party_mods/libjingle/libjingle.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party_mods/libjingle/libjingle_app.target.mk)))),)
+  include third_party_mods/libjingle/libjingle_app.target.mk
+endif
+ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
+    $(findstring $(join ^,$(prefix)),\
+                 $(join ^,third_party_mods/libjingle/libjingle_p2p.target.mk)))),)
+  include third_party_mods/libjingle/libjingle_p2p.target.mk
+endif
+
+quiet_cmd_regen_makefile = ACTION Regenerating $@
+cmd_regen_makefile = ./build/gyp_chromium -fmake --ignore-environment "--toplevel-dir=." -Ibuild/common.gypi -I/usr/local/google/clients/webrtc/trunk/src/supplement.gypi "--depth=." webrtc.gyp
+Makefile: src/modules/udp_transport/source/udp_transport.gypi src/modules/video_processing/main/test/vpm_tests.gypi src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi third_party/google-gflags/google-gflags.gyp test/test.gyp src/common_audio/signal_processing/signal_processing.gypi src/common_audio/resampler/resampler.gypi src/modules/video_capture/main/source/video_capture.gypi src/build/common.gypi src/modules/audio_processing/apm_tests.gypi third_party_mods/libjingle/libjingle.gyp third_party/yasm/yasm.gyp src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi src/modules/video_coding/codecs/i420/main/source/i420.gypi src/modules/audio_coding/codecs/g711/g711.gypi src/modules/rtp_rtcp/test/testFec/test_fec.gypi build/internal/release_impl.gypi src/supplement.gypi src/voice_engine/voice_engine.gyp build/use_skia_on_mac.gypi third_party/yasm/yasm_compile.gypi src/common_video/jpeg/main/source/jpeg.gypi src/common_settings.gypi src/modules/video_coding/main/source/video_coding.gypi src/modules/video_processing/main/source/video_processing.gypi src/modules/audio_processing/utility/util.gypi src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi src/modules/media_file/source/media_file.gypi src/modules/modules.gyp src/modules/video_coding/codecs/test_framework/test_framework.gypi build/internal/release_impl_official.gypi build/internal/release_defaults.gypi src/modules/audio_coding/codecs/iSAC/main/source/isac.gypi src/common_audio/vad/vad.gypi src/common_video/common_video.gyp src/video_engine/video_engine.gyp src/video_engine/test/auto_test/vie_auto_test.gypi third_party/libvpx/libvpx_srcs_arm_neon.gypi third_party/libvpx/libvpx_srcs_arm.gypi src/build/protoc.gypi src/modules/rtp_rtcp/source/rtp_rtcp.gypi third_party/libvpx/libvpx.gyp testing/gmock.gyp third_party/expat/expat.gyp src/voice_engine/main/test/voice_engine_tests.gypi src/modules/audio_processing/aecm/aecm.gypi src/system_wrappers/source/system_wrappers.gyp src/modules/audio_coding/codecs/iSAC/isacfix_test.gypi src/modules/video_coding/main/source/video_coding_test.gypi src/voice_engine/main/source/voice_engine_core.gypi src/modules/audio_coding/codecs/ilbc/ilbc.gypi src/modules/video_render/main/source/video_render.gypi src/modules/video_coding/codecs/tools/video_codecs_tools.gypi build/release.gypi test/metrics.gyp testing/gtest.gyp build/common.gypi src/modules/audio_processing/aec/aec.gypi src/modules/audio_coding/codecs/iSAC/isac_test.gypi third_party/libvpx/libvpx_srcs_x86.gypi src/modules/video_coding/codecs/vp8/main/source/vp8.gypi third_party/libyuv/libyuv.gyp third_party/libvpx/libvpx_srcs_x86_64.gypi src/modules/audio_coding/codecs/g722/g722.gypi third_party/libsrtp/libsrtp.gyp src/modules/rtp_rtcp/test/test_bwe/test_bwe.gypi src/common_audio/common_audio.gyp src/modules/audio_processing/agc/agc.gypi webrtc.gyp src/modules/audio_coding/neteq/neteq.gypi third_party/libjpeg_turbo/libjpeg.gyp peerconnection/peerconnection.gyp src/modules/audio_coding/codecs/iSAC/fix/source/isacfix.gypi src/video_engine/video_engine_core.gypi src/video_engine/main/test/WindowsTest/windowstest.gypi src/modules/audio_device/main/source/audio_device.gypi src/modules/audio_coding/main/source/audio_coding_module.gypi third_party/protobuf/protobuf.gyp src/common_video/libyuv/libyuv.gypi src/modules/audio_processing/audio_processing.gypi src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi src/modules/audio_coding/codecs/cng/cng.gypi src/modules/rtp_rtcp/test/testAPI/test_api.gypi src/modules/utility/source/utility.gypi src/modules/audio_processing/ns/ns.gypi
+	$(call do_cmd,regen_makefile)
+
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules.  $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+  # Rather than include each individual .d file, concatenate them into a
+  # single file which make is able to load faster.  We split this into
+  # commands that take 1000 files at a time to avoid overflowing the
+  # command line.
+  $(shell cat $(wordlist 1,1000,$(d_files)) > $(depsdir)/all.deps)
+
+  ifneq ($(word 1001,$(d_files)),)
+    $(shell cat $(wordlist 1001,2000,$(d_files)) >> $(depsdir)/all.deps)
+  endif
+  ifneq ($(word 2001,$(d_files)),)
+    $(error Found unprocessed dependency files (gyp didn't generate enough rules!))
+  endif
+
+  # make looks for ways to re-generate included makefiles, but in our case, we
+  # don't have a direct way. Explicitly telling make that it has nothing to do
+  # for them makes it go faster.
+  $(depsdir)/all.deps: ;
+
+  include $(depsdir)/all.deps
+endif
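
Editorial note: the dependency-file concatenation above appends the .d files to all.deps in batches of 1000 paths so that no single `cat` invocation overflows the shell's command-line limit. A minimal Python sketch of the same batching idea (the helper name and plain file I/O are illustrative, not part of gyp; unlike the generated rule, which only emits two batches and errors out past 2000 files, this loop handles any count):

import subprocess

def concatenate_deps(d_files, all_deps_path, batch_size=1000):
    # Truncate the output file, then append each batch of .d files to it,
    # mirroring the Makefile's $(wordlist 1,1000,...) / $(wordlist 1001,2000,...) split.
    open(all_deps_path, 'w').close()
    with open(all_deps_path, 'a') as out:
        for start in range(0, len(d_files), batch_size):
            subprocess.check_call(['cat'] + d_files[start:start + batch_size],
                                  stdout=out)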
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..b110a52
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,5 @@
+henrika@webrtc.org
+niklas.enbom@webrtc.org
+andrew@webrtc.org
+tina.legrand@webrtc.org
+tommi@webrtc.org
\ No newline at end of file
diff --git a/PATENTS b/PATENTS
new file mode 100644
index 0000000..5cb83ec
--- /dev/null
+++ b/PATENTS
@@ -0,0 +1 @@
+Refer to src/PATENTS.
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
new file mode 100644
index 0000000..d290309
--- /dev/null
+++ b/PRESUBMIT.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+def CheckChangeOnUpload(input_api, output_api):
+  webrtc_license_header = (
+      r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
+        r'All Rights Reserved\.\n'
+      r'.*?\n'
+      r'.*? Use of this source code is governed by a BSD-style license\n'
+      r'.*? that can be found in the LICENSE file in the root of the source\n'
+      r'.*? tree\. An additional intellectual property rights grant can be '
+        r'found\n'
+      r'.*? in the file PATENTS\.  All contributing project authors may\n'
+      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
+  ) % {
+      'year': input_api.time.strftime('%Y'),
+  }
+
+  results = []
+  # Ideally, maxlen would be 80.
+  results.extend(input_api.canned_checks.CheckLongLines(
+      input_api, output_api, maxlen=95))
+  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckLicense(
+      input_api, output_api, webrtc_license_header))
+
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
+  return results
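
Editorial note: the upload check above builds a license-header regular expression with the current year substituted in and hands it to the canned CheckLicense check. A minimal stand-alone sketch of the same year substitution and matching, shortened to the first header line for brevity (the function name and direct re.match call are illustrative, not the depot_tools API):

import re
import time

def has_webrtc_copyright_line(contents, year=None):
    # Substitute the year into the pattern the same way CheckChangeOnUpload does.
    year = year or time.strftime('%Y')
    header_re = (r'.*? Copyright \(c\) %s The WebRTC project authors\. '
                 r'All Rights Reserved\.\n' % year)
    return re.match(header_re, contents) is not None

sample = '# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.\n'
print(has_webrtc_copyright_line(sample, year='2012'))  # True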
diff --git a/WATCHLISTS b/WATCHLISTS
new file mode 100644
index 0000000..d277712
--- /dev/null
+++ b/WATCHLISTS
@@ -0,0 +1,111 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Inspired by chromium.org:
+# http://dev.chromium.org/developers/contributing-code/watchlists
+
+{
+  'WATCHLIST_DEFINITIONS': {
+    'this_file': {
+      'filepath': '^WATCHLISTS$',
+    },
+    'all_src': {
+      'filepath': 'src/.*',
+    },
+    'root_files': {
+      # src/build/ and non-recursive contents of ./ and src/
+      'filepath': '^[^/]*$|src/[^/]*$|src/build/.*',
+    },
+    'documented_interfaces': {
+      'filepath': 'src/[^/]*\.h$|'\
+                  'src/video_engine/main/interface/.*|'\
+                  'src/voice_engine/main/interface/.*',
+    },
+    'build_files': {
+      'filepath': '\.gyp$|\.gypi$|Android\.mk$',
+    },
+    'java_files': {
+      'filepath': '\.java$|\.xml$',
+    },
+    'video_engine': {
+      'filepath': 'src/video_engine/.*',
+    },
+    'voice_engine': {
+      'filepath': 'src/voice_engine/.*',
+    },
+    'common_audio': {
+      'filepath': 'src/common_audio/.*',
+    },
+    'video_capture': {
+      'filepath': 'src/modules/video_capture/.*',
+    },
+    'video_render': {
+      'filepath': 'src/modules/video_render/.*',
+    },
+    'audio_device': {
+      'filepath': 'src/modules/audio_device/.*',
+    },
+    'audio_coding': {
+      'filepath': 'src/modules/audio_coding/.*',
+    },
+    'NetEQ': {
+      'filepath': 'src/modules/audio_coding/NetEQ/.*',
+    },
+    'audio_processing': {
+      'filepath': 'src/modules/audio_processing/.*',
+    },
+    'video_codecs': {
+      'filepath': 'src/modules/video_coding/codecs/.*',
+    },
+    'video_coding': {
+      'filepath': 'src/modules/video_coding/.*',
+    },
+    'rtp_rtcp': {
+      'filepath': 'src/modules/rtp_rtcp/.*'
+    },
+    'system_wrappers': {
+      'filepath': 'src/system_wrappers/.*',
+    },
+  },
+
+  'WATCHLISTS': {
+    'this_file': [''],
+    'all_src': ['tterriberry@mozilla.com',
+                'giles@mozilla.com'],
+    'root_files': ['andrew@webrtc.org',
+                   'niklas.enbom@webrtc.org'],
+    'documented_interfaces': ['interface-changes@webrtc.org',
+                              'rwolff@gocast.it'],
+    'build_files': ['leozwang@webrtc.org'],
+    'java_files': ['leozwang@webrtc.org'],
+    'common_audio': ['bjornv@webrtc.org',
+                     'andrew@webrtc.org'],
+    'video_engine': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org'],
+    'voice_engine': ['henrika@webrtc.org'],
+    'video_capture': ['mflodman@webrtc.org',
+                      'perkj@webrtc.org',
+                      'leozwang@webrtc.org'],
+    'video_render': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_device': ['henrika@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_coding': ['tina.legrand@webrtc.org'],
+    'NetEQ': ['henrik.lundin@webrtc.org'],
+    'audio_processing': ['andrew@webrtc.org',
+                         'bjornv@webrtc.org'],
+    'video_codecs': ['henrik.lundin@webrtc.org'],
+    'video_coding': ['stefan@webrtc.org'],
+    'rtp_rtcp': ['mflodman@webrtc.org',
+                 'pwestin@webrtc.org'],
+    'system_wrappers': ['mflodman@webrtc.org',
+                        'henrika@webrtc.org',
+                        'andrew@webrtc.org'],
+  },
+}
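
Editorial note: each entry in WATCHLIST_DEFINITIONS is an un-anchored regular expression over changed file paths, and WATCHLISTS maps the matching entries to the addresses to notify. A minimal sketch of that lookup, assuming a trimmed-down copy of the dictionaries above (the matching loop is illustrative, not the depot_tools implementation):

import re

def watchers_for(path, definitions, watchlists):
    # Collect every watcher whose watchlist's 'filepath' regex matches the path.
    watchers = set()
    for name, emails in watchlists.items():
        if re.search(definitions[name]['filepath'], path):
            watchers.update(emails)
    return sorted(watchers)

definitions = {'rtp_rtcp': {'filepath': 'src/modules/rtp_rtcp/.*'}}
watchlists = {'rtp_rtcp': ['mflodman@webrtc.org', 'pwestin@webrtc.org']}
print(watchers_for('src/modules/rtp_rtcp/source/rtp_sender.cc',
                   definitions, watchlists))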
diff --git a/android-webrtc.mk b/android-webrtc.mk
new file mode 100644
index 0000000..cd495b0
--- /dev/null
+++ b/android-webrtc.mk
@@ -0,0 +1,46 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# These defines will apply to all source files
+# Think again before changing it
+MY_WEBRTC_COMMON_DEFS := \
+    '-DWEBRTC_TARGET_PC' \
+    '-DWEBRTC_LINUX' \
+    '-DWEBRTC_THREAD_RR' \
+    '-DWEBRTC_CLOCK_TYPE_REALTIME' \
+    '-DWEBRTC_ANDROID'
+#    The following macros are used by modules;
+#    we might need to re-organize them
+#    '-DWEBRTC_ANDROID_OPENSLES' [module audio_device]
+#    '-DNETEQ_VOICEENGINE_CODECS' [module audio_coding neteq]
+#    '-DWEBRTC_MODULE_UTILITY_VIDEO' [module media_file] [module utility]
+ifeq ($(TARGET_ARCH),arm)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM'
+#    '-DWEBRTC_DETECT_ARM_NEON' # only used in a build configuration without Neon
+# TODO(kma): figure out if the above define could be moved to NDK build only.
+
+# TODO(kma): test if the code under next two macros works with generic GCC compilers
+ifeq ($(ARCH_ARM_HAVE_NEON),true)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM_NEON'
+MY_ARM_CFLAGS_NEON := \
+    -flax-vector-conversions
+endif
+
+ifneq (,$(filter '-DWEBRTC_DETECT_ARM_NEON' '-DWEBRTC_ARCH_ARM_NEON', \
+    $(MY_WEBRTC_COMMON_DEFS)))
+WEBRTC_BUILD_NEON_LIBS := true
+endif
+
+ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)
+MY_WEBRTC_COMMON_DEFS += \
+    '-DWEBRTC_ARCH_ARM_V7A'
+endif
+
+endif # ifeq ($(TARGET_ARCH),arm)
\ No newline at end of file
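
Editorial note: WEBRTC_BUILD_NEON_LIBS is set whenever either Neon define survives into MY_WEBRTC_COMMON_DEFS, via the $(filter ...) test above. A small Python sketch of that decision, with illustrative names (the quoting mirrors how the defines are written in the makefile):

def should_build_neon_libs(common_defs):
    # True when either Neon-related define is present in the common defines.
    neon_defines = ("'-DWEBRTC_DETECT_ARM_NEON'", "'-DWEBRTC_ARCH_ARM_NEON'")
    return any(define in common_defs for define in neon_defines)

defs = ["'-DWEBRTC_TARGET_PC'", "'-DWEBRTC_ARCH_ARM'", "'-DWEBRTC_ARCH_ARM_NEON'"]
print(should_build_neon_libs(defs))  # True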
diff --git a/build/README.chromium b/build/README.chromium
new file mode 100644
index 0000000..012df35
--- /dev/null
+++ b/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+  common.vsprops
+    Not used anymore. No-op. Kept for compatibility with current projects.
+
+  debug.vsprops
+    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
+
+  external_code.vsprops
+    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
+
+  output_dll_copy.rules
+    Run to enable automatic copying of DLLs when they appear as input files in a vcproj project.
+
+  release.vsprops
+    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.
diff --git a/build/all.gyp b/build/all.gyp
new file mode 100644
index 0000000..3934ad2
--- /dev/null
+++ b/build/all.gyp
@@ -0,0 +1,607 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../chrome/browser/sync/tools/sync_tools.gyp:*',
+        '../chrome/chrome.gyp:*',
+        '../content/content.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../ui/ui.gyp:*',
+        '../gpu/gpu.gyp:*',
+        '../gpu/demos/demos.gyp:*',
+        '../gpu/tools/tools.gyp:*',
+        '../ipc/ipc.gyp:*',
+        '../jingle/jingle.gyp:*',
+        '../media/media.gyp:*',
+        '../net/net.gyp:*',
+        '../ppapi/ppapi.gyp:*',
+        '../ppapi/ppapi_internal.gyp:*',
+        '../printing/printing.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../skia/skia.gyp:*',
+        '../sql/sql.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/bzip2/bzip2.gyp:*',
+        '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+        '../third_party/cld/cld.gyp:*',
+        '../third_party/codesighs/codesighs.gyp:*',
+        '../third_party/ffmpeg/ffmpeg.gyp:*',
+        '../third_party/iccjpeg/iccjpeg.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libpng/libpng.gyp:*',
+        '../third_party/libwebp/libwebp.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/libxslt/libxslt.gyp:*',
+        '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+        '../third_party/mesa/mesa.gyp:*',
+        '../third_party/modp_b64/modp_b64.gyp:*',
+        '../third_party/npapi/npapi.gyp:*',
+        '../third_party/ots/ots.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../v8/tools/gyp/v8.gyp:*',
+        '../webkit/support/webkit_support.gyp:*',
+        '../webkit/webkit.gyp:*',
+        'util/build_util.gyp:*',
+        'temp_gyp/googleurl.gyp:*',
+        '<(libjpeg_gyp_path):*',
+      ],
+      'conditions': [
+        ['os_posix==1 and OS!="android"', {
+          'dependencies': [
+            '../third_party/yasm/yasm.gyp:*#host',
+            '../cloud_print/virtual_driver/virtual_driver_posix.gyp:*',
+           ],
+        }],
+        ['OS=="mac" or OS=="win"', {
+          'dependencies': [
+            '../third_party/nss/nss.gyp:*',
+           ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../third_party/ocmock/ocmock.gyp:*',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../dbus/dbus.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+          ],
+          'conditions': [
+            ['branding=="Chrome"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_packages_<(channel)',
+              ],
+            }],
+          ],
+        }],
+        ['use_wayland==1', {
+          'dependencies': [
+            '../ui/wayland/wayland.gyp:*',
+          ],
+        }],
+        ['toolkit_uses_gtk==1', {
+          'dependencies': [
+            '../tools/gtk_clipboard_dump/gtk_clipboard_dump.gyp:*',
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'conditions': [
+            ['win_use_allocator_shim==1', {
+              'dependencies': [
+                '../base/allocator/allocator.gyp:*',
+              ],
+            }],
+          ],
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+            '../chrome_frame/chrome_frame.gyp:*',
+            '../cloud_print/virtual_driver/virtual_driver.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../rlz/rlz.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+            '../third_party/angle/src/build_angle.gyp:*',
+            '../third_party/bsdiff/bsdiff.gyp:*',
+            '../third_party/bspatch/bspatch.gyp:*',
+            '../third_party/gles2_book/gles2_book.gyp:*',
+            '../tools/memory_watcher/memory_watcher.gyp:*',
+          ],
+        }, {
+          'dependencies': [
+            '../third_party/libevent/libevent.gyp:*',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/views/views.gyp:*',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:*',
+            '../ui/aura_shell/aura_shell.gyp:*',
+          ],
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting.gyp:*',
+          ],
+        }],
+        ['use_openssl==0', {
+          'dependencies': [
+            '../net/third_party/nss/ssl.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All
+    {
+      'target_name': 'All_syzygy',
+      'type': 'none',
+      'conditions': [
+        ['OS=="win" and fastbuild==0', {
+            'dependencies': [
+              '../chrome/installer/mini_installer_syzygy.gyp:*',
+            ],
+          },
+        ],
+      ],
+    }, # target_name: All_syzygy
+    {
+      'target_name': 'chromium_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../chrome/chrome.gyp:browser_tests',
+        '../chrome/chrome.gyp:interactive_ui_tests',
+        '../chrome/chrome.gyp:safe_browsing_tests',
+        '../chrome/chrome.gyp:sync_integration_tests',
+        '../chrome/chrome.gyp:sync_unit_tests',
+        '../chrome/chrome.gyp:ui_tests',
+        '../chrome/chrome.gyp:unit_tests',
+        '../content/content.gyp:content_browsertests',
+        '../content/content.gyp:content_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../ui/ui.gyp:gfx_unittests',
+        '../gpu/gpu.gyp:gpu_unittests',
+        '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+        '../ipc/ipc.gyp:ipc_tests',
+        '../jingle/jingle.gyp:jingle_unittests',
+        '../media/media.gyp:media_unittests',
+        '../net/net.gyp:net_unittests',
+        '../printing/printing.gyp:printing_unittests',
+        '../remoting/remoting.gyp:remoting_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+        '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+        'temp_gyp/googleurl.gyp:googleurl_unittests',
+      ],
+      'conditions': [
+        ['OS=="win"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:mini_installer_test',
+            # mini_installer_tests depends on mini_installer. This should be
+            # defined in installer.gyp.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+            '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:copy_TestNetscapePlugIn',
+            '../ui/views/views.gyp:views_unittests',
+            # TODO(nsylvain) ui_tests.exe depends on test_shell_common.
+            # This should:
+            # 1) not be the case. OR.
+            # 2) be expressed in the ui tests dependencies.
+            '../webkit/webkit.gyp:test_shell_common',
+           ],
+        }],
+      ],
+    }, # target_name: chromium_builder_tests
+    {
+      'target_name': 'chromium_2010_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        'chromium_builder_tests',
+      ],
+    }, # target_name: chromium_2010_builder_tests
+    {
+      'target_name': 'chromium_builder_nacl_win_integration',
+      'type': 'none',
+      'dependencies': [
+        'chromium_builder_qa', # needed for pyauto
+        'chromium_builder_tests',
+      ],
+    }, # target_name: chromium_builder_nacl_win_integration
+    {
+      'target_name': 'chromium_builder_perf',
+      'type': 'none',
+      'dependencies': [
+        'chromium_builder_qa', # needed for pyauto
+        '../chrome/chrome.gyp:performance_ui_tests',
+        '../chrome/chrome.gyp:plugin_tests',
+        '../chrome/chrome.gyp:sync_performance_tests',
+        '../chrome/chrome.gyp:ui_tests',
+      ],
+    }, # target_name: chromium_builder_perf
+    {
+      'target_name': 'chromium_gpu_builder',
+      'type': 'none',
+      'dependencies': [
+        '../chrome/chrome.gyp:gpu_tests',
+        '../chrome/chrome.gyp:performance_ui_tests',
+        '../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:DumpRenderTree',
+      ],
+    }, # target_name: chromium_gpu_builder
+    {
+      'target_name': 'chromium_builder_qa',
+      'type': 'none',
+      'dependencies': [
+        '../chrome/chrome.gyp:chromedriver',
+      ],
+      'conditions': [
+        ['OS=="mac" or OS=="win" or (os_posix==1 and target_arch==python_arch)', {
+          'dependencies': [
+            '../chrome/chrome.gyp:pyautolib',
+          ],
+        }],
+      ],
+    }, # target_name: chromium_builder_qa
+  ],
+  'conditions': [
+    ['OS=="mac"', {
+      'targets': [
+        {
+          # Target to build everything plus the dmg.  We don't put the dmg
+          # in the All target because developers really don't need it.
+          'target_name': 'all_and_dmg',
+          'type': 'none',
+          'dependencies': [
+            'All',
+            '../chrome/chrome.gyp:build_app_dmg',
+          ],
+        },
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder_dbg',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:safe_browsing_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:sync_unit_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../ui/ui.gyp:gfx_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_rel',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:plugin_tests',
+            '../chrome/chrome.gyp:safe_browsing_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:sync_unit_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../ui/ui.gyp:gfx_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+            '../net/net.gyp:net_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_valgrind_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../chrome/chrome.gyp:safe_browsing_tests',
+            '../chrome/chrome.gyp:sync_unit_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../content/content.gyp:content_unittests',
+            '../ui/ui.gyp:gfx_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+      ],  # targets
+    }], # OS="mac"
+    ['OS=="win"', {
+      'targets': [
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:mini_installer_test',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:plugin_tests',
+            '../chrome/chrome.gyp:safe_browsing_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:sync_unit_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            # mini_installer_tests depends on mini_installer. This should be
+            # defined in installer.gyp.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_net_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_perftests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_reliability_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_tests',
+            '../chrome_frame/chrome_frame.gyp:chrome_frame_unittests',
+            '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../ui/ui.gyp:gfx_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../third_party/WebKit/Source/WebKit/chromium/WebKit.gyp:copy_TestNetscapePlugIn',
+            '../ui/views/views.gyp:views_unittests',
+            # TODO(nsylvain) ui_tests.exe depends on test_shell_common.
+            # This should:
+            # 1) not be the case. OR.
+            # 2) be expressed in the ui tests dependencies.
+            '../webkit/webkit.gyp:test_shell_common',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../content/content.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+        {
+          'target_name': 'webkit_builder_win',
+          'type': 'none',
+          'dependencies': [
+            '../webkit/webkit.gyp:test_shell',
+            '../webkit/webkit.gyp:test_shell_tests',
+            '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+            '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+          ],
+        },
+      ],  # targets
+      'conditions': [
+        ['branding=="Chrome"', {
+          'targets': [
+            {
+              'target_name': 'chrome_official_builder',
+              'type': 'none',
+              'dependencies': [
+                '../chrome/chrome.gyp:chromedriver',
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:crash_service_win64',
+                '../chrome/chrome.gyp:policy_templates',
+                '../chrome/chrome.gyp:pyautolib',
+                '../chrome/chrome.gyp:reliability_tests',
+                '../chrome/chrome.gyp:automated_ui_tests',
+                '../chrome/installer/mini_installer.gyp:mini_installer',
+                '../chrome_frame/chrome_frame.gyp:npchrome_frame',
+                '../courgette/courgette.gyp:courgette',
+                '../courgette/courgette.gyp:courgette64',
+                '../cloud_print/virtual_driver/virtual_driver.gyp:virtual_driver',
+                '../remoting/remoting.gyp:webapp_it2me',
+                '../third_party/adobe/flash/flash_player.gyp:flash_player',
+              ],
+              'conditions': [
+                ['internal_pdf', {
+                  'dependencies': [
+                    '../pdf/pdf.gyp:pdf',
+                  ],
+                }], # internal_pdf
+              ]
+            },
+          ], # targets
+        }], # branding=="Chrome"
+       ], # conditions
+    }], # OS="win"
+    ['chromeos==1', {
+      'targets': [
+        {
+          'target_name': 'chromeos_builder',
+          'type': 'none',
+          'sources': [
+            # TODO(bradnelson): This is here to work around gyp issue 137.
+            #     Remove this sources list when that issue has been fixed.
+            'all.gyp',
+          ],
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_ui_tests',
+            '../chrome/chrome.gyp:safe_browsing_tests',
+            '../chrome/chrome.gyp:sync_unit_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../content/content.gyp:content_browsertests',
+            '../content/content.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../dbus/dbus.gyp:dbus_unittests',
+            '../ui/ui.gyp:gfx_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:ffmpeg_tests',
+            '../media/media.gyp:media_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../ui/views/views.gyp:views_unittests',
+            'temp_gyp/googleurl.gyp:googleurl_unittests',
+          ],
+        },
+      ],  # targets
+    }], # "chromeos==1"
+    ['use_aura==1', {
+      'targets': [
+        {
+          'target_name': 'aura_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:unit_tests',
+            '../chrome/chrome.gyp:ui_tests',
+            '../ui/aura_shell/aura_shell.gyp:aura_shell_exe',
+            '../ui/aura_shell/aura_shell.gyp:aura_shell_unittests',
+            '../ui/aura/aura.gyp:*',
+            '../ui/gfx/compositor/compositor.gyp:*',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_unittests',
+            '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              # Remove this when we have the real compositor.
+              'copies': [
+                {
+                  'destination': '<(PRODUCT_DIR)',
+                  'files': ['../third_party/directxsdk/files/dlls/D3DX10d_43.dll']
+                },
+              ],
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['OS=="linux"', {
+              # Tests that currently only work on Linux.
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../chrome/chrome.gyp:sync_unit_tests',
+                '../content/content.gyp:content_unittests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../sql/sql.gyp:sql_unittests',
+                '../ui/ui.gyp:gfx_unittests',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "use_aura==1"
+  ], # conditions
+}
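
Editorial note: the 'All' target's dependency list is the unconditional entries plus whatever the matching 'conditions' blocks contribute for the current platform variables. A toy Python sketch of that selection, assuming gyp-style condition strings (an illustration of the data flow, not gyp's real evaluator):

def resolve_dependencies(base_deps, conditions, variables):
    # Append the dependencies of every condition whose expression is true
    # under the given variable bindings.
    deps = list(base_deps)
    for expr, extra in conditions:
        if eval(expr, {}, variables):
            deps.extend(extra.get('dependencies', []))
    return deps

variables = {'OS': 'linux', 'use_aura': 0, 'os_posix': 1}
conditions = [
    ('OS=="linux"', {'dependencies': ['../sandbox/sandbox.gyp:*']}),
    ('use_aura==1', {'dependencies': ['../ui/aura/aura.gyp:*']}),
]
print(resolve_dependencies(['../base/base.gyp:*'], conditions, variables))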
diff --git a/build/all_android.gyp b/build/all_android.gyp
new file mode 100644
index 0000000..7e0d0f4
--- /dev/null
+++ b/build/all_android.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is the all.gyp file for Android, kept separate to prevent breakage in
+# Android and on other platforms; it will be churning a lot in the short term
+# and will eventually be merged into all.gyp.
+
+{
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        'util/build_util.gyp:*',
+        'android_builder_tests',
+      ],
+    }, # target_name: All
+    {
+      # The current list of tests for android.  This is temporary
+      # until the full set is supported.  If adding a new test here,
+      # please also add it to build/android/run_tests.py
+      'target_name': 'android_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../ipc/ipc.gyp:ipc_tests',
+        '../net/net.gyp:net_unittests',
+      ],
+    },
+    { 
+      # Experimental / in-progress targets that are expected to fail.
+      'target_name': 'android_experimental',
+      'type': 'none',
+      'dependencies': [
+        '../webkit/webkit.gyp:pull_in_webkit_unit_tests',
+        '../webkit/webkit.gyp:pull_in_DumpRenderTree',
+      ],
+    },
+  ],  # targets
+}
diff --git a/build/android/android_commands.py b/build/android/android_commands.py
new file mode 100755
index 0000000..007d040
--- /dev/null
+++ b/build/android/android_commands.py
@@ -0,0 +1,780 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+
+Usage:
+  python android_commands.py wait-for-pm
+"""
+
+import collections
+import datetime
+import logging
+import optparse
+import os
+import pexpect
+import re
+import subprocess
+import sys
+import tempfile
+import time
+
+# adb_interface.py is under ../../third_party/android/testrunner/
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
+   '..', 'third_party', 'android', 'testrunner'))
+import adb_interface
+import cmd_helper
+import errors  # errors.py is under ../../third_party/android/testrunner/
+from run_tests_helper import IsRunningAsBuildbot
+
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start end with pexpect,
+# see http://www.noah.org/python/pexpect/#doc for explanation why.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# This only works for single core devices.
+SCALING_GOVERNOR = '/sys/devices/system/cpu/cpu0/cpufreq/scaling_governor'
+DROP_CACHES = '/proc/sys/vm/drop_caches'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+BOOT_COMPLETE_RE = re.compile(
+    re.escape('android.intent.action.MEDIA_MOUNTED path: /mnt/sdcard')
+    + '|' + re.escape('PowerManagerService: bootCompleted'))
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+KEYCODE_BACK = 4
+
+
+def GetEmulators():
+  """Returns a list of emulators.  Does not filter by status (e.g. offline).
+
+  Both of the devices starting with 'emulator' in the output below will be returned:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+    emulator-5558   device
+  """
+  re_device = re.compile('^emulator-[0-9]+', re.MULTILINE)
+  devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+  return devices
+
+
+def GetAttachedDevices():
+  """Returns a list of attached, online android devices.
+
+  If a preferred device has been set with ANDROID_SERIAL, it will be first in
+  the returned list.
+
+  Example output:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+  """
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+  devices = re_device.findall(cmd_helper.GetCmdOutput(['adb', 'devices']))
+  preferred_device = os.environ.get("ANDROID_SERIAL")
+  if preferred_device in devices:
+    devices.remove(preferred_device)
+    devices.insert(0, preferred_device)
+  return devices
+
+
+def _GetHostFileInfo(file_name):
+  """Returns a tuple containing size and modified UTC time for file_name."""
+  # The time accuracy on the device is only to the minute, so remove the seconds
+  # and microseconds from the host results.
+  utc_time = datetime.datetime.utcfromtimestamp(os.path.getmtime(file_name))
+  time_delta = datetime.timedelta(seconds=utc_time.second,
+                                  microseconds=utc_time.microsecond)
+  return os.path.getsize(file_name), utc_time - time_delta
+
+
+def ListHostPathContents(path):
+  """Lists files in all subdirectories of |path|.
+
+  Args:
+    path: The path to list.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...}.
+  """
+  if os.path.isfile(path):
+    return {os.path.basename(path): _GetHostFileInfo(path)}
+  ret = {}
+  for root, dirs, files in os.walk(path):
+    for d in dirs:
+      if d.startswith('.'):
+        dirs.remove(d)  # Prune the dir for subsequent iterations.
+    for f in files:
+      if f.startswith('.'):
+        continue
+      full_file_name = os.path.join(root, f)
+      file_name = os.path.relpath(full_file_name, path)
+      ret[file_name] = _GetHostFileInfo(full_file_name)
+  return ret
+
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+  """Gets a list of files from `ls` command output.
+
+  Python's os.walk isn't used because it doesn't work over adb shell.
+
+  Args:
+    path: The path to list.
+    ls_output: A list of lines returned by an `ls -lR` command.
+    re_file: A compiled regular expression which parses a line into named groups
+        consisting of at minimum "filename", "date", "time", "size" and
+        optionally "timezone".
+    utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+        2-digit string giving the number of UTC offset hours, and MM is a
+        2-digit string giving the number of UTC offset minutes. If the input
+        utc_offset is None, will try to look for the value of "timezone" if it
+        is specified in re_file.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...} where:
+      name: The file name relative to |path|'s directory.
+      size: The file size in bytes (0 for directories).
+      lastmod: The file last modification date in UTC.
+  """
+  re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+  path_dir = os.path.dirname(path)
+
+  current_dir = ''
+  files = {}
+  for line in ls_output:
+    directory_match = re_directory.match(line)
+    if directory_match:
+      current_dir = directory_match.group('dir')
+      continue
+    file_match = re_file.match(line)
+    if file_match:
+      filename = os.path.join(current_dir, file_match.group('filename'))
+      if filename.startswith(path_dir):
+        filename = filename[len(path_dir)+1:]
+      lastmod = datetime.datetime.strptime(
+          file_match.group('date') + ' ' + file_match.group('time')[:5],
+          '%Y-%m-%d %H:%M')
+      if not utc_offset and 'timezone' in re_file.groupindex:
+        utc_offset = file_match.group('timezone')
+      if isinstance(utc_offset, str) and len(utc_offset) == 5:
+        utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+                                       minutes=int(utc_offset[3:5]))
+        if utc_offset[0:1] == '-':
+          utc_delta = -utc_delta
+        lastmod -= utc_delta
+      files[filename] = (int(file_match.group('size')), lastmod)
+  return files
+
+
+def GetLogTimestamp(log_line):
+  """Returns the timestamp of the given |log_line|."""
+  try:
+    return datetime.datetime.strptime(log_line[:18], '%m-%d %H:%M:%S.%f')
+  except (ValueError, IndexError):
+    logging.critical('Error reading timestamp from ' + log_line)
+    return None
+
+
+class AndroidCommands(object):
+  """Helper class for communicating with Android device via adb.
+
+  Args:
+    device: If given, adb commands are only sent to the device with this ID.
+        Otherwise commands are sent to all attached devices.
+    wait_for_pm: If true, issues an adb wait-for-device command.
+  """
+
+  def __init__(self, device=None, wait_for_pm=False):
+    self._adb = adb_interface.AdbInterface()
+    if device:
+      self._adb.SetTargetSerial(device)
+    if wait_for_pm:
+      self.WaitForDevicePm()
+    self._logcat = None
+    self._original_governor = None
+    self._pushed_files = []
+
+  def Adb(self):
+    """Returns our AdbInterface to avoid us wrapping all its methods."""
+    return self._adb
+
+  def WaitForDevicePm(self):
+    """Blocks until the device's package manager is available.
+
+    To work around http://b/5201039, we restart the shell and retry if the
+    package manager isn't back after 120 seconds.
+
+    Raises:
+      errors.WaitForResponseTimedOutError after max retries reached.
+    """
+    last_err = None
+    retries = 3
+    while retries:
+      try:
+        self._adb.WaitForDevicePm()
+        return  # Success
+      except errors.WaitForResponseTimedOutError as e:
+        last_err = e
+        logging.warning('Restarting and retrying after timeout: %s' % str(e))
+        retries -= 1
+        self.RestartShell()
+    raise last_err  # Only reached after max retries, re-raise the last error.
+
+  def SynchronizeDateTime(self):
+    """Synchronize date/time between host and device."""
+    self._adb.SendShellCommand('date -u %f' % time.time())
+
+  def RestartShell(self):
+    """Restarts the shell on the device. Does not block for it to return."""
+    self.RunShellCommand('stop')
+    self.RunShellCommand('start')
+
+  def Reboot(self, full_reboot=True):
+    """Reboots the device and waits for the package manager to return.
+
+    Args:
+      full_reboot: Whether to fully reboot the device or just restart the shell.
+    """
+    # TODO(torne): hive can't reboot the device either way without breaking the
+    # connection; work out if we can handle this better
+    if os.environ.get('USING_HIVE'):
+      logging.warning('Ignoring reboot request as we are on hive')
+      return
+    if full_reboot:
+      self._adb.SendCommand('reboot')
+    else:
+      self.RestartShell()
+    self.WaitForDevicePm()
+    self.StartMonitoringLogcat(timeout=120)
+    self.WaitForLogMatch(BOOT_COMPLETE_RE)
+    self.UnlockDevice()
+
+  def Uninstall(self, package):
+    """Uninstalls the specified package from the device.
+
+    Args:
+      package: Name of the package to remove.
+    """
+    uninstall_command = 'uninstall %s' % package
+
+    logging.info('>>> $' + uninstall_command)
+    self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+  def Install(self, package_file_path):
+    """Installs the specified package to the device.
+
+    Args:
+      package_file_path: Path to .apk file to install.
+    """
+
+    assert os.path.isfile(package_file_path)
+
+    install_command = 'install %s' % package_file_path
+
+    logging.info('>>> $' + install_command)
+    self._adb.SendCommand(install_command, timeout_time=2*60)
+
+  # It is tempting to turn this function into a generator, however this is not
+  # possible without using a private (local) adb_shell instance (to ensure no
+  # other command interleaves usage of it), which would defeat the main aim of
+  # being able to reuse the adb shell instance across commands.
+  def RunShellCommand(self, command, timeout_time=20, log_result=True):
+    """Send a command to the adb shell and return the result.
+
+    Args:
+      command: String containing the shell command to send. Must not include
+               the single quotes as we use them to escape the whole command.
+      timeout_time: Number of seconds to wait for command to respond before
+        retrying, used by AdbInterface.SendShellCommand.
+      log_result: Boolean to indicate whether we should log the result of the
+                  shell command.
+
+    Returns:
+      list containing the lines of output received from running the command
+    """
+    logging.info('>>> $' + command)
+    if "'" in command: logging.warning(command + " contains ' quotes")
+    result = self._adb.SendShellCommand("'%s'" % command,
+                                        timeout_time).splitlines()
+    if log_result:
+      logging.info('\n>>> '.join(result))
+    return result
+
+  def KillAll(self, process):
+    """Android version of killall, connected via adb.
+
+    Args:
+      process: name of the process to kill off
+
+    Returns:
+      the number of processes killed
+    """
+    pids = self.ExtractPid(process)
+    if pids:
+      self.RunShellCommand('kill ' + ' '.join(pids))
+    return len(pids)
+
+  def StartActivity(self, package, activity,
+                    action='android.intent.action.VIEW', data=None,
+                    extras=None, trace_file_name=None):
+    """Starts |package|'s activity on the device.
+
+    Args:
+      package: Name of package to start (e.g. 'com.android.chrome').
+      activity: Name of activity (e.g. '.Main' or 'com.android.chrome.Main').
+      data: Data string to pass to activity (e.g. 'http://www.example.com/').
+      extras: Dict of extras to pass to activity.
+      trace_file_name: If used, turns on and saves the trace to this file name.
+    """
+    cmd = 'am start -a %s -n %s/%s' % (action, package, activity)
+    if data:
+      cmd += ' -d "%s"' % data
+    if extras:
+      cmd += ' -e'
+      for key in extras:
+        cmd += ' %s %s' % (key, extras[key])
+    if trace_file_name:
+      cmd += ' -S -P ' + trace_file_name
+    self.RunShellCommand(cmd)
+
+  def EnableAdbRoot(self):
+    """Enable root on the device."""
+    self._adb.EnableAdbRoot()
+
+  def CloseApplication(self, package):
+    """Attempt to close down the application, using increasing violence.
+
+    Args:
+      package: Name of the process to kill off, e.g. com.android.chrome
+    """
+    self.RunShellCommand('am force-stop ' + package)
+
+  def ClearApplicationState(self, package):
+    """Closes and clears all state for the given |package|."""
+    self.CloseApplication(package)
+    self.RunShellCommand('rm -r /data/data/%s/cache/*' % package)
+    self.RunShellCommand('rm -r /data/data/%s/files/*' % package)
+    self.RunShellCommand('rm -r /data/data/%s/shared_prefs/*' % package)
+
+  def SendKeyEvent(self, keycode):
+    """Sends keycode to the device.
+
+    Args:
+      keycode: Numeric keycode to send (see "enum" at top of file).
+    """
+    self.RunShellCommand('input keyevent %d' % keycode)
+
+  def PushIfNeeded(self, local_path, device_path):
+    """Pushes |local_path| to |device_path|.
+
+    Works for files and directories. The copy is skipped if the path already
+    exists on the device with the same timestamp and size.
+
+    All pushed files can be removed by calling RemovePushedFiles().
+    """
+    assert os.path.exists(local_path)
+    self._pushed_files.append(device_path)
+
+    # If the path contents are the same, there's nothing to do.
+    local_contents = ListHostPathContents(local_path)
+    device_contents = self.ListPathContents(device_path)
+    # Only compare the size and timestamp when copying a single file, because
+    # the file may have a different name on the device.
+    if os.path.isfile(local_path):
+      assert len(local_contents) == 1
+      is_equal = local_contents.values() == device_contents.values()
+    else:
+      is_equal = local_contents == device_contents
+    if is_equal:
+      logging.info('%s is up-to-date. Skipping file push.' % device_path)
+      return
+
+    # They don't match, so remove everything first and then create it.
+    if os.path.isdir(local_path):
+      self.RunShellCommand('rm -r %s' % device_path, timeout_time=2*60)
+      self.RunShellCommand('mkdir -p %s' % device_path)
+
+    # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout of
+    # 60 seconds which isn't sufficient for a lot of users of this method.
+    push_command = 'push %s %s' % (local_path, device_path)
+    logging.info('>>> $' + push_command)
+    output = self._adb.SendCommand(push_command, timeout_time=30*60)
+    # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+    # Errors look like this: "failed to copy  ... "
+    if not re.search('^[0-9]', output):
+      logging.critical('PUSH FAILED: ' + output)
+
+  def GetFileContents(self, filename):
+    """Gets contents from the file specified by |filename|."""
+    return self.RunShellCommand('if [ -f "' + filename + '" ]; then cat "' +
+                                filename + '"; fi')
+
+  def SetFileContents(self, filename, contents):
+    """Writes |contents| to the file specified by |filename|."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(contents)
+      f.flush()
+      self._adb.Push(f.name, filename)
+
+  def RemovePushedFiles(self):
+    """Removes all files pushed with PushIfNeeded() from the device."""
+    for p in self._pushed_files:
+      self.RunShellCommand('rm -r %s' % p, timeout_time=2*60)
+
+  def ListPathContents(self, path):
+    """Lists files in all subdirectories of |path|.
+
+    Args:
+      path: The path to list.
+
+    Returns:
+      A dict of {"name": (size, lastmod), ...}.
+    """
+    # Example output:
+    # /foo/bar:
+    # -rw-r----- 1 user group   102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+    re_file = re.compile('^-(?P<perms>[^\s]+)\s+'
+                         '(?P<user>[^\s]+)\s+'
+                         '(?P<group>[^\s]+)\s+'
+                         '(?P<size>[^\s]+)\s+'
+                         '(?P<date>[^\s]+)\s+'
+                         '(?P<time>[^\s]+)\s+'
+                         '(?P<filename>[^\s]+)$')
+    return _GetFilesFromRecursiveLsOutput(
+        path, self.RunShellCommand('ls -lR %s' % path), re_file,
+        self.RunShellCommand('date +%z')[0])
+
+  def SetupPerformanceTest(self):
+    """Sets up performance tests."""
+    # Disable CPU scaling to reduce noise in tests
+    if not self._original_governor:
+      self._original_governor = self.RunShellCommand('cat ' + SCALING_GOVERNOR)
+      self.RunShellCommand('echo performance > ' + SCALING_GOVERNOR)
+    self.DropRamCaches()
+
+  def TearDownPerformanceTest(self):
+    """Tears down performance tests."""
+    if self._original_governor:
+      self.RunShellCommand('echo %s > %s' % (self._original_governor[0],
+                                             SCALING_GOVERNOR))
+    self._original_governor = None
+
+  def SetJavaAssertsEnabled(self, enable):
+    """Sets or removes the device java assertions property.
+
+    Args:
+      enable: If True the property will be set.
+
+    Returns:
+      True if the file was modified (reboot is required for it to take effect).
+    """
+    # First ensure the desired property is persisted.
+    temp_props_file = tempfile.NamedTemporaryFile()
+    properties = ''
+    if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+      properties = file(temp_props_file.name).read()
+    re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                           r'\s*=\s*all\s*$', re.MULTILINE)
+    if enable != bool(re.search(re_search, properties)):
+      re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                              r'\s*=\s*\w+\s*$', re.MULTILINE)
+      properties = re.sub(re_replace, '', properties)
+      if enable:
+        properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+      file(temp_props_file.name, 'w').write(properties)
+      self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    was_set = 'all' in self.RunShellCommand('getprop ' + JAVA_ASSERT_PROPERTY)
+    if was_set == enable:
+      return False
+
+    self.RunShellCommand('setprop %s "%s"' % (JAVA_ASSERT_PROPERTY,
+                                              enable and 'all' or ''))
+    return True
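+    # Sketch of the persisted property line this method appends when enabling
+    # (assuming JAVA_ASSERT_PROPERTY is 'dalvik.vm.enableassertions', as the
+    # --enable_asserts option help in main() suggests):
+    #   dalvik.vm.enableassertions=all
+    # A reboot is then required for the runtime value to change.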
+
+  def DropRamCaches(self):
+    """Drops the filesystem ram caches for performance testing."""
+    self.RunShellCommand('echo 3 > ' + DROP_CACHES)
+
+  def StartMonitoringLogcat(self, clear=True, timeout=10, logfile=None,
+                            filters=[]):
+    """Starts monitoring the output of logcat, for use with WaitForLogMatch.
+
+    Args:
+      clear: If True the existing logcat output will be cleared, to avoid
+             matching historical output lurking in the log.
+      timeout: How long WaitForLogMatch will wait for the given match
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self.RunShellCommand('logcat -c')
+    args = ['logcat', '-v', 'threadtime']
+    if filters:
+      args.extend(filters)
+    else:
+      args.append('*:v')
+
+    # Spawn logcat and synchronize with it.
+    for _ in range(4):
+      self._logcat = pexpect.spawn('adb', args, timeout=timeout,
+                                   logfile=logfile)
+      self.RunShellCommand('log startup_sync')
+      if self._logcat.expect(['startup_sync', pexpect.EOF,
+                              pexpect.TIMEOUT]) == 0:
+        break
+      self._logcat.close(force=True)
+    else:
+      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
+      sys.exit(1)
+
+  def GetMonitoredLogCat(self):
+    """Returns an "adb logcat" command as created by pexpected.spawn."""
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    return self._logcat
+
+  def WaitForLogMatch(self, search_re):
+    """Blocks until a line containing |line_re| is logged or a timeout occurs.
+
+    Args:
+      search_re: The compiled re to search each line for.
+
+    Returns:
+      The re match object.
+    """
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    logging.info('<<< Waiting for logcat:' + str(search_re.pattern))
+    t0 = time.time()
+    try:
+      while True:
+        # Note this will block for up to the timeout _per log line_, so we need
+        # to calculate the overall timeout remaining since t0.
+        time_remaining = t0 + self._logcat.timeout - time.time()
+        if time_remaining < 0: raise pexpect.TIMEOUT(self._logcat)
+        self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
+        line = self._logcat.match.group(1)
+        search_match = search_re.search(line)
+        if search_match:
+          return search_match
+        logging.info('<<< Skipped Logcat Line:' + str(line))
+    except pexpect.TIMEOUT:
+      raise pexpect.TIMEOUT(
+          'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
+          'to debug)' %
+          (self._logcat.timeout, search_re.pattern))
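+
+  # A hypothetical sketch of pairing StartMonitoringLogcat with WaitForLogMatch
+  # (the pattern below is illustrative only):
+  #   adb_cmds.StartMonitoringLogcat(clear=True)
+  #   adb_cmds.StartActivity('com.android.chrome', '.Main')
+  #   match = adb_cmds.WaitForLogMatch(re.compile('Displayed'))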
+
+  def StartRecordingLogcat(self, clear=True, filters=['*:v']):
+    """Starts recording logcat output to eventually be saved as a string.
+
+    This call should come before some series of tests are run, with either
+    StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self._adb.SendCommand('logcat -c')
+    logcat_command = 'adb logcat -v threadtime %s' % ' '.join(filters)
+    self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+                                           stdout=subprocess.PIPE)
+
+  def StopRecordingLogcat(self):
+    """Stops an existing logcat recording subprocess and returns output.
+
+    Returns:
+      The logcat output as a string or an empty string if logcat was not
+      being recorded at the time.
+    """
+    if not self.logcat_process:
+      return ''
+    # Cannot test the return code directly, as 0 is a possible value.
+    # It is better to read self.logcat_process.stdout before killing it;
+    # otherwise communicate() may return incomplete output due to a broken pipe.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    (output, _) = self.logcat_process.communicate()
+    self.logcat_process = None
+    return output
+
+  def SearchLogcatRecord(self, record, message, thread_id=None, proc_id=None,
+                         log_level=None, component=None):
+    """Searches the specified logcat output and returns results.
+
+    This method searches through the logcat output specified by record for a
+    certain message, narrowing results by matching them against any other
+    specified criteria.  It returns all matching lines as described below.
+
+    Args:
+      record: A string generated by Start/StopRecordingLogcat to search.
+      message: An output string to search for.
+      thread_id: The thread id that is the origin of the message.
+      proc_id: The process that is the origin of the message.
+      log_level: The log level of the message.
+      component: The name of the component that would create the message.
+
+    Returns:
+      A list of dictionaries representing matching entries, each containing keys
+      thread_id, proc_id, log_level, component, and message.
+    """
+    if thread_id:
+      thread_id = str(thread_id)
+    if proc_id:
+      proc_id = str(proc_id)
+    results = []
+    reg = re.compile('(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+                     re.MULTILINE)
+    log_list = reg.findall(record)
+    for (tid, pid, log_lev, comp, msg) in log_list:
+      if ((not thread_id or thread_id == tid) and
+          (not proc_id or proc_id == pid) and
+          (not log_level or log_level == log_lev) and
+          (not component or component == comp) and msg.find(message) > -1):
+        match = dict({'thread_id': tid, 'proc_id': pid,
+                      'log_level': log_lev, 'component': comp,
+                      'message': msg})
+        results.append(match)
+    return results
+
+  def ExtractPid(self, process_name):
+    """Extracts Process Ids for a given process name from Android Shell.
+
+    Args:
+      process_name: name of the process on the device.
+
+    Returns:
+      List of all the process ids (as strings) that match the given name.
+    """
+    pids = []
+    for line in self.RunShellCommand('ps'):
+      data = line.split()
+      try:
+        if process_name in data[-1]:  # name is in the last column
+          pids.append(data[1])  # PID is in the second column
+      except IndexError:
+        pass
+    return pids
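+    # Example of a hypothetical 'ps' output line this loop parses; the PID is
+    # read from the second column and the name from the last column:
+    #   u0_a51 1234 123 512000 45000 ffffffff 00000000 S com.android.chrome
+    # ExtractPid('chrome') would then return ['1234'].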
+
+  def GetIoStats(self):
+    """Gets cumulative disk IO stats since boot (for all processes).
+
+    Returns:
+      Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+      was an error.
+    """
+    # Field definitions.
+    # http://www.kernel.org/doc/Documentation/iostats.txt
+    device = 2
+    num_reads_issued_idx = 3
+    num_reads_merged_idx = 4
+    num_sectors_read_idx = 5
+    ms_spent_reading_idx = 6
+    num_writes_completed_idx = 7
+    num_writes_merged_idx = 8
+    num_sectors_written_idx = 9
+    ms_spent_writing_idx = 10
+    num_ios_in_progress_idx = 11
+    ms_spent_doing_io_idx = 12
+    ms_spent_doing_io_weighted_idx = 13
+
+    for line in self.RunShellCommand('cat /proc/diskstats'):
+      fields = line.split()
+      if fields[device] == 'mmcblk0':
+        return {
+            'num_reads': int(fields[num_reads_issued_idx]),
+            'num_writes': int(fields[num_writes_completed_idx]),
+            'read_ms': int(fields[ms_spent_reading_idx]),
+            'write_ms': int(fields[ms_spent_writing_idx]),
+        }
+    logging.warning('Could not find disk IO stats.')
+    return None
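+    # Worked example with a hypothetical /proc/diskstats line (split on
+    # whitespace, so 'mmcblk0' lands at fields[device]):
+    #   179 0 mmcblk0 1000 50 20000 300 500 10 8000 400 0 600 700
+    # yields {'num_reads': 1000, 'num_writes': 500, 'read_ms': 300,
+    #         'write_ms': 400}.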
+
+  def GetMemoryUsage(self, package):
+    """Returns the memory usage for all processes whose name contains |pacakge|.
+
+    Args:
+      name: A string holding process name to lookup pid list for.
+
+    Returns:
+      Dict of {metric:usage_kb}, summed over all pids associated with |name|.
+      The metric keys retruned are: Size, Rss, Pss, Shared_Clean, Shared_Dirty,
+      Private_Clean, Private_Dirty, Referenced, Swap, KernelPageSize,
+      MMUPageSize.
+    """
+    usage_dict = collections.defaultdict(int)
+    pid_list = self.ExtractPid(package)
+    # We used to use the showmap command, but it is currently broken on
+    # stingray so it's easier to just parse /proc/<pid>/smaps directly.
+    memory_stat_re = re.compile('^(?P<key>\w+):\s+(?P<value>\d+) kB$')
+    for pid in pid_list:
+      for line in self.RunShellCommand('cat /proc/%s/smaps' % pid,
+                                       log_result=False):
+        match = re.match(memory_stat_re, line)
+        if match: usage_dict[match.group('key')] += int(match.group('value'))
+      if not usage_dict or not any(usage_dict.values()):
+        # Presumably the process died between ps and showmap.
+        logging.warning('Could not find memory usage for pid ' + str(pid))
+    return usage_dict
+
+  def UnlockDevice(self):
+    """Unlocks the screen of the device."""
+    # Make sure a menu button event will actually unlock the screen.
+    if IsRunningAsBuildbot():
+      assert self.RunShellCommand('getprop ro.test_harness')[0].strip() == '1'
+    # The following keyevent unlocks the screen if locked.
+    self.SendKeyEvent(KEYCODE_MENU)
+    # If the screen wasn't locked the previous command will bring up the menu,
+    # which this will dismiss. Otherwise this shouldn't change anything.
+    self.SendKeyEvent(KEYCODE_BACK)
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('-w', '--wait_for_pm', action='store_true',
+      default=False, dest='wait_for_pm',
+      help='Waits for Device Package Manager to become available')
+  option_parser.add_option('--enable_asserts', dest='set_asserts',
+      action='store_true', default=None,
+      help='Sets the dalvik.vm.enableassertions property to "all"')
+  option_parser.add_option('--disable_asserts', dest='set_asserts',
+      action='store_false', default=None,
+      help='Removes the dalvik.vm.enableassertions property')
+  options, args = option_parser.parse_args(argv)
+
+  commands = AndroidCommands(wait_for_pm=options.wait_for_pm)
+  if options.set_asserts is not None:
+    if commands.SetJavaAssertsEnabled(options.set_asserts):
+      commands.Reboot(full_reboot=False)
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/base_test_runner.py b/build/android/base_test_runner.py
new file mode 100644
index 0000000..bb0316b
--- /dev/null
+++ b/build/android/base_test_runner.py
@@ -0,0 +1,146 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+
+import android_commands
+from chrome_test_server_spawner import SpawningServer
+from flag_changer import FlagChanger
+import lighttpd_server
+import run_tests_helper
+
+FORWARDER_PATH = '/data/local/tmp/forwarder'
+# These ports must match up with the constants in net/test/test_server.cc
+TEST_SERVER_SPAWNER_PORT = 8001
+TEST_SERVER_PORT = 8002
+TEST_SYNC_SERVER_PORT = 8003
+
+
+class BaseTestRunner(object):
+  """Base class for running tests on a single device."""
+
+  def __init__(self, device):
+    """
+      Args:
+        device: Tests will run on the device of this ID.
+    """
+    self.device = device
+    self.adb = android_commands.AndroidCommands(device=device)
+    # Synchronize date/time between host and device. Otherwise the same file on
+    # the host and the device may have different timestamps, which may cause
+    # AndroidCommands.PushIfNeeded to fail, or a test that compares a timestamp
+    # from an HTTP header against local time to fail.
+    self.adb.SynchronizeDateTime()
+    self._http_server = None
+    self._forwarder = None
+    self._spawning_server = None
+    self._spawner_forwarder = None
+    self._forwarder_device_port = 8000
+    self.forwarder_base_url = ('http://localhost:%d' %
+        self._forwarder_device_port)
+    self.flags = FlagChanger(self.adb)
+
+  def RunTests(self):
+    # TODO(bulach): this should actually do SetUp / RunTestsInternal / TearDown.
+    # Refactor the various subclasses to expose a RunTestsInternal without
+    # any params.
+    raise NotImplementedError
+
+  def SetUp(self):
+    """Called before tests run."""
+    pass
+
+  def TearDown(self):
+    """Called when tests finish running."""
+    self.ShutdownHelperToolsForTestSuite()
+
+  def CopyTestData(self, test_data_paths, dest_dir):
+    """Copies |test_data_paths| list of files/directories to |dest_dir|.
+
+    Args:
+      test_data_paths: A list of files or directories relative to |dest_dir|
+          which should be copied to the device. The paths must exist in
+          |CHROME_DIR|.
+      dest_dir: Absolute path to copy to on the device.
+    """
+    for p in test_data_paths:
+      self.adb.PushIfNeeded(
+          os.path.join(run_tests_helper.CHROME_DIR, p),
+          os.path.join(dest_dir, p))
+
+  def LaunchTestHttpServer(self, document_root, extra_config_contents=None):
+    """Launches an HTTP server to serve HTTP tests.
+
+    Args:
+      document_root: Document root of the HTTP server.
+      extra_config_contents: Extra config contents for the HTTP server.
+    """
+    self._http_server = lighttpd_server.LighttpdServer(
+        document_root, extra_config_contents=extra_config_contents)
+    if self._http_server.StartupHttpServer():
+      logging.info('http server started: http://localhost:%s',
+                   self._http_server.port)
+    else:
+      logging.critical('Failed to start http server')
+    # Root access needed to make the forwarder executable work.
+    self.adb.EnableAdbRoot()
+    self.StartForwarderForHttpServer()
+
+  def StartForwarderForHttpServer(self):
+    """Starts a forwarder for the HTTP server.
+
+    The forwarder forwards HTTP requests and responses between host and device.
+    """
+    # Sometimes the forwarder device port may be already used. We have to kill
+    # all forwarder processes to ensure that the forwarder can be started since
+    # currently we can not associate the specified port to related pid.
+    # TODO(yfriedman/wangxianzhu): This doesn't work as most of the time the
+    # port is in use but the forwarder is already dead. Killing all forwarders
+    # is overly destructive and breaks other tests which make use of forwarders.
+    # if IsDevicePortUsed(self.adb, self._forwarder_device_port):
+    #   self.adb.KillAll('forwarder')
+    self._forwarder = run_tests_helper.ForwardDevicePorts(
+        self.adb, [(self._forwarder_device_port, self._http_server.port)])
+
+  def RestartHttpServerForwarderIfNecessary(self):
+    """Restarts the forwarder if it's not open."""
+    # Checks whether the http server port is in use; if not, restarts the
+    # forwarder.
+    # TODO(dtrainor): This is not always reliable because sometimes the port
+    # will be left open even after the forwarder has been killed.
+    if not run_tests_helper.IsDevicePortUsed(self.adb,
+        self._forwarder_device_port):
+      self.StartForwarderForHttpServer()
+
+  def ShutdownHelperToolsForTestSuite(self):
+    """Shuts down the server and the forwarder."""
+    # Forwarders should be killed before the actual servers they're forwarding
+    # to as they are clients potentially with open connections and to allow for
+    # proper hand-shake/shutdown.
+    if self._forwarder or self._spawner_forwarder:
+      # Kill all forwarders on the device and then kill the process on the host
+      # (if it exists)
+      self.adb.KillAll('forwarder')
+      if self._forwarder:
+        self._forwarder.kill()
+      if self._spawner_forwarder:
+        self._spawner_forwarder.kill()
+    if self._http_server:
+      self._http_server.ShutdownHttpServer()
+    if self._spawning_server:
+      self._spawning_server.Stop()
+    self.flags.Restore()
+
+  def LaunchChromeTestServerSpawner(self):
+    """Launches test server spawner."""
+    self._spawning_server = SpawningServer(TEST_SERVER_SPAWNER_PORT,
+                                          TEST_SERVER_PORT)
+    self._spawning_server.Start()
+    # TODO(yfriedman): Ideally we'll only try to start up a port forwarder if
+    # there isn't one already running but for now we just get an error message
+    # and the existing forwarder still works.
+    self._spawner_forwarder = run_tests_helper.ForwardDevicePorts(
+        self.adb, [(TEST_SERVER_SPAWNER_PORT, TEST_SERVER_SPAWNER_PORT),
+                   (TEST_SERVER_PORT, TEST_SERVER_PORT)])
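+
+
+# A minimal, hypothetical subclass sketch showing the intended BaseTestRunner
+# lifecycle; the class name and shell command below are illustrative only:
+#   class ExampleTestRunner(BaseTestRunner):
+#     def RunTests(self):
+#       self.SetUp()
+#       try:
+#         self.adb.RunShellCommand('echo running tests')
+#       finally:
+#         self.TearDown()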
diff --git a/build/android/buildbot.sh b/build/android/buildbot.sh
new file mode 100755
index 0000000..4483fd1
--- /dev/null
+++ b/build/android/buildbot.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# "compile and run tests" script for the android build of chromium.
+# Intended for use by buildbot.
+# At this time, we only have one bot which is both a builder and
+# tester.  Script assumes it runs in the "build" directory.
+#
+# This script uses buildbot "Annotator" style for steps.
+# This script does not sync the source tree.
+
+set -e
+set -x
+
+# Options in this script.
+BUILD_EXPERIMENTAL_TARGETS=1
+RUN_TESTS=1
+NEED_CLOBBER=0
+JOBS=4   # make -j"${JOBS}"
+
+# If we are a trybot, disable experimental targets and tests.  We
+# eventually want tests on a trybot but emulator launch/restart is not
+# reliable enough yet.
+# TODO(jrg): when setting up a trybot, make sure to add TRYBOT=1 in
+# the environment.
+if [ "${TRYBOT:-0}" = 1 ] ; then
+  echo "Disabling experimental builds and tests since we are a trybot."
+  BUILD_EXPERIMENTAL_TARGETS=0
+  RUN_TESTS=0
+fi
+
+echo "@@@BUILD_STEP cd into source root@@@"
+SRC_ROOT=$(cd "$(dirname $0)/../.."; pwd)
+cd $SRC_ROOT
+
+echo "@@@BUILD_STEP Basic setup@@@"
+export ANDROID_SDK_ROOT=/usr/local/google/android-sdk-linux_x86
+export ANDROID_NDK_ROOT=/usr/local/google/android-ndk-r7
+for mandatory_directory in "${ANDROID_SDK_ROOT}" "${ANDROID_NDK_ROOT}" ; do
+  if [[ ! -d "${mandatory_directory}" ]]; then
+    echo "Directory ${mandatory_directory} does not exist."
+    echo "Build cannot continue."
+    exit 1
+  fi
+done
+
+if [ ! "$BUILDBOT_CLOBBER" = "" ]; then
+  NEED_CLOBBER=1
+fi
+
+## Build and test steps
+
+echo "@@@BUILD_STEP Configure with envsetup.sh@@@"
+. build/android/envsetup.sh
+
+if [ "$NEED_CLOBBER" -eq 1 ]; then
+  echo "@@@BUILD_STEP Clobber@@@"
+  rm -rf "${SRC_ROOT}"/out
+fi
+
+echo "@@@BUILD_STEP android_gyp@@@"
+android_gyp
+
+echo "@@@BUILD_STEP Compile@@@"
+make -j${JOBS}
+
+if [ "${BUILD_EXPERIMENTAL_TARGETS}" = 1 ] ; then
+  # Linking DumpRenderTree appears to hang forever?
+  # EXPERIMENTAL_TARGETS="DumpRenderTree webkit_unit_tests"
+  EXPERIMENTAL_TARGETS="webkit_unit_tests"
+  for target in ${EXPERIMENTAL_TARGETS} ; do
+    echo "@@@BUILD_STEP Experimental Compile $target @@@"
+    set +e
+    make -j4 "${target}"
+    if [ $? -ne 0 ] ; then
+      echo "@@@STEP_WARNINGS@@@"
+    fi
+    set -e
+  done
+fi
+
+if [ "${RUN_TESTS}" = 1 ] ; then
+  echo "@@@BUILD_STEP Run Tests@@@"
+  build/android/run_tests.py -e --xvfb --verbose
+fi
+
+exit 0
diff --git a/build/android/chrome_test_server_spawner.py b/build/android/chrome_test_server_spawner.py
new file mode 100644
index 0000000..85864da
--- /dev/null
+++ b/build/android/chrome_test_server_spawner.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+
+import BaseHTTPServer
+import logging
+import os
+import sys
+import threading
+import time
+import urlparse
+
+# Paths needed to import testserver.
+cr_src = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', '..')
+sys.path.append(os.path.join(cr_src, 'third_party'))
+sys.path.append(os.path.join(cr_src, 'third_party', 'tlslite'))
+sys.path.append(os.path.join(cr_src, 'third_party', 'pyftpdlib', 'src'))
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
+   '..', 'net', 'tools', 'testserver'))
+import testserver
+
+_test_servers = []
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET request.
+  """
+
+  def GetServerType(self, server_type):
+    """Returns the server type to use when starting the test server.
+
+    This function translates the command-line argument into the appropriate
+    numerical constant.
+    # TODO(yfriedman): Do that translation!
+    """
+    if server_type:
+      pass
+    return 0
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action is: %s' % action)
+    if action == '/killserver':
+      # There should only ever be one test server at a time. This may do the
+      # wrong thing if we try and start multiple test servers.
+      _test_servers.pop().Stop()
+    elif action == '/start':
+      logging.info('Handling request to spawn a test webserver')
+      for param in params:
+        logging.info('%s=%s' % (param, params[param][0]))
+      s_type = 0
+      doc_root = None
+      if 'server_type' in params:
+        s_type = self.GetServerType(params['server_type'][0])
+      if 'doc_root' in params:
+        doc_root = params['doc_root'][0]
+      self.webserver_thread = threading.Thread(
+          target=self.SpawnTestWebServer, args=(s_type, doc_root))
+      self.webserver_thread.setDaemon(True)
+      self.webserver_thread.start()
+    self.send_response(200, 'OK')
+    self.send_header('Content-type', 'text/html')
+    self.end_headers()
+    self.wfile.write('<html><head><title>started</title></head></html>')
+    logging.info('Returned OK!!!')
+
+  def SpawnTestWebServer(self, s_type, doc_root):
+    class Options(object):
+      log_to_console = True
+      server_type = s_type
+      port = self.server.test_server_port
+      data_dir = doc_root or 'chrome/test/data'
+      file_root_url = '/files/'
+      cert = False
+      policy_keys = None
+      policy_user = None
+      startup_pipe = None
+    options = Options()
+    logging.info('Listening on %d, type %d, data_dir %s' % (options.port,
+        options.server_type, options.data_dir))
+    testserver.main(options, None, server_list=_test_servers)
+    logging.info('Test-server has died.')
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server.
+  """
+
+  def __init__(self, test_server_spawner_port, test_server_port):
+    logging.info('Creating new spawner %d', test_server_spawner_port)
+    self.server = testserver.StoppableHTTPServer(('', test_server_spawner_port),
+                                                 SpawningServerRequestHandler)
+    self.port = test_server_spawner_port
+    self.server.test_server_port = test_server_port
+
+  def Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    listener_thread = threading.Thread(target=self.Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+    time.sleep(1)
+
+  def Stop(self):
+    self.server.Stop()
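+
+
+# Hypothetical usage sketch (the port numbers are illustrative; they match the
+# defaults in build/android/base_test_runner.py):
+#   spawner = SpawningServer(8001, 8002)
+#   spawner.Start()
+#   ...  # the device issues GET /start and later GET /killserver
+#   spawner.Stop()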
diff --git a/build/android/cmd_helper.py b/build/android/cmd_helper.py
new file mode 100644
index 0000000..901cbe9
--- /dev/null
+++ b/build/android/cmd_helper.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import subprocess
+
+
+def RunCmd(args, cwd=None):
+  """Opens a subprocess to execute a program and returns its return value.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  p = subprocess.Popen(args=args, cwd=cwd)
+  return p.wait()
+
+
+def GetCmdOutput(args, cwd=None):
+  """Open a subprocess to execute a program and returns its output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  p = subprocess.Popen(args=args, cwd=cwd, stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE)
+  stdout, stderr = p.communicate()
+  if stderr:
+    logging.critical(stderr)
+  logging.info(stdout[:4096])  # Truncate output longer than 4k.
+  return stdout
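+
+
+# Illustrative, hypothetical calls; both helpers accept a string or a sequence
+# of program arguments as documented above:
+#   RunCmd(['mkdir', '-p', '/tmp/example_dir'])
+#   version_output = GetCmdOutput(['adb', 'version'])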
diff --git a/build/android/debug_info.py b/build/android/debug_info.py
new file mode 100644
index 0000000..9a836e3
--- /dev/null
+++ b/build/android/debug_info.py
@@ -0,0 +1,202 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collect debug info for a test."""
+
+import datetime
+import logging
+import os
+import re
+import shutil
+import string
+import subprocess
+import tempfile
+
+import cmd_helper
+
+
+TOMBSTONE_DIR = '/data/tombstones/'
+
+
+class GTestDebugInfo(object):
+  """A helper class to get relate debug information for a gtest.
+
+  Args:
+    adb: ADB interface the tests are using.
+    device: Serial# of the Android device in which the specified gtest runs.
+    testsuite_name: Name of the specified gtest.
+    gtest_filter: Test filter used by the specified gtest.
+  """
+
+  def __init__(self, adb, device, testsuite_name, gtest_filter,
+              collect_new_crashes=True):
+    """Initializes the DebugInfo class for a specified gtest."""
+    self.adb = adb
+    self.device = device
+    self.testsuite_name = testsuite_name
+    self.gtest_filter = gtest_filter
+    self.logcat_process = None
+    self.has_storage = False
+    self.log_dir = None
+    self.log_file_name = None
+    self.collect_new_crashes = collect_new_crashes
+    self.old_crash_files = self.ListCrashFiles()
+
+  def InitStorage(self):
+    """Initializes the storage in where we put the debug information."""
+    if self.has_storage:
+      return
+    self.has_storage = True
+    self.log_dir = tempfile.mkdtemp()
+    self.log_file_name = os.path.join(self.log_dir,
+                                      self._GeneratePrefixName() + '_log.txt')
+
+  def CleanupStorage(self):
+    """Cleans up the storage in where we put the debug information."""
+    if not self.has_storage:
+      return
+    self.has_storage = False
+    assert os.path.exists(self.log_dir)
+    shutil.rmtree(self.log_dir)
+    self.log_dir = None
+    self.log_file_name = None
+
+  def GetStoragePath(self):
+    """Returns the path in where we store the debug information."""
+    self.InitStorage()
+    return self.log_dir
+
+  def _GetSignatureFromGTestFilter(self):
+    """Gets a signature from gtest_filter.
+
+    Signature is used to identify the tests from which we collect debug
+    information.
+
+    Returns:
+      A signature string. Returns 'all' if there is no gtest filter.
+    """
+    if not self.gtest_filter:
+      return 'all'
+    filename_chars = "-_()%s%s" % (string.ascii_letters, string.digits)
+    return ''.join(c for c in self.gtest_filter if c in filename_chars)
+
+  def _GeneratePrefixName(self):
+    """Generates a prefix name for debug information of the test.
+
+    The prefix name consists of the following:
+    (1) root name of test_suite_base.
+    (2) device serial number.
+    (3) filter signature generated from gtest_filter.
+    (4) date & time when calling this method.
+
+    Returns:
+      Name of the log file.
+    """
+    return (os.path.splitext(self.testsuite_name)[0] + '_' + self.device + '_' +
+            self._GetSignatureFromGTestFilter() + '_' +
+            datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S-%f'))
+
+  def StartRecordingLog(self, clear=True, filters=['*:v']):
+    """Starts recording logcat output to a file.
+
+    This call should come before running the tests, with StopRecordingLog
+    called after the tests finish.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    self.InitStorage()
+    self.StopRecordingLog()
+    if clear:
+      cmd_helper.RunCmd(['adb', 'logcat', '-c'])
+    logging.info('Start dumping log to %s ...' % self.log_file_name)
+    command = 'adb logcat -v threadtime %s > %s' % (' '.join(filters),
+                                                    self.log_file_name)
+    self.logcat_process = subprocess.Popen(command, shell=True)
+
+  def StopRecordingLog(self):
+    """Stops an existing logcat recording subprocess."""
+    if not self.logcat_process:
+      return
+    # Cannot evaluate directly as 0 is a possible value.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process = None
+    logging.info('Finish log dump.')
+
+  def TakeScreenshot(self, identifier_mark):
+    """Takes a screen shot from current specified device.
+
+    Args:
+      identifier_mark: A string to identify the screen shot DebugInfo will take.
+                       It will be part of filename of the screen shot. Empty
+                       string is acceptable.
+    Returns:
+      True if the screen shot was successfully taken, otherwise False.
+    """
+    self.InitStorage()
+    assert isinstance(identifier_mark, str)
+    shot_path = os.path.join(self.log_dir, ''.join([self._GeneratePrefixName(),
+                                                    identifier_mark,
+                                                    '_screenshot.png']))
+    screenshot_path = os.path.join(os.getenv('ANDROID_HOST_OUT'), 'bin',
+                                   'screenshot2')
+    re_success = re.compile(re.escape('Success.'), re.MULTILINE)
+    if re_success.findall(cmd_helper.GetCmdOutput([screenshot_path, '-s',
+                                                   self.device, shot_path])):
+      logging.info("Successfully took a screen shot to %s" % shot_path)
+      return True
+    logging.error('Failed to take screen shot from device %s' % self.device)
+    return False
+
+  def ListCrashFiles(self):
+    """Collects crash files from current specified device.
+
+    Returns:
+      A dict of crash files in format {"name": (size, lastmod), ...}.
+    """
+    if not self.collect_new_crashes:
+      return {}
+    return self.adb.ListPathContents(TOMBSTONE_DIR)
+
+  def ArchiveNewCrashFiles(self):
+    """Archives the crash files newly generated until calling this method."""
+    if not self.collect_new_crashes:
+      return
+    current_crash_files = self.ListCrashFiles()
+    files = [f for f in current_crash_files if f not in self.old_crash_files]
+    logging.info('New crash file(s):%s' % ' '.join(files))
+    for f in files:
+      self.adb.Adb().Pull(TOMBSTONE_DIR + f,
+                          os.path.join(self.GetStoragePath(), f))
+
+  @staticmethod
+  def ZipAndCleanResults(dest_dir, dump_file_name, debug_info_list):
+    """A helper method to zip all debug information results into a dump file.
+
+    Args:
+      dest_dir: Directory path where we put the dump file.
+      dump_file_name: Desired name of the dump file. This method makes sure
+                      '.zip' will be added as ext name.
+      debug_info_list: List of all debug info objects.
+    """
+    if not dest_dir or not dump_file_name or not debug_info_list:
+      return
+    cmd_helper.RunCmd(['mkdir', '-p', dest_dir])
+    log_basename = os.path.basename(dump_file_name)
+    log_file = os.path.join(dest_dir,
+                            os.path.splitext(log_basename)[0] + '.zip')
+    logging.info('Zipping debug dumps into %s ...' % log_file)
+    for d in debug_info_list:
+      d.ArchiveNewCrashFiles()
+    # Add new dumps into the zip file. The zip may exist already if previous
+    # gtest also dumps the debug information. It's OK since we clean up the old
+    # dumps in each build step.
+    cmd_helper.RunCmd(['zip', '-q', '-r', log_file] +
+                      [d.GetStoragePath() for d in debug_info_list])
+    assert os.path.exists(log_file)
+    for debug_info in debug_info_list:
+      debug_info.CleanupStorage()
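+
+
+# A rough, hypothetical sketch of driving GTestDebugInfo around a gtest run;
+# the adb object, device id, suite name and dump directory are illustrative:
+#   info = GTestDebugInfo(adb, 'emulator-5554', 'base_unittests', gtest_filter)
+#   info.StartRecordingLog()
+#   ...  # run the test suite
+#   info.StopRecordingLog()
+#   GTestDebugInfo.ZipAndCleanResults('/tmp/dumps', 'base_unittests', [info])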
diff --git a/build/android/emulator.py b/build/android/emulator.py
new file mode 100755
index 0000000..49c3caa
--- /dev/null
+++ b/build/android/emulator.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+Assumes system environment ANDROID_NDK_ROOT has been set.
+
+  Emulator: The class provides the methods to launch/shutdown the emulator with
+            the Android virtual device named 'buildbot'.
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import sys
+import time
+
+import android_commands
+
+# adb_interface.py is under ../../third_party/android/testrunner/
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
+   '..', 'third_party', 'android', 'testrunner'))
+import adb_interface
+import cmd_helper
+import errors
+import run_command
+
+class EmulatorLaunchException(Exception):
+  """Emulator failed to launch."""
+  pass
+
+def _KillAllEmulators():
+  """Kill all running emulators that look like ones we started.
+
+  There are odd 'sticky' cases where there can be no emulator process
+  running but a device slot is taken.  A little bot trouble and
+  we're out of room forever.
+  """
+  emulators = android_commands.GetEmulators()
+  if not emulators:
+    return
+  for emu_name in emulators:
+    cmd_helper.GetCmdOutput(['adb', '-s', emu_name, 'emu', 'kill'])
+  logging.info('Emulator killing is async; give a few seconds for all to die.')
+  for i in range(5):
+    if not android_commands.GetEmulators():
+      return
+    time.sleep(1)
+
+def _GetAvailablePort():
+  """Returns an available TCP port for the console."""
+  used_ports = []
+  emulators = android_commands.GetEmulators()
+  for emulator in emulators:
+    used_ports.append(emulator.split('-')[1])
+  # The port must be an even number between 5554 and 5584.
+  for port in range(5554, 5585, 2):
+    if str(port) not in used_ports:
+      return port
+
+
+class Emulator(object):
+  """Provides the methods to lanuch/shutdown the emulator.
+
+  The emulator has the android virtual device named 'buildbot'.
+
+  The emulator could use any even TCP port between 5554 and 5584 for the
+  console communication, and this port will be part of the device name like
+  'emulator-5554'. We assume this is always the case, as the device name is
+  the id of the emulator managed by this class.
+
+  Attributes:
+    emulator: Path of Android's emulator tool.
+    popen: Popen object of the running emulator process.
+    device: Device name of this emulator.
+  """
+
+  # Signals that we handle by killing the emulator.
+  _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+  # Time to wait for an emulator launch, in seconds.  This includes
+  # the time to launch the emulator and a wait-for-device command.
+  _LAUNCH_TIMEOUT = 120
+
+  # Timeout interval of wait-for-device command before bouncing to a
+  # process life check.
+  _WAITFORDEVICE_TIMEOUT = 5
+
+  # Time to wait for a "wait for boot complete" (property set on device).
+  _WAITFORBOOT_TIMEOUT = 300
+
+  def __init__(self):
+    try:
+      android_sdk_root = os.environ['ANDROID_SDK_ROOT']
+    except KeyError:
+      logging.critical('The ANDROID_SDK_ROOT must be set to run the test on '
+                       'emulator.')
+      raise
+    self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+    self.popen = None
+    self.device = None
+
+  def _DeviceName(self):
+    """Return our device name."""
+    port = _GetAvailablePort()
+    return ('emulator-%d' % port, port)
+
+  def Launch(self):
+    """Launches the emulator and waits for package manager to startup.
+
+    If it fails, an exception will be raised.
+    """
+    _KillAllEmulators()  # just to be sure
+    self._AggressiveImageCleanup()
+    (self.device, port) = self._DeviceName()
+    emulator_command = [
+        self.emulator,
+        # Speed up emulator launch by 40%.  Really.
+        '-no-boot-anim',
+        # The default /data size is 64M.
+        # That's not enough for 4 unit test bundles and their data.
+        '-partition-size', '256',
+        # ALWAYS wipe the data.  We've seen cases where an emulator
+        # gets 'stuck' if we don't do this (every thousand runs or
+        # so).
+        '-wipe-data',
+        # Use a familiar name and port.
+        '-avd', 'buildbot',
+        '-port', str(port)]
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+    self._ConfirmLaunch()
+
+  def _AggressiveImageCleanup(self):
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER.  If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def _ConfirmLaunch(self, wait_for_boot=False):
+    """Confirm the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout.  On each
+    timeout, check the emulator process is still alive.
+    After confirming a wait-for-device can be successful, make sure
+    it returns the right answer.
+    """
+    a = android_commands.AndroidCommands(self.device, False)
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wfd twice
+    adb_cmd = "adb -s %s %s" % (self.device, 'wait-for-device')
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        run_command.RunCommand(adb_cmd,
+                               timeout_time=self._WAITFORDEVICE_TIMEOUT,
+                               retry_count=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except errors.WaitForResponseTimedOutError as e:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        adb_cmd = "adb -s %s %s" % (self.device, 'kill-server')
+        run_command.RunCommand(adb_cmd)
+      self.popen.poll()
+      if self.popen.returncode is not None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
+      # TODO(jrg): for reasons I don't understand, sometimes this
+      # gives an "error: device not found" which is only fixed with an
+      # 'adb kill-server' command.  Fix.
+      a.Adb().SetTargetSerial(self.device)
+      a.Adb().WaitForBootComplete(self._WAITFORBOOT_TIMEOUT)
+
+  def Shutdown(self):
+    """Shuts down the process started by launch."""
+    if self.popen:
+      self.popen.poll()
+      if self.popen.returncode is None:
+        self.popen.kill()
+      self.popen = None
+
+  def _ShutdownOnSignal(self, signum, frame):
+    logging.critical('emulator _ShutdownOnSignal')
+    for sig in self._SIGNALS:
+      signal.signal(sig, signal.SIG_DFL)
+    self.Shutdown()
+    raise KeyboardInterrupt  # print a stack
+
+  def _InstallKillHandler(self):
+    """Install a handler to kill the emulator when we exit unexpectedly."""
+    for sig in self._SIGNALS:
+      signal.signal(sig, self._ShutdownOnSignal)
+
+def main(argv):
+  Emulator().Launch()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh
new file mode 100755
index 0000000..3fa1918
--- /dev/null
+++ b/build/android/envsetup.sh
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android. Only Android NDK,
+# Revision 6b on Linux or Mac is officially supported.
+#
+# To run this script, the system environment ANDROID_NDK_ROOT must be set
+# to Android NDK's root path.
+#
+# TODO(michaelbai): Develop a standard for NDK/SDK integration.
+#
+# If the current path isn't Chrome's src directory, CHROME_SRC must be set
+# to Chrome's src directory.
+
+if [ ! -d "${ANDROID_NDK_ROOT}" ]; then
+  echo "ANDROID_NDK_ROOT must be set to the path of Android NDK, Revision 6b." \
+    >& 2
+  echo "which could be installed by" >& 2
+  echo "<chromium_tree>/src/build/install-build-deps-android.sh" >& 2
+  return 1
+fi
+
+if [ ! -d "${ANDROID_SDK_ROOT}" ]; then
+  echo "ANDROID_SDK_ROOT must be set to the path of Android SDK, Android 3.2." \
+    >& 2
+  echo "which could be installed by" >& 2
+  echo "<chromium_tree>/src/build/install-build-deps-android.sh" >& 2
+  return 1
+fi
+
+host_os=$(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')
+
+case "${host_os}" in
+  "linux")
+    toolchain_dir="linux-x86"
+    ;;
+  "mac")
+    toolchain_dir="darwin-x86"
+    ;;
+  *)
+    echo "Host platform ${host_os} is not supported" >& 2
+    return 1
+esac
+
+export ANDROID_TOOLCHAIN="${ANDROID_NDK_ROOT}/toolchains/arm-linux-androideabi-4.4.3/prebuilt/${toolchain_dir}/bin/"
+
+# Add Android SDK's platform-tools to system path.
+export PATH="${PATH}:${ANDROID_SDK_ROOT}/platform-tools/"
+
+if [ ! -d "${ANDROID_TOOLCHAIN}" ]; then
+  echo "Can not find Android toolchain in ${ANDROID_TOOLCHAIN}." >& 2
+  echo "The NDK version might be wrong." >& 2
+  return 1
+fi
+
+if [ -z "${CHROME_SRC}" ]; then
+  # if $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+  export CHROME_SRC=$(pwd)
+fi
+
+if [ ! -d "${CHROME_SRC}" ]; then
+  echo "CHROME_SRC must be set to the path of Chrome source code." >& 2
+  return 1
+fi
+
+make() {
+  # TODO(michaelbai): how to use ccache in NDK.
+  if [ -n "${USE_CCACHE}" ]; then
+    if [ -e "${PREBUILT_CCACHE_PATH}" ]; then
+      use_ccache_var="$PREBUILT_CCACHE_PATH "
+    else
+      use_ccache_var=""
+    fi
+  fi
+  # Only cross-compile if the build is being done either from Chromium's src/
+  # directory, or through WebKit, in which case the WEBKIT_ANDROID_BUILD
+  # environment variable will be defined. WebKit uses a different directory.
+  if [ -f "$PWD/build/android/envsetup.sh" ] ||
+     [ -n "${WEBKIT_ANDROID_BUILD}" ]; then
+    CC="${use_ccache_var}${CROSS_CC}" CXX="${use_ccache_var}${CROSS_CXX}" \
+    LINK="${CROSS_LINK}" AR="${CROSS_AR}" RANLIB="${CROSS_RANLIB}" \
+      command make $*
+  else
+    command make $*
+  fi
+}
+
+# Performs a gyp_chromium run to convert gyp->Makefile for android code.
+android_gyp() {
+  "${CHROME_SRC}/build/gyp_chromium" --depth="${CHROME_SRC}"
+}
+
+firstword() {
+  echo "${1}"
+}
+
+export CROSS_AR="$(firstword "${ANDROID_TOOLCHAIN}"/*-ar)"
+export CROSS_CC="$(firstword "${ANDROID_TOOLCHAIN}"/*-gcc)"
+export CROSS_CXX="$(firstword "${ANDROID_TOOLCHAIN}"/*-g++)"
+export CROSS_LINK="$(firstword "${ANDROID_TOOLCHAIN}"/*-gcc)"
+export CROSS_RANLIB="$(firstword "${ANDROID_TOOLCHAIN}"/*-ranlib)"
+export OBJCOPY="$(firstword "${ANDROID_TOOLCHAIN}"/*-objcopy)"
+export STRIP="$(firstword "${ANDROID_TOOLCHAIN}"/*-strip)"
+
+# The set of GYP_DEFINES to pass to gyp. Use 'readlink -e' on directories
+# to canonicalize them (remove double '/', remove trailing '/', etc).
+DEFINES="OS=android"
+DEFINES+=" android_build_type=0"  # Currently, Only '0' is supportted.
+DEFINES+=" host_os=${host_os}"
+DEFINES+=" linux_fpic=1"
+DEFINES+=" release_optimize=s"
+DEFINES+=" linux_use_tcmalloc=0"
+DEFINES+=" disable_nacl=1"
+DEFINES+=" remoting=0"
+DEFINES+=" p2p_apis=0"
+DEFINES+=" enable_touch_events=1"
+DEFINES+=" build_ffmpegsumo=0"
+# TODO(bulach): use "shared_libraries" once the transition from executable
+# is over.
+DEFINES+=" gtest_target_type=executable"
+DEFINES+=" branding=Chromium"
+
+# If the TARGET_PRODUCT wasn't set, use 'full' by default.
+if [ -z "${TARGET_PRODUCT}" ]; then
+  TARGET_PRODUCT="full"
+fi
+
+# The following defines will affect ARM code generation of both C/C++ compiler
+# and V8 mksnapshot.
+case "${TARGET_PRODUCT}" in
+  "full")
+    DEFINES+=" target_arch=arm"
+    DEFINES+=" arm_neon=0 armv7=0 arm_thumb=1 arm_fpu=vfp"
+    ;;
+  *x86*)
+    DEFINES+=" target_arch=ia32 use_libffmpeg=0"
+    ;;
+  *)
+    echo "TARGET_PRODUCT: ${TARGET_PRODUCT} is not supported." >& 2
+    return 1
+esac
+
+export GYP_DEFINES="${DEFINES}"
+
+# Use the "android" flavor of the Makefile generator for both Linux and OS X.
+export GYP_GENERATORS="make-android"
+
+# Use our All target as the default
+export GYP_GENERATOR_FLAGS="${GYP_GENERATOR_FLAGS} default_target=All"
+
+# We want to use our version of "all" targets.
+export CHROMIUM_GYP_FILE="${CHROME_SRC}/build/all_android.gyp"
diff --git a/build/android/flag_changer.py b/build/android/flag_changer.py
new file mode 100644
index 0000000..2c3da49
--- /dev/null
+++ b/build/android/flag_changer.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+# Location where chrome reads command line flags from
+CHROME_COMMAND_FILE = '/data/local/chrome-command-line'
+
+
+class FlagChanger(object):
+  """Temporarily changes the flags Chrome runs with."""
+
+  def __init__(self, android_cmd):
+    self._android_cmd = android_cmd
+    self._old_flags = None
+
+  def Set(self, flags, append=False):
+    """Sets the command line flags used when chrome is started.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+      append: Whether to append to existing flags or overwrite them.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    if not self._old_flags:
+      self._old_flags = self._android_cmd.GetFileContents(CHROME_COMMAND_FILE)
+      if self._old_flags:
+        self._old_flags = self._old_flags[0].strip()
+
+    if append and self._old_flags:
+      # Avoid appending flags that are already present.
+      new_flags = filter(lambda flag: self._old_flags.find(flag) == -1, flags)
+      self._android_cmd.SetFileContents(CHROME_COMMAND_FILE,
+                                        self._old_flags + ' ' +
+                                        ' '.join(new_flags))
+    else:
+      self._android_cmd.SetFileContents(CHROME_COMMAND_FILE,
+                                        'chrome ' + ' '.join(flags))
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    if self._old_flags == None:
+      return  # Set() was never called.
+    elif self._old_flags:
+      self._android_cmd.SetFileContents(CHROME_COMMAND_FILE, self._old_flags)
+    else:
+      self._android_cmd.RunShellCommand('rm ' + CHROME_COMMAND_FILE)
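+
+
+# Minimal, hypothetical usage sketch; the flag below is the example given in
+# Set()'s docstring:
+#   changer = FlagChanger(android_cmd)
+#   changer.Set(['--single-process'])
+#   ...  # run tests with the flag applied
+#   changer.Restore()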
diff --git a/build/android/gtest_filter/base_unittests_disabled b/build/android/gtest_filter/base_unittests_disabled
new file mode 100644
index 0000000..1ff27b2
--- /dev/null
+++ b/build/android/gtest_filter/base_unittests_disabled
@@ -0,0 +1,8 @@
+# List of suppressions
+#
+# Automatically generated by run_tests.py
+RTLTest.GetTextDirection
+ReadOnlyFileUtilTest.ContentsEqual
+ReadOnlyFileUtilTest.TextContentsEqual
+SharedMemoryTest.OpenExclusive
+StackTrace.DebugPrintBacktrace
diff --git a/build/android/gtest_filter/base_unittests_emulator_additional_disabled b/build/android/gtest_filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..85e8fd6
--- /dev/null
+++ b/build/android/gtest_filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from the emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/build/android/gtest_filter/ipc_tests_disabled b/build/android/gtest_filter/ipc_tests_disabled
new file mode 100644
index 0000000..7b16f21
--- /dev/null
+++ b/build/android/gtest_filter/ipc_tests_disabled
@@ -0,0 +1,2 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
diff --git a/build/android/gtest_filter/net_unittests_disabled b/build/android/gtest_filter/net_unittests_disabled
new file mode 100644
index 0000000..215e9ad
--- /dev/null
+++ b/build/android/gtest_filter/net_unittests_disabled
@@ -0,0 +1,52 @@
+# List of suppressions.
+# Generated by hand to get net_unittests running initially.
+# Likely too aggressive disabling.
+CertVerifierTest.*
+CookieMonsterTest.*
+DirectoryListerTest.*
+DiskCacheTest.*
+DiskCacheBackendTest.*
+DnsConfigServiceTest.*
+DnsRRResolverTest.Resolve
+GZipUnitTest.*
+HostResolverImplTest.*
+NetUtilTest.GetNetworkList
+ProxyResolverV8Test.*
+TransportSecurityStateTest.ParseSidePins*
+X509CertificateTest.*
+X509CertificateParseTest.*
+FtpDirectoryListingParserWindowsTest.Good
+HttpNetworkTransactionTest.SOCKS4_HTTP_GET
+HttpNetworkTransactionTest.SOCKS4_SSL_GET
+HttpNetworkTransactionTest.UploadUnreadableFile
+HttpNetworkTransactionTest.UnreadableUploadFileAfterAuthRestart
+ProxyResolverJSBindingsTest.MyIpAddress
+ProxyScriptFetcherImplTest.*
+SOCKSClientSocketTest.*
+SSLClientSocketTest.*
+PythonUtils.PythonRunTime
+URLRequestTestHTTP.*
+HTTPSRequestTest.HTTPSMismatchedTest
+HTTPSRequestTest.HTTPSExpiredTest
+HTTPSRequestTest.HTTPSPreloadedHSTSTest
+HTTPSRequestTest.ClientAuthTest
+URLRequestTest.FileTest
+URLRequestTest.FileDirRedirectNoCrash
+URLRequestTest.DelayedCookieCallback
+URLRequestTest.DoNotSendCookies
+URLRequestTest.DoNotSaveCookies
+URLRequestTest.DoNotSendCookies_ViaPolicy
+URLRequestTest.DoNotSaveCookies_ViaPolicy
+URLRequestTest.DoNotSaveEmptyCookies
+URLRequestTest.DoNotSendCookies_ViaPolicy_Async
+URLRequestTest.DoNotSaveCookies_ViaPolicy_Async
+URLRequestTest.CookiePolicy_ForceSession
+URLRequestTest.DoNotOverrideReferrer
+WebSocketJobTest.ThrottlingWebSocket
+WebSocketJobTest.ThrottlingWebSocketSpdyEnabled
+WebSocketJobTest.ThrottlingSpdy
+WebSocketJobTest.ThrottlingSpdySpdyEnabled
+X509CertificateWeakDigestTest.*
+FtpDirectoryListingParserTest.*
+*/X509CertificateWeakDigestTest.*
+
diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py
new file mode 100755
index 0000000..ffe985b
--- /dev/null
+++ b/build/android/lighttpd_server.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import pexpect
+import random
+import shutil
+import socket
+import sys
+import tempfile
+
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port in the range 8005 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or 9000
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  def _GetRandomPort(self):
+    # Ports 8001-8004 are reserved for other test servers. Ensure we don't
+    # collide with them.
+    return random.randint(8005, 8999)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # Currently we use lighttpd as the HTTP server in tests.
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit, so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  def _GetDefaultBaseConfig(self):
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/perf_tests_helper.py b/build/android/perf_tests_helper.py
new file mode 100644
index 0000000..740c455
--- /dev/null
+++ b/build/android/perf_tests_helper.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  # Colons (:) and equal signs (=) are not allowed, and we chose an arbitrary
+  # limit of 40 chars.
+  return re.sub(':|=', '_', s[:40])
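+
+# Illustrative example with a hypothetical input:
+#   _EscapePerfResult('cache:hit=ratio') returns 'cache_hit_ratio', and inputs
+#   are truncated to 40 characters before the substitution.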
+
+
+def PrintPerfResult(measurement, trace, values, units, important=True,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+    trace: A description of the particular data point, e.g. "reference".
+    values: A list of numeric measured values.
+    units: A description of the units of measure, e.g. "bytes".
+    important: If True, the output line will be specially marked, to notify the
+        post-processor.
+    print_to_stdout: If True, the formatted result line is also printed to
+        stdout.
+
+  Returns:
+    String of the formatted perf result.
+  """
+  important_marker = '*' if important else ''
+
+  assert isinstance(values, list)
+  assert len(values)
+  assert '/' not in measurement
+  avg = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([str(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+    except ValueError:
+      value = ", ".join(values)
+  else:
+    value = values[0]
+
+  output = '%sRESULT %s: %s= %s %s' % (important_marker,
+                                       _EscapePerfResult(measurement),
+                                       _EscapePerfResult(trace),
+                                       value, units)
+  if avg is not None:
+    output += '\nAvg %s: %d%s' % (measurement, avg, units)
+  if print_to_stdout:
+    print output
+  return output
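+
+# Illustrative example with hypothetical values: a call such as
+#   PrintPerfResult('vm_peak', 'reference', [1024, 2048], 'bytes')
+# prints and returns
+#   *RESULT vm_peak: reference= [1024,2048] bytes
+#   Avg vm_peak: 1536bytes
+# which is the line format the perf post-processor parses.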
diff --git a/build/android/run_tests.py b/build/android/run_tests.py
new file mode 100755
index 0000000..6e3eccb
--- /dev/null
+++ b/build/android/run_tests.py
@@ -0,0 +1,288 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all the native unit tests.
+
+1. Copy over test binary to /data/local on device.
+2. Resources: chrome/unit_tests requires resources (chrome.pak and en-US.pak)
+   to be deployed to the device (in /data/local/tmp).
+3. Environment:
+3.1. chrome/unit_tests requires (via chrome_paths.cc) a directory named:
+     /data/local/tmp/chrome/test/data
+3.2. page_cycler_tests have the following requirements:
+3.2.1. the following data on the host:
+       <chrome_src_dir>/tools/page_cycler
+       <chrome_src_dir>/data/page_cycler
+3.2.2. two data directories to store the above test data on the device, named:
+       /data/local/tmp/tools/ (for the database perf test)
+       /data/local/tmp/data/ (for other perf tests)
+3.2.3. an HTTP server to serve the HTTP perf tests.
+       The HTTP root is the host's <chrome_src_dir>/data/page_cycler/, port 8000.
+3.2.4. a tool named forwarder, which must run on the device to forward
+       HTTP requests/responses between the host and the device.
+3.2.5. Chrome installed on the device.
+4. Run the binary on the device and stream the log to the host.
+4.1. Optionally, filter specific tests.
+4.2. Optionally, rebaseline: run the available tests and update the
+     suppressions file for failures.
+4.3. If we're running a single test suite and we have multiple devices
+     connected, we'll shard the tests.
+5. Clean up the device.
+
+Suppressions:
+
+Individual tests in a test binary can be suppressed by listing them in
+a file in the gtest_filter directory with the same name as the test binary,
+one test per line. Here is an example:
+
+  $ cat gtest_filter/base_unittests_disabled
+  DataPackTest.Load
+  ReadOnlyFileUtilTest.ContentsEqual
+
+This file is generated by the tests running on devices. If running on an
+emulator, an additional filter file listing the tests that fail only on the
+emulator is also loaded. We do not care about the rare test cases that
+succeed on the emulator but fail on a device.
+"""
+
+import logging
+import os
+import re
+import signal
+import subprocess
+import sys
+import time
+
+import android_commands
+import cmd_helper
+import debug_info
+import emulator
+import run_tests_helper
+from single_test_runner import SingleTestRunner
+from test_package_executable import TestPackageExecutable
+from test_result import BaseTestResult, TestResults
+
+_TEST_SUITES = ['base_unittests', 'sql_unittests', 'ipc_tests', 'net_unittests']
+
+
+class TimeProfile(object):
+  """Simple profiler for an action, logging its elapsed time."""
+
+  def __init__(self, description):
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+
+  def Stop(self):
+    """Stop profiling and dump a log."""
+    if self._starttime:
+      stoptime = time.time()
+      logging.info('%fsec to perform %s' %
+                   (stoptime - self._starttime, self._description))
+      self._starttime = None
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def _IsLinux(self):
+    """Return True if on Linux; else False."""
+    return sys.platform.startswith('linux')
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not self._IsLinux():
+      return
+    proc = subprocess.Popen(["Xvfb", ":9", "-screen", "0", "1024x768x24",
+                             "-ac"],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ":9"
+
+    # Now confirm, giving a chance for it to start if needed.
+    for test in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      pid, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
+
+
+def RunTests(device, test_suite, gtest_filter, test_arguments, rebaseline,
+             timeout, performance_test, cleanup_test_files, tool,
+             log_dump_name):
+  """Runs the tests.
+
+  Args:
+    device: Device to run the tests.
+    test_suite: A specific test suite to run, empty to run all.
+    gtest_filter: A gtest_filter flag.
+    test_arguments: Additional arguments to pass to the test binary.
+    rebaseline: Whether or not to run tests in isolation and update the filter.
+    timeout: Timeout for each test.
+    performance_test: Whether or not this is a performance test.
+    cleanup_test_files: Whether or not to cleanup test files on device.
+    tool: Name of the Valgrind tool.
+    log_dump_name: Name of log dump file.
+
+  Returns:
+    A TestResults object.
+  """
+  results = []
+
+  if test_suite:
+    global _TEST_SUITES
+    if not os.path.exists(test_suite):
+      logging.critical('Unrecognized test suite, supported: %s' %
+                       _TEST_SUITES)
+      if test_suite in _TEST_SUITES:
+        logging.critical('(Remember to include the path: out/Release/%s)',
+                         test_suite)
+      return TestResults.FromOkAndFailed([], [BaseTestResult(test_suite, '')])
+    _TEST_SUITES = [test_suite]
+  else:
+    # If not specified, assume the test suites are in out/Release
+    test_suite_dir = os.path.abspath(os.path.join(run_tests_helper.CHROME_DIR,
+        'out', 'Release'))
+    _TEST_SUITES = [os.path.join(test_suite_dir, t) for t in _TEST_SUITES]
+  debug_info_list = []
+  print _TEST_SUITES  # So it shows up in buildbot output
+  for t in _TEST_SUITES:
+    test = SingleTestRunner(device, t, gtest_filter, test_arguments,
+                            timeout, rebaseline, performance_test,
+                            cleanup_test_files, tool, bool(log_dump_name))
+    test.RunTests()
+    results += [test.test_results]
+    # Collect debug info.
+    debug_info_list += [test.dump_debug_info]
+    if rebaseline:
+      test.UpdateFilter(test.test_results.failed)
+    elif test.test_results.failed:
+      # Stop running tests once a failed test is encountered.
+      test.test_results.LogFull()
+      break
+  # Zip all debug info outputs into a file named by log_dump_name.
+  debug_info.GTestDebugInfo.ZipAndCleanResults(
+      os.path.join(run_tests_helper.CHROME_DIR, 'out', 'Release',
+          'debug_info_dumps'),
+      log_dump_name, [d for d in debug_info_list if d])
+  return TestResults.FromTestResults(results)
+
+def Dispatch(options):
+  """Dispatches the tests, sharding if possible.
+
+  If options.use_emulator is True, all tests will be run in a new emulator
+  instance.
+
+  Args:
+    options: options for running the tests.
+
+  Returns:
+    0 if successful, number of failing tests otherwise.
+  """
+  if options.test_suite == 'help':
+    ListTestSuites()
+    return 0
+  buildbot_emulator = None
+  attached_devices = []
+
+  if options.use_xvfb:
+    xvfb = Xvfb()
+    xvfb.Start()
+
+  if options.use_emulator:
+    t = TimeProfile('Emulator launch')
+    buildbot_emulator = emulator.Emulator()
+    buildbot_emulator.Launch()
+    t.Stop()
+    attached_devices.append(buildbot_emulator.device)
+  else:
+    attached_devices = android_commands.GetAttachedDevices()
+
+  if not attached_devices:
+    logging.critical('A device must be attached and online.')
+    return 1
+
+  test_results = RunTests(attached_devices[0], options.test_suite,
+                          options.gtest_filter, options.test_arguments,
+                          options.rebaseline, options.timeout,
+                          options.performance_test,
+                          options.cleanup_test_files, options.tool,
+                          options.log_dump)
+  if buildbot_emulator:
+    buildbot_emulator.Shutdown()
+  if options.use_xvfb:
+    xvfb.Stop()
+
+  return len(test_results.failed)
+
+def ListTestSuites():
+  """Displays the list of available test suites."""
+  print 'Available test suites are:'
+  for test_suite in _TEST_SUITES:
+    print test_suite
+
+
+def main(argv):
+  option_parser = run_tests_helper.CreateTestRunnerOptionParser(None,
+      default_timeout=0)
+  option_parser.add_option('-s', dest='test_suite',
+                           help='Executable name of the test suite to run '
+                           '(use -s help to list them)')
+  option_parser.add_option('-r', dest='rebaseline',
+                           help='Rebaseline and update *testsuite_disabled',
+                           action='store_true',
+                           default=False)
+  option_parser.add_option('-f', dest='gtest_filter',
+                           help='gtest filter')
+  option_parser.add_option('-a', '--test_arguments', dest='test_arguments',
+                           help='Additional arguments to pass to the test')
+  option_parser.add_option('-p', dest='performance_test',
+                           help='Indicator of performance test',
+                           action='store_true',
+                           default=False)
+  option_parser.add_option('-L', dest='log_dump',
+                           help='file name of the log dump, which will be put '
+                           'in the debug_info_dumps subfolder under the same '
+                           'directory where the test_suite exists.')
+  option_parser.add_option('-e', '--emulator', dest='use_emulator',
+                           help='Run tests in a new instance of emulator',
+                           action='store_true',
+                           default=False)
+  option_parser.add_option('-x', '--xvfb', dest='use_xvfb',
+                           action='store_true', default=False,
+                           help='Use Xvfb around tests (ignored if not Linux)')
+  options, args = option_parser.parse_args(argv)
+  if len(args) > 1:
+    print 'Unknown argument:', args[1:]
+    option_parser.print_usage()
+    sys.exit(1)
+  run_tests_helper.SetLogLevel(options.verbose_count)
+  return Dispatch(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/run_tests_helper.py b/build/android/run_tests_helper.py
new file mode 100644
index 0000000..45e3afd
--- /dev/null
+++ b/build/android/run_tests_helper.py
@@ -0,0 +1,133 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native test runners."""
+
+import logging
+import optparse
+import os
+import subprocess
+import sys
+
+# TODO(michaelbai): Move constant definitions like below to a common file.
+FORWARDER_PATH = '/data/local/tmp/forwarder'
+
+CHROME_DIR = os.path.abspath(os.path.join(sys.path[0], '..', '..'))
+
+
+def IsRunningAsBuildbot():
+  """Returns True if we are currently running on buildbot; False otherwise."""
+  return bool(os.getenv('BUILDBOT_BUILDERNAME'))
+
+
+def ReportBuildbotLink(label, url):
+  """Adds a link with name |label| linking to |url| to current buildbot step.
+
+  Args:
+    label: A string with the name of the label.
+    url: A string of the URL.
+  """
+  if IsRunningAsBuildbot():
+    print '@@@STEP_LINK@%s@%s@@@' % (label, url)
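+
+# For example (hypothetical label and URL), ReportBuildbotLink('gtest log',
+# 'http://host/log.txt') emits the line
+#   @@@STEP_LINK@gtest log@http://host/log.txt@@@
+# which the buildbot master renders as a link on the current step.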
+
+
+def ReportBuildbotMsg(msg):
+  """Appends |msg| to the current buildbot step text.
+
+  Args:
+    msg: String to be appended.
+  """
+  if IsRunningAsBuildbot():
+    print '@@@STEP_TEXT@%s@@@' % msg
+
+def ReportBuildbotError():
+  """Marks the current step as failed."""
+  if IsRunningAsBuildbot():
+    print '@@@STEP_FAILURE@@@'
+
+
+def GetExpectations(file_name):
+  """Returns a list of test names in the |file_name| test expectations file."""
+  if not file_name or not os.path.exists(file_name):
+    return []
+  return [x for x in [x.strip() for x in file(file_name).readlines()]
+          if x and x[0] != '#']
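+
+# For instance, pointed at the gtest_filter/base_unittests_disabled file added
+# earlier in this change, GetExpectations() returns the plain test names, e.g.
+#   ['RTLTest.GetTextDirection', 'ReadOnlyFileUtilTest.ContentsEqual', ...]
+# with comment and blank lines stripped.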
+
+
+def SetLogLevel(verbose_count):
+  """Sets the log level based on |verbose_count|."""
+  log_level = logging.WARNING  # Default.
+  if verbose_count == 1:
+    log_level = logging.INFO
+  elif verbose_count >= 2:
+    log_level = logging.DEBUG
+  logging.getLogger().setLevel(log_level)
+
+
+def CreateTestRunnerOptionParser(usage=None, default_timeout=60):
+  """Returns a new OptionParser with arguments applicable to all tests."""
+  option_parser = optparse.OptionParser(usage=usage)
+  option_parser.add_option('-t', dest='timeout',
+                           help='Timeout to wait for each test',
+                           type='int',
+                           default=default_timeout)
+  option_parser.add_option('-c', dest='cleanup_test_files',
+                           help='Cleanup test files on the device after run',
+                           action='store_true',
+                           default=False)
+  option_parser.add_option('-v',
+                           '--verbose',
+                           dest='verbose_count',
+                           default=0,
+                           action='count',
+                           help='Verbose level (multiple times for more)')
+  option_parser.add_option('--tool',
+                           dest='tool',
+                           help='Run the test under a tool '
+                           '(use --tool help to list them)')
+  return option_parser
+
+
+def ForwardDevicePorts(adb, ports, host_name='127.0.0.1'):
+  """Forwards a TCP port on the device back to the host.
+
+  Works like adb forward, but in reverse.
+
+  Args:
+    adb: Instance of AndroidCommands for talking to the device.
+    ports: A list of tuples (device_port, host_port) to forward.
+    host_name: Optional. Address to forward to, must be addressable from the
+               host machine. Usually this is omitted and loopback is used.
+
+  Returns:
+    subprocess instance connected to the forwarder process on the device.
+  """
+  adb.PushIfNeeded(
+      os.path.join(CHROME_DIR, 'out', 'Release', 'forwarder'), FORWARDER_PATH)
+  forward_string = ['%d:%d:%s' %
+                    (device, host, host_name) for device, host in ports]
+  logging.info("Forwarding ports: %s" % (forward_string))
+
+  return subprocess.Popen(
+      ['adb', '-s', adb._adb.GetSerialNumber(),
+       'shell', '%s -D %s' % (FORWARDER_PATH, ' '.join(forward_string))])
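+
+# Illustrative usage (assuming an AndroidCommands instance named adb):
+#   ForwardDevicePorts(adb, [(8000, 8000)])
+# pushes the forwarder binary if needed and then runs
+#   adb -s <serial> shell /data/local/tmp/forwarder -D 8000:8000:127.0.0.1
+# so that requests to port 8000 on the device reach port 8000 on the host.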
+
+
+def IsDevicePortUsed(adb, device_port):
+  """Checks whether the specified device port is used or not.
+
+  Args:
+    adb: Instance of AndroidCommands for talking to the device.
+    device_port: Port on device we want to check.
+
+  Returns:
+    True if the port on device is already used, otherwise returns False.
+  """
+  base_url = '127.0.0.1:%d' % device_port
+  netstat_results = adb.RunShellCommand('netstat')
+  for single_connect in netstat_results:
+    # Column 3 is the local address which we want to check against.
+    if single_connect.split()[3] == base_url:
+      return True
+  return False
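+
+# For example, a netstat line such as (sample output for illustration only)
+#   tcp  0  0  127.0.0.1:8000  0.0.0.0:*  LISTEN
+# has '127.0.0.1:8000' in column 3, so IsDevicePortUsed(adb, 8000) returns True.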
diff --git a/build/android/single_test_runner.py b/build/android/single_test_runner.py
new file mode 100644
index 0000000..8ec9501
--- /dev/null
+++ b/build/android/single_test_runner.py
@@ -0,0 +1,314 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import sys
+
+from base_test_runner import BaseTestRunner
+import debug_info
+import run_tests_helper
+from test_package_executable import TestPackageExecutable
+from test_result import TestResults
+
+
+class SingleTestRunner(BaseTestRunner):
+  """Single test suite attached to a single device.
+
+  Args:
+    device: Device to run the tests.
+    test_suite: A specific test suite to run, empty to run all.
+    gtest_filter: A gtest_filter flag.
+    test_arguments: Additional arguments to pass to the test binary.
+    timeout: Timeout for each test.
+    rebaseline: Whether or not to run tests in isolation and update the filter.
+    performance_test: Whether or not this is a performance test.
+    cleanup_test_files: Whether or not to cleanup test files on device.
+    tool: Name of the Valgrind tool.
+    dump_debug_info: Whether or not to dump debug information.
+  """
+
+  def __init__(self, device, test_suite, gtest_filter, test_arguments, timeout,
+               rebaseline, performance_test, cleanup_test_files, tool,
+               dump_debug_info=False):
+    BaseTestRunner.__init__(self, device)
+    self._running_on_emulator = self.device.startswith('emulator')
+    self._gtest_filter = gtest_filter
+    self._test_arguments = test_arguments
+    self.test_results = TestResults()
+    if dump_debug_info:
+      self.dump_debug_info = debug_info.GTestDebugInfo(self.adb, device,
+           os.path.basename(test_suite), gtest_filter)
+    else:
+      self.dump_debug_info = None
+
+    self.test_package = TestPackageExecutable(self.adb, device,
+        test_suite, timeout, rebaseline, performance_test, cleanup_test_files,
+        tool, self.dump_debug_info)
+
+  def _GetHttpServerDocumentRootForTestSuite(self):
+    """Returns the document root needed by the test suite."""
+    if self.test_package.test_suite_basename == 'page_cycler_tests':
+      return os.path.join(run_tests_helper.CHROME_DIR, 'data', 'page_cycler')
+    return None
+
+  def _TestSuiteRequiresMockTestServer(self):
+    """Returns True if the test suite requires mock test server."""
+    return False
+  # TODO(yfriedman): Disabled because of flakiness.
+  # (self.test_package.test_suite_basename == 'unit_tests' or
+  #          self.test_package.test_suite_basename == 'net_unittests' or
+  #          False)
+
+  def _GetFilterFileName(self):
+    """Returns the filename of gtest filter."""
+    filter_dir = os.path.join(sys.path[0], 'gtest_filter')
+    filter_name = self.test_package.test_suite_basename + '_disabled'
+    disabled_filter = os.path.join(filter_dir, filter_name)
+    return disabled_filter
+
+  def _GetAdditionalEmulatorFilterName(self):
+    """Returns the filename of additional gtest filter for emulator."""
+    filter_dir = os.path.join(sys.path[0], 'gtest_filter')
+    filter_name = '%s%s' % (self.test_package.test_suite_basename,
+        '_emulator_additional_disabled')
+    disabled_filter = os.path.join(filter_dir, filter_name)
+    return disabled_filter
+
+  def GetDisabledTests(self):
+    """Returns a list of disabled tests.
+
+    Returns:
+      A list of disabled tests obtained from gtest_filter/test_suite_disabled.
+    """
+    disabled_tests = run_tests_helper.GetExpectations(self._GetFilterFileName())
+    if self._running_on_emulator:
+      # Append emulator's filter file.
+      disabled_tests.extend(run_tests_helper.GetExpectations(
+          self._GetAdditionalEmulatorFilterName()))
+    return disabled_tests
+
+  def UpdateFilter(self, failed_tests):
+    """Updates test_suite_disabled file with the new filter (deletes if empty).
+
+    If running on the emulator, only the failed tests that are not already in
+    the normal filter returned by _GetFilterFileName() are written to the
+    emulator's additional filter file.
+
+    Args:
+      failed_tests: A sorted list of failed tests.
+    """
+    disabled_tests = []
+    if not self._running_on_emulator:
+      filter_file_name = self._GetFilterFileName()
+    else:
+      filter_file_name = self._GetAdditionalEmulatorFilterName()
+      disabled_tests.extend(
+          run_tests_helper.GetExpectations(self._GetFilterFileName()))
+      logging.info('About to update emulator\'s additional filter (%s).'
+          % filter_file_name)
+
+    new_failed_tests = []
+    if failed_tests:
+      for test in failed_tests:
+        if test.name not in disabled_tests:
+          new_failed_tests.append(test.name)
+
+    if not new_failed_tests:
+      if os.path.exists(filter_file_name):
+        os.unlink(filter_file_name)
+      return
+
+    filter_file = file(filter_file_name, 'w')
+    if self._running_on_emulator:
+      filter_file.write('# Additional list of suppressions from emulator\n')
+    else:
+      filter_file.write('# List of suppressions\n')
+    filter_file.write('#\n# Automatically generated by run_tests.py\n')
+    filter_file.write('\n'.join(sorted(new_failed_tests)))
+    filter_file.write('\n')
+    filter_file.close()
+
+  def GetDataFilesForTestSuite(self):
+    """Returns a list of data files/dirs needed by the test suite."""
+    # Ideally, we'd just push all test data. However, it has >100MB, and a lot
+    # of the files are not relevant (some are used for browser_tests, others for
+    # features not supported, etc..).
+    if self.test_package.test_suite_basename in ['base_unittests',
+                                                 'sql_unittests',
+                                                 'unit_tests']:
+      return [
+          'net/data/cache_tests/insert_load1',
+          'net/data/cache_tests/dirty_entry5',
+          'ui/base/test/data/data_pack_unittest',
+          'chrome/test/data/bookmarks/History_with_empty_starred',
+          'chrome/test/data/bookmarks/History_with_starred',
+          'chrome/test/data/extensions/json_schema_test.js',
+          'chrome/test/data/History/',
+          'chrome/test/data/json_schema_validator/',
+          'chrome/test/data/serializer_nested_test.js',
+          'chrome/test/data/serializer_test.js',
+          'chrome/test/data/serializer_test_nowhitespace.js',
+          'chrome/test/data/top_sites/',
+          'chrome/test/data/web_database',
+          'chrome/test/data/zip',
+          ]
+    elif self.test_package.test_suite_basename == 'net_unittests':
+      return [
+          'net/data/cache_tests',
+          'net/data/filter_unittests',
+          'net/data/ftp',
+          'net/data/proxy_resolver_v8_unittest',
+          'net/data/ssl/certificates',
+          ]
+    elif self.test_package.test_suite_basename == 'ui_tests':
+      return [
+          'chrome/test/data/dromaeo',
+          'chrome/test/data/json2.js',
+          'chrome/test/data/sunspider',
+          'chrome/test/data/v8_benchmark',
+          'chrome/test/ui/sunspider_uitest.js',
+          'chrome/test/ui/v8_benchmark_uitest.js',
+          ]
+    elif self.test_package.test_suite_basename == 'page_cycler_tests':
+      data = [
+          'tools/page_cycler',
+          'data/page_cycler',
+          ]
+      for d in data:
+        if not os.path.exists(d):
+          raise Exception('Page cycler data not found.')
+      return data
+    elif self.test_package.test_suite_basename == 'webkit_unit_tests':
+      return [
+          'third_party/WebKit/Source/WebKit/chromium/tests/data',
+          ]
+    return []
+
+  def LaunchHelperToolsForTestSuite(self):
+    """Launches helper tools for the test suite.
+
+    Sometimes one test may need to run some helper tools first in order to
+    successfully complete the test.
+    """
+    document_root = self._GetHttpServerDocumentRootForTestSuite()
+    if document_root:
+      self.LaunchTestHttpServer(document_root)
+    if self._TestSuiteRequiresMockTestServer():
+      self.LaunchChromeTestServerSpawner()
+
+  def StripAndCopyFiles(self):
+    """Strips and copies the required data files for the test suite."""
+    self.test_package.StripAndCopyExecutable()
+    self.test_package.tool.CopyFiles()
+    test_data = self.GetDataFilesForTestSuite()
+    if test_data:
+      if self.test_package.test_suite_basename == 'page_cycler_tests':
+        # Since the page cycler test data is huge (around 200M), we store it
+        # on the sdcard and later create symbolic links mapping it into
+        # /data/local/tmp/.
+        self.CopyTestData(test_data, '/sdcard/')
+        for p in [os.path.dirname(d) for d in test_data if os.path.isdir(d)]:
+          mapped_device_path = '/data/local/tmp/' + p
+          # Unlink the mapped_device_path first in case it was mapped to
+          # a wrong path. Add option '-r' because the old path could be a dir.
+          self.adb.RunShellCommand('rm -r %s' % mapped_device_path)
+          self.adb.RunShellCommand(
+              'ln -s /sdcard/%s %s' % (p, mapped_device_path))
+      else:
+        self.CopyTestData(test_data, '/data/local/tmp/')
+
+  def RunTestsWithFilter(self):
+    """Runs the tests via a small, temporary shell script."""
+    self.test_package.CreateTestRunnerScript(
+        self._gtest_filter, self._test_arguments)
+    self.test_results = self.test_package.RunTestsAndListResults()
+
+  def RebaselineTests(self):
+    """Runs all available tests, restarting in case of failures."""
+    if self._gtest_filter:
+      all_tests = set(self._gtest_filter.split(':'))
+    else:
+      all_tests = set(self.test_package.GetAllTests())
+    failed_results = set()
+    executed_results = set()
+    while True:
+      executed_names = set([f.name for f in executed_results])
+      self._gtest_filter = ':'.join(all_tests - executed_names)
+      self.RunTestsWithFilter()
+      failed_results.update(self.test_results.crashed,
+          self.test_results.failed)
+      executed_results.update(self.test_results.crashed,
+                              self.test_results.failed,
+                              self.test_results.ok)
+      executed_names = set([f.name for f in executed_results])
+      logging.info('*' * 80)
+      logging.info(self.device)
+      logging.info('Executed: ' + str(len(executed_names)) + ' of ' +
+                   str(len(all_tests)))
+      logging.info('Failed so far: ' + str(len(failed_results)) + ' ' +
+                   str([f.name for f in failed_results]))
+      logging.info('Remaining: ' + str(len(all_tests - executed_names)) + ' ' +
+                   str(all_tests - executed_names))
+      logging.info('*' * 80)
+      if executed_names == all_tests:
+        break
+    self.test_results = TestResults.FromOkAndFailed(list(executed_results -
+                                                         failed_results),
+                                                    list(failed_results))
+
+  def _RunTestsForSuiteInternal(self):
+    """Runs all tests (in rebaseline mode, run each test in isolation).
+
+    Returns:
+      A TestResults object.
+    """
+    if self.test_package.rebaseline:
+      self.RebaselineTests()
+    else:
+      if not self._gtest_filter:
+        self._gtest_filter = ('-' + ':'.join(self.GetDisabledTests()) + ':' +
+                             ':'.join(['*.' + x + '*' for x in
+                                     self.test_package.GetDisabledPrefixes()]))
+      self.RunTestsWithFilter()
+
+  def SetUp(self):
+    """Sets up the necessary test environment for the test suite."""
+    super(SingleTestRunner, self).SetUp()
+    if self.test_package.performance_test:
+      if run_tests_helper.IsRunningAsBuildbot():
+        self.adb.SetJavaAssertsEnabled(enable=False)
+        self.adb.Reboot(full_reboot=False)
+      self.adb.SetupPerformanceTest()
+    if self.dump_debug_info:
+      self.dump_debug_info.StartRecordingLog(True)
+    self.StripAndCopyFiles()
+    self.LaunchHelperToolsForTestSuite()
+    self.test_package.tool.SetupEnvironment()
+
+  def TearDown(self):
+    """Cleans up the test environment for the test suite."""
+    super(SingleTestRunner, self).TearDown()
+    self.test_package.tool.CleanUpEnvironment()
+    if self.test_package.cleanup_test_files:
+      self.adb.RemovePushedFiles()
+    if self.dump_debug_info:
+      self.dump_debug_info.StopRecordingLog()
+    if self.test_package.performance_test:
+      self.adb.TearDownPerformanceTest()
+
+  def RunTests(self):
+    """Runs the tests and cleans up the files once finished.
+
+    Returns:
+      A TestResults object.
+    """
+    self.SetUp()
+    try:
+      self._RunTestsForSuiteInternal()
+    finally:
+      self.TearDown()
+    return self.test_results
diff --git a/build/android/system.gyp b/build/android/system.gyp
new file mode 100644
index 0000000..dd1d521
--- /dev/null
+++ b/build/android/system.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_OPENSSL',
+        ],
+        'include_dirs': [
+          '../../third_party/openssl/openssl/include',
+          '../../third_party/openssl/config/android',
+        ],
+      },
+      'dependencies': [
+        '../../third_party/openssl/openssl.gyp:openssl',
+      ],
+    },
+  ],
+}
diff --git a/build/android/test_package.py b/build/android/test_package.py
new file mode 100644
index 0000000..433e7f1
--- /dev/null
+++ b/build/android/test_package.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import re
+import os
+import pexpect
+
+from perf_tests_helper import PrintPerfResult
+from test_result import BaseTestResult, TestResults
+from valgrind_tools import CreateTool
+
+
+# TODO(bulach): TestPackage, TestPackageExecutable and
+# TestPackageApk are a work in progress related to making the native tests
+# run as a NDK-app from an APK rather than a stand-alone executable.
+class TestPackage(object):
+  """A helper base class for both APK and stand-alone executables.
+
+  Args:
+    adb: ADB interface the tests are using.
+    device: Device to run the tests.
+    test_suite: A specific test suite to run, empty to run all.
+    timeout: Timeout for each test.
+    rebaseline: Whether or not to run tests in isolation and update the filter.
+    performance_test: Whether or not this is a performance test.
+    cleanup_test_files: Whether or not to cleanup test files on device.
+    tool: Name of the Valgrind tool.
+    dump_debug_info: A debug_info object.
+  """
+
+  def __init__(self, adb, device, test_suite, timeout, rebaseline,
+               performance_test, cleanup_test_files, tool, dump_debug_info):
+    self.adb = adb
+    self.device = device
+    self.test_suite = os.path.splitext(test_suite)[0]
+    self.test_suite_basename = os.path.basename(self.test_suite)
+    self.test_suite_dirname = os.path.dirname(self.test_suite)
+    self.rebaseline = rebaseline
+    self.performance_test = performance_test
+    self.cleanup_test_files = cleanup_test_files
+    self.tool = CreateTool(tool, self.adb)
+    if timeout == 0:
+      if self.test_suite_basename == 'page_cycler_tests':
+        timeout = 900
+      else:
+        timeout = 60
+    # On a VM (e.g. chromium buildbots), this timeout is way too small.
+    if os.environ.get('BUILDBOT_SLAVENAME'):
+      timeout = timeout * 2
+    self.timeout = timeout * self.tool.GetTimeoutScale()
+    self.dump_debug_info = dump_debug_info
+
+  def _BeginGetIOStats(self):
+    """Gets I/O statistics before running the test.
+
+    Returns:
+      Tuple of (I/O stats object, ready-to-continue flag). The flag is False
+      when an error is encountered and True otherwise. The I/O stats object
+      may be None if the test is not a performance test.
+    """
+    initial_io_stats = None
+    # Try to get the disk I/O statistics for all performance tests.
+    if self.performance_test and not self.rebaseline:
+      initial_io_stats = self.adb.GetIoStats()
+      # Get rid of the noise introduced by launching Chrome for page cycler.
+      if self.test_suite_basename == 'page_cycler_tests':
+        try:
+          chrome_launch_done_re = re.compile(
+              re.escape('Finish waiting for browser launch!'))
+          self.adb.WaitForLogMatch(chrome_launch_done_re)
+          initial_io_stats = self.adb.GetIoStats()
+        except pexpect.TIMEOUT:
+          logging.error('Test terminated because the Chrome launcher did not '
+                        'respond within 120 seconds.')
+          return (None, False)
+        finally:
+          if self.dump_debug_info:
+            self.dump_debug_info.TakeScreenshot('_Launch_Chrome_')
+    return (initial_io_stats, True)
+
+  def _EndGetIOStats(self, initial_io_stats):
+    """Gets I/O statistics after running the test and calculates the I/O delta.
+
+    Args:
+      initial_io_stats: I/O stats object obtained from _BeginGetIOStats.
+
+    Returns:
+      String of the formatted disk I/O statistics.
+    """
+    disk_io = ''
+    if self.performance_test and initial_io_stats:
+      final_io_stats = self.adb.GetIoStats()
+      for stat in final_io_stats:
+        disk_io += '\n' + PrintPerfResult(stat, stat,
+                                          [final_io_stats[stat] -
+                                           initial_io_stats[stat]],
+                                          stat.split('_')[1], True, False)
+      logging.info(disk_io)
+    return disk_io
+
+  def GetDisabledPrefixes(self):
+    return ['DISABLED_', 'FLAKY_', 'FAILS_']
+
+  def _ParseGTestListTests(self, all_tests):
+    ret = []
+    current = ''
+    disabled_prefixes = self.GetDisabledPrefixes()
+    for test in all_tests:
+      if not test:
+        continue
+      if test[0] != ' ':
+        current = test
+        continue
+      if 'YOU HAVE' in test:
+        break
+      test_name = test[2:]
+      if not any([test_name.startswith(x) for x in disabled_prefixes]):
+        ret += [current + test_name]
+    return ret
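+
+  # Illustrative input/output (hypothetical test names): given
+  # --gtest_list_tests output such as
+  #   FooTest.
+  #     Bar
+  #     DISABLED_Baz
+  # _ParseGTestListTests returns ['FooTest.Bar'], dropping tests whose names
+  # start with a disabled prefix.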
+
+  def _WatchTestOutput(self, p):
+    """Watches the test output.
+
+    Args:
+      p: The process generating output, as created by pexpect.spawn.
+    """
+    ok_tests = []
+    failed_tests = []
+    re_run = re.compile('\[ RUN      \] ?(.*)\r\n')
+    re_fail = re.compile('\[  FAILED  \] ?(.*)\r\n')
+    re_ok = re.compile('\[       OK \] ?(.*)\r\n')
+    (io_stats_before, ready_to_continue) = self._BeginGetIOStats()
+    while ready_to_continue:
+      found = p.expect([re_run, pexpect.EOF], timeout=self.timeout)
+      if found == 1:  # matched pexpect.EOF
+        break
+      if self.dump_debug_info:
+        self.dump_debug_info.TakeScreenshot('_Test_Start_Run_')
+      full_test_name = p.match.group(1)
+      found = p.expect([re_ok, re_fail, pexpect.EOF, pexpect.TIMEOUT],
+                       timeout=self.timeout)
+      if found == 0:  # re_ok
+        ok_tests += [BaseTestResult(full_test_name.replace('\r', ''),
+                                    p.before)]
+        continue
+      failed_tests += [BaseTestResult(full_test_name.replace('\r', ''),
+                                      p.before)]
+      if found >= 2:
+        # The test crashed / bailed out (i.e., didn't print OK or FAIL).
+        if found == 3:  # pexpect.TIMEOUT
+          logging.error('Test terminated after %d second timeout.',
+                        self.timeout)
+        break
+    p.close()
+    if not self.rebaseline and ready_to_continue:
+      ok_tests += self._EndGetIOStats(io_stats_before)
+      ret_code = self._GetGTestReturnCode()
+      if ret_code:
+        failed_tests += [BaseTestResult('gtest exit code: %d' % ret_code,
+                                        'pexpect.before: %s'
+                                        '\npexpect.after: %s'
+                                        % (p.before,
+                                           p.after))]
+    return TestResults.FromOkAndFailed(ok_tests, failed_tests)
diff --git a/build/android/test_package_executable.py b/build/android/test_package_executable.py
new file mode 100644
index 0000000..badea4a
--- /dev/null
+++ b/build/android/test_package_executable.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+import os
+import pexpect
+import shutil
+import sys
+import tempfile
+
+import cmd_helper
+from test_package import TestPackage
+
+
+class TestPackageExecutable(TestPackage):
+  """A helper class for running stand-alone executables."""
+
+  _TEST_RUNNER_RET_VAL_FILE = '/data/local/tmp/gtest_retval'
+
+  def __init__(self, adb, device, test_suite, timeout, rebaseline,
+               performance_test, cleanup_test_files, tool, dump_debug_info,
+               symbols_dir=None):
+    """
+    Args:
+      adb: ADB interface the tests are using.
+      device: Device to run the tests.
+      test_suite: A specific test suite to run, empty to run all.
+      timeout: Timeout for each test.
+      rebaseline: Whether or not to run tests in isolation and update the
+          filter.
+      performance_test: Whether or not this is a performance test.
+      cleanup_test_files: Whether or not to cleanup test files on device.
+      tool: Name of the Valgrind tool.
+      dump_debug_info: A debug_info object.
+      symbols_dir: Directory to put the stripped binaries.
+    """
+    TestPackage.__init__(self, adb, device, test_suite, timeout,
+                         rebaseline, performance_test, cleanup_test_files,
+                         tool, dump_debug_info)
+    self.symbols_dir = symbols_dir
+
+  def _GetGTestReturnCode(self):
+    ret = None
+    ret_code_file = tempfile.NamedTemporaryFile()
+    try:
+      if not self.adb.Adb().Pull(
+          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE, ret_code_file.name):
+        logging.critical('Unable to pull gtest ret val file %s',
+                         ret_code_file.name)
+        raise ValueError
+      ret_code = file(ret_code_file.name).read()
+      ret = int(ret_code)
+    except ValueError:
+      logging.critical('Error reading gtest ret val file %s [%s]',
+                       ret_code_file.name, ret_code)
+      ret = 1
+    return ret
+
+  def _AddNativeCoverageExports(self):
+    # export GCOV_PREFIX sets the path for native coverage results.
+    # export GCOV_PREFIX_STRIP indicates how many initial directory
+    #                          names to strip off the hardwired absolute paths.
+    #                          This value is calculated in buildbot.sh and
+    #                          depends on where the tree is built.
+    # Ex: /usr/local/google/code/chrome will become
+    #     /code/chrome if GCOV_PREFIX_STRIP=3
+    try:
+      depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+    except KeyError:
+      logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
+                   'No native coverage.')
+      return ''
+    export_string = 'export GCOV_PREFIX="/data/local/gcov"\n'
+    export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
+    return export_string
+
+  def GetAllTests(self):
+    """Returns a list of all tests available in the test suite."""
+    all_tests = self.adb.RunShellCommand(
+        '/data/local/%s --gtest_list_tests' % self.test_suite_basename)
+    return self._ParseGTestListTests(all_tests)
+
+  def CreateTestRunnerScript(self, gtest_filter, test_arguments):
+    """Creates a test runner script and pushes to the device.
+
+    Args:
+      gtest_filter: A gtest_filter flag.
+      test_arguments: Additional arguments to pass to the test binary.
+    """
+    tool_wrapper = self.tool.GetTestWrapper()
+    sh_script_file = tempfile.NamedTemporaryFile()
+    # We need to capture the exit status from the script since adb shell won't
+    # propagate it to us.
+    sh_script_file.write('cd /data/local\n'
+                         '%s'
+                         '%s /data/local/%s --gtest_filter=%s %s\n'
+                         'echo $? > %s' %
+                         (self._AddNativeCoverageExports(),
+                          tool_wrapper, self.test_suite_basename,
+                          gtest_filter, test_arguments,
+                          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
+    sh_script_file.flush()
+    cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
+    self.adb.PushIfNeeded(sh_script_file.name,
+                          '/data/local/chrome_test_runner.sh')
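+
+  # Illustrative example (hypothetical filter and arguments): with no tool
+  # wrapper and no coverage exports, the generated chrome_test_runner.sh for
+  # base_unittests looks roughly like
+  #   cd /data/local
+  #   /data/local/base_unittests --gtest_filter=FooTest.* --extra-arg
+  #   echo $? > /data/local/tmp/gtest_retval
+  # so the gtest exit status survives the adb shell boundary.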
+
+  def RunTestsAndListResults(self):
+    """Runs all the tests and checks for failures.
+
+    Returns:
+      A TestResults object.
+    """
+    args = ['adb', '-s', self.device, 'shell', 'sh',
+            '/data/local/chrome_test_runner.sh']
+    logging.info(args)
+    p = pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
+    return self._WatchTestOutput(p)
+
+  def StripAndCopyExecutable(self):
+    """Strips and copies the executable to the device."""
+    if self.tool.NeedsDebugInfo():
+      target_name = self.test_suite
+    elif self.test_suite_basename == 'webkit_unit_tests':
+      # webkit_unit_tests has already been stripped in the build step.
+      target_name = self.test_suite
+    else:
+      target_name = self.test_suite + '_' + self.device + '_stripped'
+      should_strip = True
+      if os.path.isfile(target_name):
+        logging.info('Found target file %s' % target_name)
+        target_mtime = os.stat(target_name).st_mtime
+        source_mtime = os.stat(self.test_suite).st_mtime
+        if target_mtime > source_mtime:
+          logging.info('Target mtime (%d) is newer than source (%d), assuming '
+                       'no change.' % (target_mtime, source_mtime))
+          should_strip = False
+
+      if should_strip:
+        logging.info('Did not find up-to-date stripped binary. Generating a '
+                     'new one (%s).' % target_name)
+        # Whenever we generate a stripped binary, copy to the symbols dir. If we
+        # aren't stripping a new binary, assume it's there.
+        if self.symbols_dir:
+          if not os.path.exists(self.symbols_dir):
+            os.makedirs(self.symbols_dir)
+          shutil.copy(self.test_suite, self.symbols_dir)
+        strip = os.environ['STRIP']
+        cmd_helper.RunCmd([strip, self.test_suite, '-o', target_name])
+    test_binary = '/data/local/' + self.test_suite_basename
+    self.adb.PushIfNeeded(target_name, test_binary)
diff --git a/build/android/test_result.py b/build/android/test_result.py
new file mode 100644
index 0000000..eb468f3
--- /dev/null
+++ b/build/android/test_result.py
@@ -0,0 +1,106 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import logging
+
+
+# Language values match constants in Sponge protocol buffer (sponge.proto).
+JAVA = 5
+PYTHON = 7
+
+
+class BaseTestResult(object):
+  """A single result from a unit test."""
+
+  def __init__(self, name, log):
+    self.name = name
+    self.log = log
+
+
+class SingleTestResult(BaseTestResult):
+  """Result information for a single test.
+
+  Args:
+    full_name: Full name of the test.
+    start_date: Date in milliseconds when the test began running.
+    dur: Duration of the test run in milliseconds.
+    lang: Language of the test (JAVA or PYTHON).
+    log: An optional string listing any errors.
+    error: A tuple of a short error message and a longer version used by Sponge
+        if the test resulted in a fail or error. An empty tuple implies a
+        pass.
+  """
+
+  def __init__(self, full_name, start_date, dur, lang, log='', error=()):
+    BaseTestResult.__init__(self, full_name, log)
+    name_pieces = full_name.rsplit('#')
+    if len(name_pieces) > 1:
+      self.test_name = name_pieces[1]
+      self.class_name = name_pieces[0]
+    else:
+      self.class_name = full_name
+      self.test_name = full_name
+    self.start_date = start_date
+    self.dur = dur
+    self.error = error
+    self.lang = lang
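+
+  # For example (hypothetical name), full_name 'FooTest#testBar' yields
+  # class_name 'FooTest' and test_name 'testBar'; a full_name without '#' is
+  # used for both fields.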
+
+
+class TestResults(object):
+  """Results of a test run."""
+
+  def __init__(self):
+    self.ok = []
+    self.failed = []
+    self.crashed = []
+    self.unknown = []
+    self.disabled = []
+    self.unexpected_pass = []
+
+  @staticmethod
+  def FromOkAndFailed(ok, failed):
+    ret = TestResults()
+    ret.ok = ok
+    ret.failed = failed
+    return ret
+
+  @staticmethod
+  def FromTestResults(results):
+    """Combines a list of results into a single TestResults object."""
+    ret = TestResults()
+    for t in results:
+      ret.ok += t.ok
+      ret.failed += t.failed
+      ret.crashed += t.crashed
+      ret.unknown += t.unknown
+      ret.disabled += t.disabled
+      ret.unexpected_pass += t.unexpected_pass
+    return ret
+
+  def _Log(self, sorted_list):
+    for t in sorted_list:
+      logging.critical(t.name)
+      if t.log:
+        logging.critical(t.log)
+
+  def GetAllBroken(self):
+    """Returns the all broken tests including failed, crashed, unknown."""
+    return self.failed + self.crashed + self.unknown
+
+  def LogFull(self):
+    """Output all broken tests or 'passed' if none broken"""
+    logging.critical('*' * 80)
+    logging.critical('Final result')
+    if self.failed:
+      logging.critical('Failed:')
+      self._Log(sorted(self.failed))
+    if self.crashed:
+      logging.critical('Crashed:')
+      self._Log(sorted(self.crashed))
+    if self.unknown:
+      logging.critical('Unknown:')
+      self._Log(sorted(self.unknown))
+    if not self.GetAllBroken():
+      logging.critical('Passed')
+    logging.critical('*' * 80)
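As a quick, hypothetical illustration of how the classes above compose (the test names and log strings are made up, not taken from the code drop):

shard_a = TestResults.FromOkAndFailed(
    [BaseTestResult('FooTest.Passes', '')],
    [BaseTestResult('FooTest.Fails', 'assertion failed')])
shard_b = TestResults.FromOkAndFailed([BaseTestResult('BarTest.Passes', '')], [])

combined = TestResults.FromTestResults([shard_a, shard_b])
combined.LogFull()  # Logs 'Failed:' plus FooTest.Fails; would log 'Passed'
                    # if nothing were broken.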
diff --git a/build/android/valgrind_tools.py b/build/android/valgrind_tools.py
new file mode 100644
index 0000000..bc917e6
--- /dev/null
+++ b/build/android/valgrind_tools.py
@@ -0,0 +1,185 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles().
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles().
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+
+import os.path
+import sys
+
+from run_tests_helper import CHROME_DIR
+
+
+class BaseTool(object):
+  """A tool that does nothing."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ''
+
+  def CopyFiles(self):
+    """Copies tool-specific files to the device, create directories, etc."""
+    pass
+
+  def SetupEnvironment(self):
+    """Sets up the system environment for a test.
+
+    This is a good place to set system properties.
+    """
+    pass
+
+  def CleanUpEnvironment(self):
+    """Cleans up environment."""
+    pass
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 1.0
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns True if this tool cannot work with stripped binaries.
+    """
+    return False
+
+
+class ValgrindTool(BaseTool):
+  """Base abstract class for Valgrind tools."""
+
+  VG_DIR = '/data/local/tmp/valgrind'
+  VGLOGS_DIR = '/data/local/tmp/vglogs'
+
+  def __init__(self, adb, renderer=False):
+    self.adb = adb
+    if renderer:
+      # exactly 31 chars, SystemProperties::PROP_NAME_MAX
+      self.wrap_property = 'wrap.com.android.chrome:sandbox'
+    else:
+      self.wrap_property = 'wrap.com.android.chrome'
+
+  def CopyFiles(self):
+    """Copies Valgrind tools to the device."""
+    self.adb.RunShellCommand('rm -r %s; mkdir %s' %
+                             (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
+    self.adb.RunShellCommand('rm -r %s; mkdir %s' %
+                             (ValgrindTool.VGLOGS_DIR, ValgrindTool.VGLOGS_DIR))
+    files = self.GetFilesForTool()
+    for f in files:
+      self.adb.PushIfNeeded(os.path.join(CHROME_DIR, f),
+                            os.path.join(ValgrindTool.VG_DIR,
+                                         os.path.basename(f)))
+
+  def SetupEnvironment(self):
+    """Sets up device environment."""
+    self.adb.RunShellCommand('chmod 777 /data/local/tmp')
+    self.adb.RunShellCommand('setprop %s "logwrapper %s"' % (
+        self.wrap_property, self.GetTestWrapper()))
+    self.adb.RunShellCommand('setprop chrome.timeout_scale %f' % (
+        self.GetTimeoutScale()))
+
+  def CleanUpEnvironment(self):
+    """Cleans up device environment."""
+    self.adb.RunShellCommand('setprop %s ""' % (self.wrap_property,))
+    self.adb.RunShellCommand('setprop chrome.timeout_scale ""')
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    raise NotImplementedError()
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns True if this tool cannot work with stripped binaries.
+    """
+    return True
+
+
+class MemcheckTool(ValgrindTool):
+  """Memcheck tool."""
+
+  def __init__(self, adb, renderer=False):
+    super(MemcheckTool, self).__init__(adb, renderer)
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper.sh',
+            'tools/valgrind/memcheck/suppressions.txt',
+            'tools/valgrind/memcheck/suppressions_android.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30
+
+
+class TSanTool(ValgrindTool):
+  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
+
+  def __init__(self, adb, renderer=False):
+    super(TSanTool, self).__init__(adb, renderer)
+
+  def GetFilesForTool(self):
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
+            'tools/valgrind/tsan/suppressions.txt',
+            'tools/valgrind/tsan/suppressions_android.txt',
+            'tools/valgrind/tsan/ignores.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30
+
+
+TOOL_REGISTRY = {
+  'memcheck': lambda x: MemcheckTool(x, False),
+  'memcheck-renderer': lambda x: MemcheckTool(x, True),
+  'tsan': lambda x: TSanTool(x, False),
+  'tsan-renderer': lambda x: TSanTool(x, True)
+}
+
+
+def CreateTool(tool_name, adb):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    adb: ADB interface the tool will use.
+  """
+  if not tool_name:
+    return BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(adb)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+      tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
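A hypothetical sketch of the two workflows described in the module docstring; device_adb stands in for whatever adb wrapper object the caller already holds and the test path is illustrative, neither is defined in this file:

tool = CreateTool('memcheck', device_adb)

# 1. Test that runs a plain native process: copy the tool's files, then
#    prepend the wrapper to the test command line.
tool.CopyFiles()
cmd = '%s /data/local/some_gtest_binary' % tool.GetTestWrapper()

# 2. Test that spawns an activity: bracket the run with environment setup.
tool.CopyFiles()
tool.SetupEnvironment()
# ... run the test as usual ...
tool.CleanUpEnvironment()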
diff --git a/build/apply_locales.py b/build/apply_locales.py
new file mode 100755
index 0000000..6af7280
--- /dev/null
+++ b/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
+
+import sys
+import optparse
+
+def main(argv):
+
+  parser = optparse.OptionParser()
+  usage = 'usage: %s [options ...] format_string locale_list'
+  parser.set_usage(usage.replace('%s', '%prog'))
+  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+                    default=False,
+                    help='map "en-US" to "en" and "-" to "_" in locales')
+
+  (options, arglist) = parser.parse_args(argv)
+
+  if len(arglist) < 3:
+    print 'ERROR: need string and list of locales'
+    return 1
+
+  str_template = arglist[1]
+  locales = arglist[2:]
+
+  results = []
+  for locale in locales:
+    # For Cocoa to find the locale at runtime, it needs to use '_' instead
+    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
+    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+    if options.dash_to_underscore:
+      if locale == 'en-US':
+        locale = 'en'
+      locale = locale.replace('-', '_')
+    results.append(str_template.replace('ZZLOCALE', locale))
+
+  # Quote each element so filename spaces don't mess up GYP's attempt to parse
+  # it into a list.
+  print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
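As a hypothetical example, invoking the script as

  python build/apply_locales.py -d 'ZZLOCALE.lproj' en-US pt-BR

prints 'en.lproj' 'pt_BR.lproj', since -d maps 'en-US' to 'en' and '-' to '_' before each locale is substituted into the format string.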
diff --git a/build/asan.saves b/build/asan.saves
new file mode 100644
index 0000000..f844f7b
--- /dev/null
+++ b/build/asan.saves
@@ -0,0 +1,21 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file lists symbols that should not be stripped by Xcode from the binaries
+# built for Mac OS X using AddressSanitizer
+# (http://dev.chromium.org/developers/testing/addresssanitizer).
+
+___asan_init
+___asan_register_global
+___asan_register_globals
+___asan_report_load1
+___asan_report_load2
+___asan_report_load4
+___asan_report_load8
+___asan_report_load16
+___asan_report_store1
+___asan_report_store2
+___asan_report_store4
+___asan_report_store8
+___asan_report_store16
diff --git a/build/branding_value.sh b/build/branding_value.sh
new file mode 100755
index 0000000..9fcb550
--- /dev/null
+++ b/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files.  Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+#  branding_value.sh Chromium COPYRIGHT
+#  branding_value.sh Chromium PRODUCT_FULLNAME
+#
+
+set -e
+
+if [ $# -ne 2 ] ;  then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/build/build_config.h b/build/build_config.h
new file mode 100644
index 0000000..97b0b5a
--- /dev/null
+++ b/build/build_config.h
@@ -0,0 +1,148 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX)
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__APPLE__)
+#define OS_MACOSX 1
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__native_client__)
+#define OS_NACL 1
+#elif defined(__linux__)
+#define OS_LINUX 1
+// Use TOOLKIT_GTK on linux if TOOLKIT_VIEWS isn't defined.
+#if !defined(TOOLKIT_VIEWS)
+#define TOOLKIT_GTK
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#define TOOLKIT_GTK
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#define TOOLKIT_GTK
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#define TOOLKIT_GTK
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL) && defined(USE_NSS)
+#error Cannot use both OpenSSL and NSS
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
+    defined(OS_NACL)
+#define OS_POSIX 1
+#endif
+
+#if defined(OS_POSIX) && !defined(OS_MACOSX) && !defined(OS_ANDROID) && \
+    !defined(OS_NACL)
+#define USE_X11 1  // Use X for graphics.
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX)) && !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Use heapchecker.
+#if defined(OS_LINUX) && !defined(NO_HEAPCHECKER)
+#define USE_HEAPCHECKER 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#define WCHAR_T_IS_UNSIGNED 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base that manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_CHROMEOS)
+// Single define to trigger whether CrOS fonts have BCI on.
+// In that case font sizes/deltas should be adjusted.
+//define CROS_FONTS_USING_BCI
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/build/common.croc b/build/common.croc
new file mode 100644
index 0000000..7281bb4
--- /dev/null
+++ b/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#       croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include.  Specify these before the excludes, since rules
+    # are in order.
+    {
+      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+      'include' : 1,
+    },
+    # Don't include subversion or mercurial SCM dirs
+    {
+      'regexp' : '.*/(\\.svn|\\.hg)/',
+      'include' : 0,
+    },
+    # Don't include output dirs
+    {
+      'regexp' : '.*/(Debug|Release|sconsbuild|out|xcodebuild)/',
+      'include' : 0,
+    },
+    # Don't include third-party source
+    {
+      'regexp' : '.*/third_party/',
+      'include' : 0,
+    },
+    # We don't run the V8 test suite, so we don't care about V8 coverage.
+    {
+      'regexp' : '.*/v8/',
+      'include' : 0,
+    },
+  ],
+
+  # Paths to add source from
+  'add_files' : [
+    'CHROMIUM'
+  ],
+
+  # Statistics to print
+  'print_stats' : [
+    {
+      'stat' : 'files_executable',
+      'format' : '*RESULT FilesKnown: files_executable= %d files',
+    },
+    {
+      'stat' : 'files_instrumented',
+      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+    },
+    {
+      'stat' : '100.0 * files_instrumented / files_executable',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g',
+    },
+    {
+      'stat' : 'lines_executable',
+      'format' : '*RESULT LinesKnown: lines_known= %d lines',
+    },
+    {
+      'stat' : 'lines_instrumented',
+      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+      'group' : 'source',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+      'group' : 'test',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCovered: percent_covered= %g',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g',
+      'group' : 'source',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/common.gypi b/build/common.gypi
new file mode 100644
index 0000000..a37498f
--- /dev/null
+++ b/build/common.gypi
@@ -0,0 +1,2714 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via gyp_chromium,
+# since gyp_chromium is automatically forcing its inclusion.
+{
+  # Variables expected to be overridden on the GYP command line (-D) or by
+  # ~/.gyp/include.gypi.
+  'variables': {
+    # Putting a variables dict inside another variables dict looks kind of
+    # weird.  This is done so that 'host_arch', 'chromeos', etc are defined as
+    # variables within the outer variables dict here.  This is necessary
+    # to get these variables defined for the conditions within this variables
+    # dict that operate on these variables (e.g., for setting 'toolkit_views',
+    # we need to have 'chromeos' already set).
+    'variables': {
+      'variables': {
+        'includes': [
+          'use_skia_on_mac.gypi',
+        ],
+        'variables': {
+          # Whether we're building a ChromeOS build.
+          'chromeos%': 0,
+
+          # Whether we are using Views Toolkit
+          'toolkit_views%': 0,
+
+          # Whether the compositor is enabled on views.
+          'views_compositor%': 0,
+
+          # Whether or not we are building with the Aura window manager.
+          'use_aura%': 0,
+
+          # Use OpenSSL instead of NSS. Under development: see http://crbug.com/62803
+          'use_openssl%': 0,
+
+          # Disable Virtual keyboard support by default.
+          'use_virtual_keyboard%': 0,
+
+          # Default setting for use_skia on mac platform.
+          # This is typically overridden in use_skia_on_mac.gypi.
+          'use_skia_on_mac%': 0,
+        },
+        # Copy conditionally-set variables out one scope.
+        'chromeos%': '<(chromeos)',
+        'views_compositor%': '<(views_compositor)',
+        'use_aura%': '<(use_aura)',
+        'use_openssl%': '<(use_openssl)',
+        'use_virtual_keyboard%': '<(use_virtual_keyboard)',
+        'use_skia_on_mac%': '<(use_skia_on_mac)',
+
+        # Compute the architecture that we're building on.
+        'conditions': [
+          [ 'OS=="win" or OS=="mac"', {
+            'host_arch%': 'ia32',
+          }, {
+            # This handles the Unix platforms for which there is some support.
+            # Anything else gets passed through, which probably won't work very
+            # well; such hosts should pass an explicit target_arch to gyp.
+            'host_arch%':
+              '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/i86pc/ia32/")',
+          }],
+
+          # Set default value of toolkit_views based on OS.
+          ['OS=="win" or chromeos==1 or use_aura==1', {
+            'toolkit_views%': 1,
+          }, {
+            'toolkit_views%': 0,
+          }],
+
+          # Use the views compositor when using the Aura window manager.
+          ['use_aura==1', {
+            'views_compositor%': 1,
+          }],
+
+          # Use the WebKit compositor for ui, when Aura is on.
+          ['use_aura==1', {
+            'use_webkit_compositor%': 1,
+          }, {
+            'use_webkit_compositor%': 0,
+          }],
+        ],
+      },
+
+      # Copy conditionally-set variables out one scope.
+      'chromeos%': '<(chromeos)',
+      'host_arch%': '<(host_arch)',
+      'toolkit_views%': '<(toolkit_views)',
+      'views_compositor%': '<(views_compositor)',
+      'use_webkit_compositor%': '<(use_webkit_compositor)',
+      'use_aura%': '<(use_aura)',
+      'use_openssl%': '<(use_openssl)',
+      'use_virtual_keyboard%': '<(use_virtual_keyboard)',
+      'use_skia_on_mac%': '<(use_skia_on_mac)',
+
+      # We used to provide a variable for changing how libraries were built.
+      # This variable remains until we can clean up all the users.
+      # This needs to be in one nested variables dict so that dependent
+      # gyp files can make use of it in their outer variables.  (Yikes!)
+      # http://code.google.com/p/chromium/issues/detail?id=83308
+      'library%': 'static_library',
+
+      # Override branding to select the desired branding flavor.
+      'branding%': 'Chromium',
+
+      # Override buildtype to select the desired build flavor.
+      # Dev - everyday build for development/testing
+      # Official - release build (generally implies additional processing)
+      # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+      # conversion is done), some of the things which are now controlled by
+      # 'branding', such as symbol generation, will need to be refactored based
+      # on 'buildtype' (i.e. we don't care about saving symbols for non-Official
+      # builds).
+      'buildtype%': 'Dev',
+
+      # Default architecture we're building for is the architecture we're
+      # building on.
+      'target_arch%': '<(host_arch)',
+
+      # This variable tells WebCore.gyp and JavaScriptCore.gyp whether they are
+      # built under a chromium full build (1) or a webkit.org chromium
+      # build (0).
+      'inside_chromium_build%': 1,
+
+      # Set to 1 to enable fast builds. It disables debug info for fastest
+      # compilation.
+      'fastbuild%': 0,
+
+      # Set to 1 to enable dcheck in release without having to use the flag.
+      'dcheck_always_on%': 0,
+
+      # Disable file manager component extension by default.
+      'file_manager_extension%': 0,
+
+      # Enable WebUI TaskManager by default.
+      'webui_task_manager%': 1,
+
+      # Python version.
+      'python_ver%': '2.6',
+
+      # Set ARM-v7 compilation flags
+      'armv7%': 0,
+
+      # Set Neon compilation flags (only meaningful if armv7==1).
+      'arm_neon%': 1,
+
+      # The system root for cross-compiles. Default: none.
+      'sysroot%': '',
+
+      # The system libdir used for this ABI.
+      'system_libdir%': 'lib',
+
+      # On Linux, we build with sse2 for Chromium builds.
+      'disable_sse2%': 0,
+
+      # Use libjpeg-turbo as the JPEG codec used by Chromium.
+      'use_libjpeg_turbo%': 1,
+
+      # Variable 'component' is for cases where we would like to build some
+      # components as dynamic shared libraries but still need variable
+      # 'library' for static libraries.
+      # By default, component is set to whatever library is set to and
+      # it can be overridden by the GYP command line or by ~/.gyp/include.gypi.
+      'component%': 'static_library',
+
+      # Set to select the Title Case versions of strings in GRD files.
+      'use_titlecase_in_grd_files%': 0,
+
+      # Use translations provided by volunteers at launchpad.net.  This
+      # currently only works on Linux.
+      'use_third_party_translations%': 0,
+
+      # Remoting compilation is enabled by default. Set to 0 to disable.
+      'remoting%': 1,
+
+      # Threaded compositing
+      'use_threaded_compositing%': 0,
+
+      # P2P APIs are compiled in by default. Set to 0 to disable.
+      # Also note that this should be enabled for remoting to compile.
+      'p2p_apis%': 1,
+
+      # Configuration policy is enabled by default. Set to 0 to disable.
+      'configuration_policy%': 1,
+
+      # Safe browsing is compiled in by default. Set to 0 to disable.
+      'safe_browsing%': 1,
+
+      # Speech input is compiled in by default. Set to 0 to disable.
+      'input_speech%': 1,
+
+      # Notifications are compiled in by default. Set to 0 to disable.
+      'notifications%' : 1,
+
+      # If this is set, the clang plugins used on the buildbot will be used.
+      # Run tools/clang/scripts/update.sh to make sure they are compiled.
+      # This causes 'clang_chrome_plugins_flags' to be set.
+      # Has no effect if 'clang' is not set as well.
+      'clang_use_chrome_plugins%': 0,
+
+      # Enable building with ASAN (Clang's -faddress-sanitizer option).
+      # -faddress-sanitizer only works with clang, but asan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+      'asan%': 0,
+
+      # Set to 1 compile with -fPIC cflag on linux. This is a must for shared
+      # libraries on linux x86-64 and arm, plus ASLR.
+      'linux_fpic%': 1,
+
+      # Enable navigator.registerProtocolHandler and supporting UI.
+      'enable_register_protocol_handler%': 1,
+
+      # Enable Web Intents and supporting UI.
+      'enable_web_intents%': 0,
+
+      # Webrtc compilation is enabled by default. Set to 0 to disable.
+      'enable_webrtc%': 1,
+
+      # PPAPI by default does not support plugins making calls off the main
+      # thread. Set to 1 to turn on experimental support for out-of-process
+      # plugins to make calls off the main thread.
+      'enable_pepper_threading%': 0,
+
+      # XInput2 multitouch support is disabled by default (use_xi2_mt=0).
+      # Setting to non-zero value enables XI2 MT. When XI2 MT is enabled,
+      # the input value also defines the required XI2 minor minimum version.
+      # For example, use_xi2_mt=2 means XI2.2 or above version is required.
+      'use_xi2_mt%': 0,
+
+      # Use of precompiled headers on Windows is off by default
+      # because of complications that it can cause with our
+      # infrastructure (trybots etc.).  Enable by setting to 1 in
+      # ~/.gyp/include.gypi or via the GYP command line for ~20-25%
+      # faster builds.
+      'chromium_win_pch%': 0,
+
+      'conditions': [
+        # TODO(epoger): Figure out how to set use_skia=1 for Mac outside of
+        # the 'conditions' clause.  Initial attempts resulted in chromium and
+        # webkit disagreeing on its setting.
+        ['OS=="mac"', {
+          'use_skia%': '<(use_skia_on_mac)',
+          # Mac uses clang by default, so turn on the plugin as well.
+          'clang_use_chrome_plugins%': 1,
+        }, {
+          'use_skia%': 1,
+        }],
+
+        # A flag for POSIX platforms
+        ['OS=="win"', {
+          'os_posix%': 0,
+        }, {
+          'os_posix%': 1,
+        }],
+
+        # A flag for BSD platforms
+        ['OS=="freebsd" or OS=="openbsd"', {
+          'os_bsd%': 1,
+        }, {
+          'os_bsd%': 0,
+        }],
+
+        # NSS usage.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and use_openssl==0', {
+          'use_nss%': 1,
+        }, {
+          'use_nss%': 0,
+        }],
+
+        # Flags to use X11 on non-Mac POSIX platforms
+        ['OS=="win" or OS=="mac" or OS=="android"', {
+          'use_glib%': 0,
+          'toolkit_uses_gtk%': 0,
+          'use_x11%': 0,
+        }, {
+          # TODO(dnicoara) Wayland build should have these disabled, but
+          # currently GTK and X are too widespread and it's hard to completely
+          # remove every dependency.
+          'use_glib%': 1,
+          'toolkit_uses_gtk%': 1,
+          'use_x11%': 1,
+        }],
+        # We always use skia text rendering in Aura on Windows, since GDI
+        # doesn't agree with our BackingStore.
+        # TODO(beng): remove once skia text rendering is on by default.
+        ['use_aura==1 and OS=="win"', {
+          'enable_skia_text%': 1,
+        }],
+        ['use_aura==1 and OS!="win"', {
+          'toolkit_uses_gtk%': 0,
+        }],
+
+        # A flag to enable or disable our compile-time dependency
+        # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+        # support will be available. This option is useful
+        # for Linux distributions and for Aura.
+        ['chromeos==1 or use_aura==1', {
+          'use_gnome_keyring%': 0,
+        }, {
+          'use_gnome_keyring%': 1,
+        }],
+
+        ['toolkit_views==0 or OS=="mac"', {
+          # GTK+ and Mac want Title Case strings
+          'use_titlecase_in_grd_files%': 1,
+        }],
+
+        # Enable some hacks to support Flapper only on Chrome OS.
+        ['chromeos==1', {
+          'enable_flapper_hacks%': 1,
+        }, {
+          'enable_flapper_hacks%': 0,
+        }],
+
+        # Enable file manager extension on Chrome OS or Aura.
+        ['chromeos==1 or use_aura==1', {
+          'file_manager_extension%': 1,
+        }, {
+          'file_manager_extension%': 0,
+        }],
+
+        # ... except on Windows even with Aura.
+        ['use_aura==1 and OS=="win"', {
+          'file_manager_extension%': 0,
+        }],
+
+        # Enable WebUI TaskManager always on Chrome OS or Aura.
+        ['chromeos==1 or use_aura==1', {
+          'webui_task_manager%': 1,
+        }],
+
+        ['OS=="android"', {
+          'proprietary_codecs%': 1,
+          'enable_webrtc%': 0,
+        }],
+
+        # Use GPU accelerated cross process image transport by default
+        # on linux builds with the Aura window manager
+        ['views_compositor==1 and OS=="linux"', {
+          'ui_compositor_image_transport%': 1,
+        }, {
+          'ui_compositor_image_transport%': 0,
+        }],
+      ],
+    },
+
+    # Copy conditionally-set variables out one scope.
+    'branding%': '<(branding)',
+    'buildtype%': '<(buildtype)',
+    'target_arch%': '<(target_arch)',
+    'host_arch%': '<(host_arch)',
+    'library%': 'static_library',
+    'toolkit_views%': '<(toolkit_views)',
+    'views_compositor%': '<(views_compositor)',
+    'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+    'use_webkit_compositor%': '<(use_webkit_compositor)',
+    'use_aura%': '<(use_aura)',
+    'use_openssl%': '<(use_openssl)',
+    'use_nss%': '<(use_nss)',
+    'os_bsd%': '<(os_bsd)',
+    'os_posix%': '<(os_posix)',
+    'use_glib%': '<(use_glib)',
+    'toolkit_uses_gtk%': '<(toolkit_uses_gtk)',
+    'use_skia%': '<(use_skia)',
+    'use_x11%': '<(use_x11)',
+    'use_gnome_keyring%': '<(use_gnome_keyring)',
+    'linux_fpic%': '<(linux_fpic)',
+    'enable_flapper_hacks%': '<(enable_flapper_hacks)',
+    'enable_pepper_threading%': '<(enable_pepper_threading)',
+    'chromeos%': '<(chromeos)',
+    'use_virtual_keyboard%': '<(use_virtual_keyboard)',
+    'use_skia_on_mac%': '<(use_skia_on_mac)',
+    'use_xi2_mt%':'<(use_xi2_mt)',
+    'file_manager_extension%': '<(file_manager_extension)',
+    'webui_task_manager%': '<(webui_task_manager)',
+    'inside_chromium_build%': '<(inside_chromium_build)',
+    'fastbuild%': '<(fastbuild)',
+    'dcheck_always_on%': '<(dcheck_always_on)',
+    'python_ver%': '<(python_ver)',
+    'armv7%': '<(armv7)',
+    'arm_neon%': '<(arm_neon)',
+    'sysroot%': '<(sysroot)',
+    'system_libdir%': '<(system_libdir)',
+    'disable_sse2%': '<(disable_sse2)',
+    'component%': '<(component)',
+    'use_titlecase_in_grd_files%': '<(use_titlecase_in_grd_files)',
+    'use_third_party_translations%': '<(use_third_party_translations)',
+    'remoting%': '<(remoting)',
+    'use_threaded_compositing%': '<(use_threaded_compositing)',
+    'enable_webrtc%': '<(enable_webrtc)',
+    'chromium_win_pch%': '<(chromium_win_pch)',
+    'p2p_apis%': '<(p2p_apis)',
+    'configuration_policy%': '<(configuration_policy)',
+    'safe_browsing%': '<(safe_browsing)',
+    'input_speech%': '<(input_speech)',
+    'notifications%': '<(notifications)',
+    'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+    'asan%': '<(asan)',
+    'enable_register_protocol_handler%': '<(enable_register_protocol_handler)',
+    'enable_web_intents%': '<(enable_web_intents)',
+    # Whether to build for Wayland display server
+    'use_wayland%': 0,
+
+    # Use system yasm instead of bundled one.
+    'use_system_yasm%': 0,
+
+    # Default to enabled PIE; this is important for ASLR but we need to be
+    # able to turn it off for remote debugging on Chromium OS
+    'linux_disable_pie%': 0,
+
+    # The release channel that this build targets. This is used to restrict
+    # channel-specific build options, like which installer packages to create.
+    # The default is 'all', which does no channel-specific filtering.
+    'channel%': 'all',
+
+    # Override chromium_mac_pch and set it to 0 to suppress the use of
+    # precompiled headers on the Mac.  Prefix header injection may still be
+    # used, but prefix headers will not be precompiled.  This is useful when
+    # using distcc to distribute a build to compile slaves that don't
+    # share the same compiler executable as the system driving the compilation,
+    # because precompiled headers rely on pointers into a specific compiler
+    # executable's image.  Setting this to 0 is needed to use an experimental
+    # Linux-Mac cross compiler distcc farm.
+    'chromium_mac_pch%': 1,
+
+    # Mac OS X SDK and deployment target support.
+    # The SDK identifies the version of the system headers that will be used,
+    # and corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time macro.
+    # "Maximum allowed" refers to the operating system version whose APIs are
+    # available in the headers.
+    # The deployment target identifies the minimum system version that the
+    # built products are expected to function on.  It corresponds to the
+    # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro.
+    # To ensure these macros are available, #include <AvailabilityMacros.h>.
+    # Additional documentation on these macros is available at
+    # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+    # Chrome normally builds with the Mac OS X 10.5 SDK and sets the
+    # deployment target to 10.5.  Other projects, such as O3D, may override
+    # these defaults.
+    'mac_sdk%': '10.5',
+    'mac_deployment_target%': '10.5',
+
+    # Set to 1 to enable code coverage.  In addition to build changes
+    # (e.g. extra CFLAGS), also creates a new target in the src/chrome
+    # project file called "coverage".
+    # Currently ignored on Windows.
+    'coverage%': 0,
+
+    # Although base/allocator lets you select a heap library via an
+    # environment variable, the libcmt shim it uses sometimes gets in
+    # the way.  To disable it entirely, and switch to normal msvcrt, do e.g.
+    #  'win_use_allocator_shim': 0,
+    #  'win_release_RuntimeLibrary': 2
+    # to ~/.gyp/include.gypi, gclient runhooks --force, and do a release build.
+    'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
+
+    # Whether usage of OpenMAX is enabled.
+    'enable_openmax%': 0,
+
+    # Whether proprietary audio/video codecs are assumed to be included with
+    # this build (only meaningful if branding!=Chrome).
+    'proprietary_codecs%': 0,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to prevent release.vsprops from being included.
+    # Yes(1) means include release.vsprops.
+    # Once all vsprops settings are migrated into gyp, this can go away.
+    'msvs_use_common_release%': 1,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to override additional linker options for msvs.
+    # Yes(1) means use the common linker options.
+    'msvs_use_common_linker_extras%': 1,
+
+    # TODO(sgk): eliminate this if possible.
+    # It would be nicer to support this via a setting in 'target_defaults'
+    # in chrome/app/locales/locales.gypi overriding the setting in the
+    # 'Debug' configuration in the 'target_defaults' dict below,
+    # but that doesn't work as we'd like.
+    'msvs_debug_link_incremental%': '2',
+
+    # Needed for some of the largest modules.
+    'msvs_debug_link_nonincremental%': '1',
+
+    # Turn on Use Library Dependency Inputs for linking chrome.dll on Windows
+    # to get incremental linking to be faster in debug builds.
+    'incremental_chrome_dll%': '<!(python <(DEPTH)/tools/win/supalink/check_installed.py)',
+
+    # This is the location of the sandbox binary. Chrome looks for this before
+    # running the zygote process. If found, and SUID, it will be used to
+    # sandbox the zygote process and, thus, all renderer processes.
+    'linux_sandbox_path%': '',
+
+    # Set this to true to enable SELinux support.
+    'selinux%': 0,
+
+    # Set this to true when building with Clang.
+    # See http://code.google.com/p/chromium/wiki/Clang for details.
+    'clang%': 0,
+    'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+
+    # These two variables can be set in GYP_DEFINES while running
+    # |gclient runhooks| to let clang run a plugin in every compilation.
+    # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+    # Example:
+    #     GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+    'clang_load%': '',
+    'clang_add_plugin%': '',
+
+    # The default type of gtest.
+    'gtest_target_type%': 'executable',
+
+    # Enable sampling based profiler.
+    # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+    'profiling%': '0',
+
+    # Enable strict glibc debug mode.
+    'glibcxx_debug%': 0,
+
+    # Override whether we should use Breakpad on Linux. I.e. for Chrome bot.
+    'linux_breakpad%': 0,
+    # And if we want to dump symbols for Breakpad-enabled builds.
+    'linux_dump_symbols%': 0,
+    # And if we want to strip the binary after dumping symbols.
+    'linux_strip_binary%': 0,
+    # Strip the test binaries needed for Linux reliability tests.
+    'linux_strip_reliability_tests%': 0,
+
+    # Enable TCMalloc.
+    'linux_use_tcmalloc%': 1,
+
+    # Disable TCMalloc's debugallocation.
+    'linux_use_debugallocation%': 0,
+
+    # Disable TCMalloc's heapchecker.
+    'linux_use_heapchecker%': 0,
+
+    # Disable shadow stack keeping used by heapcheck to unwind the stacks
+    # better.
+    'linux_keep_shadow_stacks%': 0,
+
+    # Set to 1 to link against libgnome-keyring instead of using dlopen().
+    'linux_link_gnome_keyring%': 0,
+    # Set to 1 to link against gsettings APIs instead of using dlopen().
+    'linux_link_gsettings%': 0,
+
+    # Set Thumb compilation flags.
+    'arm_thumb%': 0,
+
+    # Set ARM fpu compilation flags (only meaningful if armv7==1 and
+    # arm_neon==0).
+    'arm_fpu%': 'vfpv3',
+
+    # Enable new NPDevice API.
+    'enable_new_npdevice_api%': 0,
+
+    # Enable EGLImage support in OpenMAX
+    'enable_eglimage%': 1,
+
+    # Enable a variable used elsewhere throughout the GYP files to determine
+    # whether to compile in the sources for the GPU plugin / process.
+    'enable_gpu%': 1,
+
+    # .gyp files or targets should set chromium_code to 1 if they build
+    # Chromium-specific code, as opposed to external code.  This variable is
+    # used to control such things as the set of warnings to enable, and
+    # whether warnings are treated as errors.
+    'chromium_code%': 0,
+
+    # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+    'enable_wexit_time_destructors%': 0,
+
+    # Set to 1 to compile with the built in pdf viewer.
+    'internal_pdf%': 0,
+
+    # This allows using libcros from the current system, i.e. /usr/lib/
+    # The cros_api will be pulled in as a static library, and all headers
+    # from the system include dirs.
+    'system_libcros%': 0,
+
+    # NOTE: When these end up in the Mac bundle, we need to replace '-' with '_'
+    # so Cocoa is happy (http://crbug.com/20441).
+    'locales': [
+      'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+      'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+      'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+      'ml', 'mr', 'nb', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru',
+      'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+      'vi', 'zh-CN', 'zh-TW',
+    ],
+
+    # Pseudo locales are special locales which are used for testing and
+    # debugging. They don't get copied to the final app. For more info,
+    # check out https://sites.google.com/a/chromium.org/dev/Home/fake-bidi
+    'pseudo_locales': [
+      'fake-bidi',
+    ],
+
+    'grit_defines': [],
+
+    # Use Harfbuzz-NG instead of Harfbuzz.
+    # Under development: http://crbug.com/68551
+    'use_harfbuzz_ng%': 0,
+
+    # If debug_devtools is set to 1, JavaScript files for DevTools are
+    # stored as is and loaded from disk. Otherwise, a concatenated file
+    # is stored in resources.pak. It is still possible to load JS files
+    # from disk by passing --debug-devtools cmdline switch.
+    'debug_devtools%': 0,
+
+    # Point to ICU directory.
+    'icu_src_dir': '../third_party/icu',
+
+    # The Java Bridge is not compiled in by default.
+    'java_bridge%': 0,
+
+    # TODO(dpranke): This determines whether we should attempt to build DRT
+    # et al. from WebKit/Source/WebKit.gyp or Tools/Tools.gyp. This
+    # flag should only be needed temporarily. See
+    # https://bugs.webkit.org/show_bug.cgi?id=68463.
+    'build_webkit_exes_from_webkit_gyp%': 1,
+
+    # This flag is only used when disable_nacl==0 and disables all those
+    # subcomponents which would require the installation of a native_client
+    # untrusted toolchain.
+    'disable_nacl_untrusted%': 0,
+
+    'conditions': [
+      # Used to disable Native Client at compile time, for platforms where it
+      # isn't supported (ARM)
+      ['target_arch=="arm" and chromeos == 1', {
+        'disable_nacl%': 1,
+       }, {
+        'disable_nacl%': 0,
+      }],
+      ['os_posix==1 and OS!="mac" and OS!="android"', {
+        # This will set gcc_version to XY if you are running gcc X.Y.*.
+        # This is used to tweak build flags for gcc 4.4.
+        'gcc_version%': '<!(python <(DEPTH)/build/compiler_version.py)',
+        # Figure out the python architecture to decide if we build pyauto.
+        'python_arch%': '<!(<(DEPTH)/build/linux/python_arch.sh <(sysroot)/usr/<(system_libdir)/libpython<(python_ver).so.1.0)',
+        'conditions': [
+          ['branding=="Chrome"', {
+            'linux_breakpad%': 1,
+          }],
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'linux_dump_symbols%': 1,
+          }],
+        ],
+      }],  # os_posix==1 and OS!="mac" and OS!="android"
+      ['OS=="android"', {
+        # Location of Android NDK.
+        'variables': {
+          'variables': {
+            'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
+            'android_target_arch%': 'arm',  # target_arch in android terms.
+
+            # Switch between different build types, currently only '0' is
+            # supported.
+            'android_build_type%': 0,
+          },
+          'android_ndk_root%': '<(android_ndk_root)',
+          'android_ndk_sysroot': '<(android_ndk_root)/platforms/android-9/arch-<(android_target_arch)',
+          'android_build_type%': '<(android_build_type)',
+        },
+        'android_ndk_root%': '<(android_ndk_root)',
+        'android_ndk_sysroot': '<(android_ndk_sysroot)',
+        'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+        'android_ndk_lib': '<(android_ndk_sysroot)/usr/lib',
+
+        # Uses Android's crash report system
+        'linux_breakpad%': 0,
+
+        # Always uses openssl.
+        'use_openssl%': 1,
+
+        'proprietary_codecs%': '<(proprietary_codecs)',
+        'safe_browsing%': 0,
+        'configuration_policy%': 0,
+        'input_speech%': 0,
+        'java_bridge%': 1,
+        'notifications%': 0,
+
+        # Builds the gtest targets as a shared_library.
+        # TODO(michaelbai): Use the fixed value 'shared_library' once it
+        # is fully supported.
+        'gtest_target_type%': '<(gtest_target_type)',
+
+        # Uses system APIs for decoding audio and video.
+        'use_libffmpeg%': '0',
+
+        # Always use the chromium skia. The use_system_harfbuzz needs to
+        # match use_system_skia.
+        'use_system_skia%': '0',
+        'use_system_harfbuzz%': '0',
+
+        # Use the system icu.
+        'use_system_icu%': 0,
+
+        # Choose static link by build type.
+        'conditions': [
+          ['android_build_type==0', {
+            'static_link_system_icu%': 1,
+          }, {
+            'static_link_system_icu%': 0,
+          }],
+        ],
+        # Enable to use system sqlite.
+        'use_system_sqlite%': '<(android_build_type)',
+        # Enable to use system libjpeg.
+        'use_system_libjpeg%': '<(android_build_type)',
+        # Enable to use the system libexpat.
+        'use_system_libexpat%': '<(android_build_type)',
+        # Enable to use the system stlport, otherwise statically
+        # link the NDK one?
+        'use_system_stlport%': '<(android_build_type)',
+        # Copy it out one scope.
+        'android_build_type%': '<(android_build_type)',
+      }],  # OS=="android"
+      ['OS=="mac"', {
+        # Enable clang on mac by default!
+        'clang%': 1,
+        # Compile in Breakpad support by default so that it can be
+        # tested, even if it is not enabled by default at runtime.
+        'mac_breakpad_compiled_in%': 1,
+        'conditions': [
+          # mac_product_name is set to the name of the .app bundle as it should
+          # appear on disk.  This duplicates data from
+          # chrome/app/theme/chromium/BRANDING and
+          # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+          # these names into the build system.
+          ['branding=="Chrome"', {
+            'mac_product_name%': 'Google Chrome',
+          }, { # else: branding!="Chrome"
+            'mac_product_name%': 'Chromium',
+          }],
+
+          ['branding=="Chrome" and buildtype=="Official"', {
+            # Enable uploading crash dumps.
+            'mac_breakpad_uploads%': 1,
+            # Enable dumping symbols at build time for use by Mac Breakpad.
+            'mac_breakpad%': 1,
+            # Enable Keystone auto-update support.
+            'mac_keystone%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'mac_breakpad_uploads%': 0,
+            'mac_breakpad%': 0,
+            'mac_keystone%': 0,
+          }],
+        ],
+      }],  # OS=="mac"
+
+      # Whether to use multiple cores to compile with visual studio. This is
+      # optional because it sometimes causes corruption on VS 2005.
+      # It is on by default on VS 2008 and off on VS 2005.
+      ['OS=="win"', {
+        'conditions': [
+          ['component=="shared_library"', {
+            'win_use_allocator_shim%': 0,
+          }],
+          ['MSVS_VERSION=="2005"', {
+            'msvs_multi_core_compile%': 0,
+          },{
+            'msvs_multi_core_compile%': 1,
+          }],
+          # Don't do incremental linking for large modules on 32-bit.
+          ['MSVS_OS_BITS==32', {
+            'msvs_large_module_debug_link_mode%': '1',  # No
+          },{
+            'msvs_large_module_debug_link_mode%': '2',  # Yes
+          }],
+          ['MSVS_VERSION=="2010e" or MSVS_VERSION=="2008e" or MSVS_VERSION=="2005e"', {
+            'msvs_express%': 1,
+            'secure_atl%': 0,
+          },{
+            'msvs_express%': 0,
+            'secure_atl%': 1,
+          }],
+        ],
+        'nacl_win64_defines': [
+          # This flag is used to minimize dependencies when building
+          # Native Client loader for 64-bit Windows.
+          'NACL_WIN64',
+        ],
+      }],
+
+      ['os_posix==1 and chromeos==0 and target_arch!="arm"', {
+        'use_cups%': 1,
+      }, {
+        'use_cups%': 0,
+      }],
+
+      # Set the relative path from this file to the GYP file of the JPEG
+      # library used by Chromium.
+      ['use_libjpeg_turbo==1', {
+        'libjpeg_gyp_path': '../third_party/libjpeg_turbo/libjpeg.gyp',
+      }, {
+        'libjpeg_gyp_path': '../third_party/libjpeg/libjpeg.gyp',
+      }],  # use_libjpeg_turbo==1
+
+      # Options controlling the use of GConf (the classic GNOME configuration
+      # system) and GIO, which contains GSettings (the new GNOME config system).
+      ['chromeos==1', {
+        'use_gconf%': 0,
+        'use_gio%': 0,
+      }, {
+        'use_gconf%': 1,
+        'use_gio%': 1,
+      }],
+
+      # Set up -D and -E flags passed into grit.
+      ['branding=="Chrome"', {
+        # TODO(mmoss) The .grd files look for _google_chrome, but for
+        # consistency they should look for google_chrome_build like C++.
+        'grit_defines': ['-D', '_google_chrome',
+                         '-E', 'CHROMIUM_BUILD=google_chrome'],
+      }, {
+        'grit_defines': ['-D', '_chromium',
+                         '-E', 'CHROMIUM_BUILD=chromium'],
+      }],
+      ['chromeos==1', {
+        'grit_defines': ['-D', 'chromeos'],
+      }],
+      ['toolkit_views==1', {
+        'grit_defines': ['-D', 'toolkit_views'],
+      }],
+      ['use_aura==1', {
+        'grit_defines': ['-D', 'use_aura'],
+      }],
+      ['use_nss==1', {
+        'grit_defines': ['-D', 'use_nss'],
+      }],
+      ['use_virtual_keyboard==1', {
+        'grit_defines': ['-D', 'use_virtual_keyboard'],
+      }],
+      ['file_manager_extension==1', {
+        'grit_defines': ['-D', 'file_manager_extension'],
+      }],
+      ['webui_task_manager==1', {
+        'grit_defines': ['-D', 'webui_task_manager'],
+      }],
+      ['remoting==1', {
+        'grit_defines': ['-D', 'remoting'],
+      }],
+      ['use_titlecase_in_grd_files==1', {
+        'grit_defines': ['-D', 'use_titlecase'],
+      }],
+      ['use_third_party_translations==1', {
+        'grit_defines': ['-D', 'use_third_party_translations'],
+        'locales': [
+          'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+          'ka', 'ku', 'kw', 'ms', 'ug'
+        ],
+      }],
+      ['OS=="android"', {
+        'grit_defines': ['-D', 'android'],
+      }],
+      ['clang_use_chrome_plugins==1', {
+        'clang_chrome_plugins_flags':
+            '<!(<(DEPTH)/tools/clang/scripts/plugin_flags.sh)',
+      }],
+
+      # Set use_ibus to 1 to enable ibus support.
+      ['use_virtual_keyboard==1 and chromeos==1', {
+        'use_ibus%': 1,
+      }, {
+        'use_ibus%': 0,
+      }],
+
+      ['enable_register_protocol_handler==1', {
+        'grit_defines': ['-D', 'enable_register_protocol_handler'],
+      }],
+
+      ['enable_web_intents==1', {
+        'grit_defines': ['-D', 'enable_web_intents'],
+      }],
+
+      ['asan==1', {
+        'clang%': 1,
+        # Do not use Chrome plugins for Clang. The Clang version in
+        # third_party/asan may be different from the default one.
+        'clang_use_chrome_plugins%': 0,
+      }],
+    ],
+    # List of default apps to install in new profiles.  The first list contains
+    # the source files as found in svn.  The second list, used only for linux,
+    # contains the destination location for each of the files.  When a crx
+    # is added or removed from the list, the chrome/browser/resources/
+    # default_apps/external_extensions.json file must also be updated.
+    'default_apps_list': [
+      'browser/resources/default_apps/external_extensions.json',
+      'browser/resources/default_apps/gmail.crx',
+      'browser/resources/default_apps/search.crx',
+      'browser/resources/default_apps/youtube.crx',
+    ],
+    'default_apps_list_linux_dest': [
+      '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+      '<(PRODUCT_DIR)/default_apps/gmail.crx',
+      '<(PRODUCT_DIR)/default_apps/search.crx',
+      '<(PRODUCT_DIR)/default_apps/youtube.crx',
+    ],
+  },
+  'target_defaults': {
+    'variables': {
+      # The condition that operates on chromium_code is in a target_conditions
+      # section, and will not have access to the default fallback value of
+      # chromium_code at the top of this file, or to the chromium_code
+      # variable placed at the root variables scope of .gyp files, because
+      # those variables are not set at target scope.  As a workaround,
+      # if chromium_code is not set at target scope, define it in target scope
+      # to contain whatever value it has during early variable expansion.
+      # That's enough to make it available during target conditional
+      # processing.
+      'chromium_code%': '<(chromium_code)',
+
+      # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+      'mac_release_optimization%': '3', # Use -O3 unless overridden
+      'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+
+      # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+      'win_release_Optimization%': '2', # 2 = /O2
+      'win_debug_Optimization%': '0',   # 0 = /Od
+
+      # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_release_OmitFramePointers%': '1',
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_debug_OmitFramePointers%': '',
+
+      # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+      'win_debug_RuntimeChecks%': '3',    # 3 = all checks enabled, 0 = off
+
+      # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+      'win_debug_InlineFunctionExpansion%': '',    # empty = default, 0 = off,
+      'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+      # VS inserts quite a lot of extra checks into algorithms like
+      # std::partial_sort in Debug builds, which makes them O(N^2)
+      # instead of O(N*logN). This is particularly slow under memory
+      # tools like ThreadSanitizer so we want it to be disablable.
+      # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+      'win_debug_disable_iterator_debugging%': '0',
+
+      'release_extra_cflags%': '',
+      'debug_extra_cflags%': '',
+      'release_valgrind_build%': 0,
+
+      # the non-qualified versions are widely assumed to be *nix-only
+      'win_release_extra_cflags%': '',
+      'win_debug_extra_cflags%': '',
+
+      # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+      'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+      # Only used by the Windows build for now.  Can be used to build into a
+      # different output directory, e.g., a build_dir_prefix of VS2010_ would
+      # output files in src/build/VS2010_{Debug,Release}.
+      'build_dir_prefix%': '',
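+      # Hedged example of the override described above: assuming the projects
+      # are regenerated with gyp after setting
+      #   GYP_DEFINES="build_dir_prefix=VS2010_"
+      # the outputs land in src/build/VS2010_{Debug,Release}.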
+
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+      ],
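+      # Hedged example of flipping the branch above: regenerating the projects
+      # with
+      #   GYP_DEFINES="component=shared_library"
+      # selects the /MD and /MDd runtimes, while the default static component
+      # build keeps /MT and /MTd.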
+    },
+    'conditions': [
+      ['branding=="Chrome"', {
+        'defines': ['GOOGLE_CHROME_BUILD'],
+      }, {  # else: branding!="Chrome"
+        'defines': ['CHROMIUM_BUILD'],
+      }],
+      ['component=="shared_library"', {
+        'defines': ['COMPONENT_BUILD'],
+      }],
+      ['component=="shared_library" and incremental_chrome_dll==1', {
+        # TODO(dpranke): We can't incrementally link chrome when
+        # content is being built as a DLL because chrome links in
+        # webkit_glue and webkit_glue depends on symbols defined in
+        # content. We can remove this when we fix glue.
+        # See http://code.google.com/p/chromium/issues/detail?id=98755 .
+        'defines': ['COMPILE_CONTENT_STATICALLY'],
+      }],
+      ['toolkit_views==1', {
+        'defines': ['TOOLKIT_VIEWS=1'],
+      }],
+      ['views_compositor==1', {
+        'defines': ['VIEWS_COMPOSITOR=1'],
+      }],
+      ['ui_compositor_image_transport==1', {
+        'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+      }],
+      ['use_webkit_compositor==1', {
+        'defines': ['USE_WEBKIT_COMPOSITOR=1'],
+      }],
+      ['use_aura==1', {
+        'defines': ['USE_AURA=1'],
+      }],
+      ['use_nss==1', {
+        'defines': ['USE_NSS=1'],
+      }],
+      ['toolkit_uses_gtk==1', {
+        'defines': ['TOOLKIT_USES_GTK=1'],
+      }],
+      ['toolkit_uses_gtk==1 and toolkit_views==0', {
+        # TODO(erg): We are progressively sealing up use of deprecated features
+        # in gtk in preparation for an eventual port to gtk3.
+        'defines': ['GTK_DISABLE_SINGLE_INCLUDES=1'],
+      }],
+      ['chromeos==1', {
+        'defines': ['OS_CHROMEOS=1'],
+      }],
+      ['use_virtual_keyboard==1', {
+        'defines': ['USE_VIRTUAL_KEYBOARD=1'],
+      }],
+      ['use_xi2_mt!=0', {
+        'defines': ['USE_XI2_MT=<(use_xi2_mt)'],
+      }],
+      ['use_wayland==1', {
+        'defines': ['USE_WAYLAND=1', 'WL_EGL_PLATFORM=1'],
+      }],
+      ['file_manager_extension==1', {
+        'defines': ['FILE_MANAGER_EXTENSION=1'],
+      }],
+      ['webui_task_manager==1', {
+        'defines': ['WEBUI_TASK_MANAGER=1'],
+      }],
+      ['profiling==1', {
+        'defines': ['ENABLE_PROFILING=1'],
+      }],
+      ['OS=="linux" and glibcxx_debug==1', {
+        'defines': ['_GLIBCXX_DEBUG=1',],
+        'cflags_cc!': ['-fno-rtti'],
+        'cflags_cc+': ['-frtti', '-g'],
+      }],
+      ['remoting==1', {
+        'defines': ['ENABLE_REMOTING=1'],
+      }],
+      ['p2p_apis==1', {
+        'defines': ['ENABLE_P2P_APIS=1'],
+      }],
+      ['proprietary_codecs==1', {
+        'defines': ['USE_PROPRIETARY_CODECS'],
+      }],
+      ['enable_flapper_hacks==1', {
+        'defines': ['ENABLE_FLAPPER_HACKS=1'],
+      }],
+      ['enable_pepper_threading==1', {
+        'defines': ['ENABLE_PEPPER_THREADING'],
+      }],
+      ['configuration_policy==1', {
+        'defines': ['ENABLE_CONFIGURATION_POLICY'],
+      }],
+      ['input_speech==1', {
+        'defines': ['ENABLE_INPUT_SPEECH'],
+      }],
+      ['notifications==1', {
+        'defines': ['ENABLE_NOTIFICATIONS'],
+      }],
+      ['fastbuild!=0', {
+
+        'conditions': [
+          # For Windows and Mac, we don't generate debug information.
+          ['OS=="win" or OS=="mac"', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'false',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '0',
+              }
+            },
+            'xcode_settings': {
+              'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+            },
+          }, { # else: not Windows or Mac; generate less debug info.
+            'variables': {
+              'debug_extra_cflags': '-g1',
+            },
+          }],
+          # Clang creates chubby debug information, which makes linking very
+          # slow. For now, don't create debug information with clang.  See
+          # http://crbug.com/70000
+          ['OS=="linux" and clang==1', {
+            'variables': {
+              'debug_extra_cflags': '-g0',
+            },
+          }],
+        ],  # conditions for fastbuild.
+      }],  # fastbuild!=0
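+      # Illustrative usage only: a developer wanting faster links could set
+      #   GYP_DEFINES="fastbuild=1"
+      # before re-running gyp, trading debug information for link speed as
+      # described by the fastbuild conditions above.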
+      ['dcheck_always_on!=0', {
+        'defines': ['DCHECK_ALWAYS_ON=1'],
+      }],  # dcheck_always_on!=0
+      ['selinux==1', {
+        'defines': ['CHROMIUM_SELINUX=1'],
+      }],
+      ['win_use_allocator_shim==0', {
+        'conditions': [
+          ['OS=="win"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+        ],
+      }],
+      ['enable_gpu==1', {
+        'defines': [
+          'ENABLE_GPU=1',
+        ],
+      }],
+      ['use_openssl==1', {
+        'defines': [
+          'USE_OPENSSL=1',
+        ],
+      }],
+      ['enable_eglimage==1', {
+        'defines': [
+          'ENABLE_EGLIMAGE=1',
+        ],
+      }],
+      ['use_skia==1', {
+        'defines': [
+          'USE_SKIA=1',
+        ],
+      }],
+      ['coverage!=0', {
+        'conditions': [
+          ['OS=="mac"', {
+            'xcode_settings': {
+              'GCC_INSTRUMENT_PROGRAM_FLOW_ARCS': 'YES',  # -fprofile-arcs
+              'GCC_GENERATE_TEST_COVERAGE_FILES': 'YES',  # -ftest-coverage
+            },
+            # Add -lgcov for types executable, shared_library, and
+            # loadable_module; not for static_library.
+            # This is a delayed conditional.
+            'target_conditions': [
+              ['_type!="static_library"', {
+                'xcode_settings': { 'OTHER_LDFLAGS': [ '-lgcov' ] },
+              }],
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'cflags': [ '-ftest-coverage',
+                        '-fprofile-arcs' ],
+            'link_settings': { 'libraries': [ '-lgcov' ] },
+          }],
+          # Finally, for Windows, we simply turn on profiling.
+          ['OS=="win"', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'Profile': 'true',
+              },
+              'VCCLCompilerTool': {
+                # /Z7, not /Zi, so coverage is happy
+                'DebugInformationFormat': '1',
+                'AdditionalOptions': ['/Yd'],
+              }
+            }
+         }],  # OS==win
+        ],  # conditions for coverage
+      }],  # coverage!=0
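+      # Likewise, a hedged example for the coverage block above:
+      #   GYP_DEFINES="coverage=1"
+      # adds -fprofile-arcs/-ftest-coverage (or the MSVS profiling settings)
+      # to every target when the projects are regenerated.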
+      ['OS=="win"', {
+        'defines': [
+          '__STD_C',
+          '_CRT_SECURE_NO_DEPRECATE',
+          '_SCL_SECURE_NO_DEPRECATE',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/wtl/include',
+        ],
+      }],  # OS==win
+      ['enable_register_protocol_handler==1', {
+        'defines': [
+          'ENABLE_REGISTER_PROTOCOL_HANDLER=1',
+        ],
+      }],
+      ['enable_web_intents==1', {
+        'defines': [
+          'ENABLE_WEB_INTENTS=1',
+        ],
+      }],
+      ['OS=="win" and branding=="Chrome"', {
+        'defines': ['ENABLE_SWIFTSHADER'],
+      }],
+    ],  # conditions for 'target_defaults'
+    'target_conditions': [
+      ['enable_wexit_time_destructors==1', {
+        'conditions': [
+          [ 'clang==1', {
+            'cflags': [
+              '-Wexit-time-destructors',
+            ],
+            'xcode_settings': {
+              'WARNING_CFLAGS': [
+                '-Wexit-time-destructors',
+              ],
+            },
+          }],
+        ],
+      }],
+      ['chromium_code==0', {
+        'conditions': [
+          [ 'os_posix==1 and OS!="mac"', {
+            # We don't want to get warnings from third-party code,
+            # so remove any existing warning-enabling flags like -Wall.
+            'cflags!': [
+              '-Wall',
+              '-Wextra',
+              '-Werror',
+            ],
+            'cflags_cc': [
+              # Don't warn about hash_map in third-party code.
+              '-Wno-deprecated',
+            ],
+            'cflags': [
+              # Don't warn about printf format problems.
+              # This is off by default in gcc but on in Ubuntu's gcc(!).
+              '-Wno-format',
+            ],
+            'cflags_cc!': [
+              # TODO(fischman): remove this.
+              # http://code.google.com/p/chromium/issues/detail?id=90453
+              '-Wsign-compare',
+            ]
+          }],
+          [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android" and chromeos==0', {
+            'cflags': [
+              # Don't warn about ignoring the return value from e.g. close().
+              # This is off by default in some gccs but on by default in others.
+              # Currently this option is not set for Chrome OS build because
+              # the current version of gcc (4.3.4) used for building Chrome in
+              # Chrome OS chroot doesn't support this option.
+              # BSD systems do not support this option either, since they are
+              # usually using gcc 4.2.1, which does not have this flag yet.
+              # TODO(mazda): remove the conditional for Chrome OS when gcc
+              # version is upgraded.
+              '-Wno-unused-result',
+            ],
+          }],
+          [ 'OS=="win"', {
+            'defines': [
+              '_CRT_SECURE_NO_DEPRECATE',
+              '_CRT_NONSTDC_NO_WARNINGS',
+              '_CRT_NONSTDC_NO_DEPRECATE',
+              '_SCL_SECURE_NO_DEPRECATE',
+            ],
+            'msvs_disabled_warnings': [4800],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'WarningLevel': '3',
+                'WarnAsError': 'false', # TODO(maruel): Enable it.
+                'Detect64BitPortabilityProblems': 'false',
+              },
+            },
+          }],
+          # TODO(darin): Unfortunately, some third_party code depends on base/
+          [ 'OS=="win" and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+          [ 'OS=="mac"', {
+            'xcode_settings': {
+              'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+              'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+            },
+          }],
+        ],
+      }, {
+        # In Chromium code, we define __STDC_FORMAT_MACROS in order to get the
+        # C99 macros on Mac and Linux.
+        'defines': [
+          '__STDC_FORMAT_MACROS',
+        ],
+        'conditions': [
+          ['OS!="win"', {
+            'sources/': [ ['exclude', '_win(_unittest)?\\.(h|cc)$'],
+                          ['exclude', '(^|/)win/'],
+                          ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+          }],
+          ['OS!="mac"', {
+            'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc)$'],
+                          ['exclude', '(^|/)(cocoa|mac)/'],
+                          ['exclude', '\\.mm?$' ] ],
+          }],
+          ['use_x11!=1', {
+            'sources/': [
+              ['exclude', '_(chromeos|x|x11)(_unittest)?\\.(h|cc)$'],
+              ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+            ],
+          }],
+          ['toolkit_uses_gtk!=1', {
+            'sources/': [
+              ['exclude', '_gtk(_unittest)?\\.(h|cc)$'],
+              ['exclude', '(^|/)gtk/'],
+              ['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'],
+            ],
+          }],
+          ['OS!="linux" and OS!="openbsd" and OS!="freebsd"', {
+            'sources/': [
+              ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+            ],
+          }],
+          ['use_wayland!=1', {
+            'sources/': [
+              ['exclude', '_(wayland)(_unittest)?\\.(h|cc)$'],
+              ['exclude', '(^|/)wayland/'],
+              ['exclude', '(^|/)(wayland)_[^/]*\\.(h|cc)$'],
+            ],
+          }],
+          # Do not exclude the linux files on *BSD since most of them can be
+          # shared at this point.
+          # In case a file is not needed, it is going to be excluded later on.
+          ['OS!="linux" and OS!="openbsd" and OS!="freebsd"', {
+            'sources/': [
+              ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+              ['exclude', '(^|/)linux/'],
+            ],
+          }],
+          ['OS!="android"', {
+            'sources/': [
+              ['exclude', '_android(_unittest)?\\.cc$'],
+              ['exclude', '(^|/)android/'],
+            ],
+          }],
+          # We use "POSIX" to refer to all non-Windows operating systems.
+          ['OS=="win"', {
+            'sources/': [ ['exclude', '_posix(_unittest)?\\.(h|cc)$'] ],
+            # turn on warnings for signed/unsigned mismatch on chromium code.
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': ['/we4389'],
+              },
+            },
+          }],
+          ['OS=="win" and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+          ['chromeos!=1', {
+            'sources/': [ ['exclude', '_chromeos\\.(h|cc)$'] ]
+          }],
+          ['toolkit_views==0', {
+            'sources/': [ ['exclude', '_views\\.(h|cc)$'] ]
+          }],
+          ['use_aura==0', {
+            'sources/': [ ['exclude', '_aura\\.(h|cc)$'],
+                          ['exclude', '(^|/)aura/'],
+            ]
+          }],
+          ['use_aura==0 or use_x11==0', {
+            'sources/': [ ['exclude', '_aurax11\\.(h|cc)$'] ]
+          }],
+          ['use_aura==0 or OS!="win"', {
+            'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+          }],
+        ],
+      }],
+    ],  # target_conditions for 'target_defaults'
+    'default_configuration': 'Debug',
+    'configurations': {
+      # VCLinkerTool LinkIncremental values below:
+      #   0 == default
+      #   1 == /INCREMENTAL:NO
+      #   2 == /INCREMENTAL
+      # Debug links incremental, Release does not.
+      #
+      # Abstract base configurations to cover common attributes.
+      #
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',
+        },
+      },
+      'x86_Base': {
+        'abstract': 1,
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'TargetMachine': '1',
+          },
+        },
+        'msvs_configuration_platform': 'Win32',
+      },
+      'x64_Base': {
+        'abstract': 1,
+        'msvs_configuration_platform': 'x64',
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'TargetMachine': '17', # x86 - 64
+            'AdditionalLibraryDirectories!':
+              ['<(DEPTH)/third_party/platformsdk_win7/files/Lib'],
+            'AdditionalLibraryDirectories':
+              ['<(DEPTH)/third_party/platformsdk_win7/files/Lib/x64'],
+          },
+          'VCLibrarianTool': {
+            'AdditionalLibraryDirectories!':
+              ['<(DEPTH)/third_party/platformsdk_win7/files/Lib'],
+            'AdditionalLibraryDirectories':
+              ['<(DEPTH)/third_party/platformsdk_win7/files/Lib/x64'],
+          },
+        },
+        'defines': [
+          # Not sure if tcmalloc works on 64-bit Windows.
+          'NO_TCMALLOC',
+        ],
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'defines': [
+          'DYNAMIC_ANNOTATIONS_ENABLED=1',
+          'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+        ],
+        'xcode_settings': {
+          'COPY_PHASE_STRIP': 'NO',
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+          'OTHER_CFLAGS': [
+            '<@(debug_extra_cflags)',
+          ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_debug_Optimization)',
+            'PreprocessorDefinitions': ['_DEBUG'],
+            'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_debug_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_debug_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_debug_InlineFunctionExpansion)',
+              }],
+              ['win_debug_disable_iterator_debugging==1', {
+                'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+              }],
+
+              # if win_debug_OmitFramePointers is blank, leave as default
+              ['win_debug_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_debug_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable it.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': '<(msvs_debug_link_incremental)',
+            # ASLR makes debugging with windbg difficult because Chrome.exe and
+            # Chrome.dll share the same base name. As a result, windbg will
+            # name the Chrome.dll module like chrome_<base address>, where
+            # <base address> typically changes with each launch. This in turn
+            # means that breakpoints in Chrome.dll don't stick from one launch
+            # to the next. For this reason, we turn ASLR off in debug builds.
+            # Note that this is a three-way bool, where 0 means to pick up
+            # the default setting, 1 is off and 2 is on.
+            'RandomizedBaseAddress': 1,
+          },
+          'VCResourceCompilerTool': {
+            'PreprocessorDefinitions': ['_DEBUG'],
+          },
+        },
+        'conditions': [
+          ['OS=="linux"', {
+            'cflags': [
+              '<@(debug_extra_cflags)',
+            ],
+          }],
+          ['release_valgrind_build==0', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fstack-protector-all',  # Implies -fstack-protector
+              ],
+            },
+          }],
+        ],
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'xcode_settings': {
+          'DEAD_CODE_STRIPPING': 'YES',  # -Wl,-dead_strip
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+          'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_release_Optimization)',
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_release_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_release_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_release_InlineFunctionExpansion)',
+              }],
+
+              # if win_release_OmitFramePointers is blank, leave as default
+              ['win_release_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_release_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable it.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_release_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            # LinkIncremental is a tri-state boolean, where 0 means default
+            # (i.e., inherit from parent solution), 1 means false, and
+            # 2 means true.
+            'LinkIncremental': '1',
+            # This corresponds to the /PROFILE flag which ensures the PDB
+            # file contains FIXUP information (growing the PDB file by about
+            # 5%) but does not otherwise alter the output binary. This
+            # information is used by the Syzygy optimization tool when
+            # decomposing the release image.
+            'Profile': 'true',
+          },
+        },
+        'conditions': [
+          ['release_valgrind_build==0', {
+            'defines': [
+              'NVALGRIND',
+              'DYNAMIC_ANNOTATIONS_ENABLED=0',
+            ],
+          }, {
+            'defines': [
+              'DYNAMIC_ANNOTATIONS_ENABLED=1',
+              'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+            ],
+          }],
+          ['win_use_allocator_shim==0', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['OS=="linux"', {
+            'cflags': [
+             '<@(release_extra_cflags)',
+            ],
+          }],
+        ],
+      },
+      #
+      # Concrete configurations
+      #
+      'Debug': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+        'conditions': [
+          ['msvs_use_common_release', {
+            'includes': ['release.gypi'],
+          }],
+        ]
+      },
+      'conditions': [
+        [ 'OS=="win"', {
+          # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+          'Debug_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+          },
+          'Release_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+          },
+        }],
+      ],
+    },
+  },
+  'conditions': [
+    ['os_posix==1 and OS!="mac"', {
+      'target_defaults': {
+        # Enable -Werror by default, but put it in a variable so it can
+        # be disabled in ~/.gyp/include.gypi on the valgrind builders.
+        'variables': {
+          # Use -fno-strict-aliasing, see http://crbug.com/32204
+          'no_strict_aliasing%': 1,
+          'conditions': [
+            ['OS=="linux"', {
+              'werror%': '-Werror',
+              }, { # turn off -Werror on other Unices
+              'werror%': '',
+            }],
+          ],
+        },
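+        # As the comment above suggests, -Werror can be dropped without
+        # editing this file via a ~/.gyp/include.gypi; a minimal, hypothetical
+        # sketch of such a file is:
+        #   { 'variables': { 'werror%': '' } }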
+        'cflags': [
+          '<(werror)',  # See note above about the werror variable.
+          '-pthread',
+          '-fno-exceptions',
+          '-Wall',
+          # TODO(evan): turn this back on once all the builds work.
+          # '-Wextra',
+          # Don't warn about unused function params.  We use those everywhere.
+          '-Wno-unused-parameter',
+          # Don't warn about the "struct foo f = {0};" initialization pattern.
+          '-Wno-missing-field-initializers',
+          '-D_FILE_OFFSET_BITS=64',
+          # Don't export any symbols (for example, to plugins we dlopen()).
+          # Note: this is *required* to make some plugins work.
+          '-fvisibility=hidden',
+          '-pipe',
+        ],
+        'cflags_cc': [
+          '-fno-rtti',
+          '-fno-threadsafe-statics',
+          # Make inline functions have hidden visibility by default.
+          # Surprisingly, not covered by -fvisibility=hidden.
+          '-fvisibility-inlines-hidden',
+          # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+          # so we specify it explicitly.
+          # TODO(fischman): remove this if http://llvm.org/PR10448 obsoletes it.
+          # http://code.google.com/p/chromium/issues/detail?id=90453
+          '-Wsign-compare',
+        ],
+        'ldflags': [
+          '-pthread', '-Wl,-z,noexecstack',
+        ],
+        'configurations': {
+          'Debug_Base': {
+            'variables': {
+              'debug_optimize%': '0',
+            },
+            'defines': [
+              '_DEBUG',
+            ],
+            'cflags': [
+              '-O>(debug_optimize)',
+              '-g',
+            ],
+            'conditions' : [
+              ['OS=="android"', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                ],
+              }],
+            ],
+            'target_conditions' : [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['OS=="android" and debug_optimize==0', {
+                    'cflags': [
+                      '-mlong-calls',  # Needed when compiling with -O0
+                    ],
+                  }],
+                  ['arm_thumb==1', {
+                    'cflags': [
+                      '-marm',
+                    ],
+                  }],
+                 ],
+              }],
+            ],
+          },
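+          # Hedged note: debug_optimize has a '%' default and is expanded with
+          # '>', so a single target could, for example, declare
+          #   'variables': { 'debug_optimize': '1' }
+          # to compile its Debug objects with -O1 instead of -O0.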
+          'Release_Base': {
+            'variables': {
+              'release_optimize%': '2',
+              # Binaries become big and gold is unable to perform GC
+              # and remove unused sections for some of the test targets
+              # on 32-bit platforms.
+              # (This is currently observed only on the chromeos valgrind
+              # bots.)
+              # The following flag disables the --gc-sections linker
+              # option for those bots.
+              'no_gc_sections%': 0,
+
+              # TODO(bradnelson): reexamine how this is done if we change the
+              # expansion of configurations
+              'release_valgrind_build%': 0,
+            },
+            'cflags': [
+              '-O>(release_optimize)',
+              # Don't emit the GCC version ident directives; they just end up
+              # in the .comment section, taking up binary size.
+              '-fno-ident',
+              # Put data and code in their own sections, so that unused symbols
+              # can be removed at link time with --gc-sections.
+              '-fdata-sections',
+              '-ffunction-sections',
+            ],
+            'ldflags': [
+              # Specifically tell the linker to perform optimizations.
+              # See http://lwn.net/Articles/192624/ .
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'conditions' : [
+              ['no_gc_sections==0', {
+                'ldflags': [
+                  '-Wl,--gc-sections',
+                ],
+              }],
+              ['OS=="android"', {
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+              ['clang==1', {
+                'cflags!': [
+                  '-fno-ident',
+                ],
+              }],
+              ['profiling==1', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-g',
+                ],
+              }],
+              # At gyp time, we test the linker for ICF support; this flag
+              # is then provided to us by gyp.  (Currently only gold supports
+              # an --icf flag.)
+              # There seems to be a conflict between --icf and -pie in gold
+              # which can generate crashy binaries. As a security measure,
+              # -pie takes precedence for now.
+              ['LINKER_SUPPORTS_ICF==1 and release_valgrind_build==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      #'-Wl,--icf=safe',
+                      '-Wl,--icf=none',
+                    ]
+                  }]
+                ]
+              }],
+            ]
+          },
+        },
+        'variants': {
+          'coverage': {
+            'cflags': ['-fprofile-arcs', '-ftest-coverage'],
+            'ldflags': ['-fprofile-arcs'],
+          },
+          'profile': {
+            'cflags': ['-pg', '-g'],
+            'ldflags': ['-pg'],
+          },
+          'symbols': {
+            'cflags': ['-g'],
+          },
+        },
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'asflags': [
+                  # Needed so that libs with .s files (e.g. libicudata.a)
+                  # are compatible with the general 32-bit-ness.
+                  '-32',
+                ],
+                # All floating-point computations on x87 happen in 80-bit
+                # precision.  Because the C and C++ language standards allow
+                # the compiler to keep the floating-point values in higher
+                # precision than what's specified in the source and doing so
+                # is more efficient than constantly rounding up to 64-bit or
+                # 32-bit precision as specified in the source, the compiler,
+                # especially in the optimized mode, tries very hard to keep
+                # values in x87 floating-point stack (in 80-bit precision)
+                # as long as possible. This has an important side effect: the
+                # real value used in a computation may change depending on how
+                # the compiler did the optimization - that is, the value kept
+                # in 80-bit precision is different from the value rounded down
+                # to 64-bit or 32-bit. There are compiler options that make
+                # this behavior consistent (e.g. -ffloat-store would keep all
+                # floating-point values in memory, thus forcing them to be
+                # rounded to their original precision), but they carry a
+                # significant runtime performance penalty.
+                #
+                # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+                # which keep floating-point values in SSE registers in its
+                # native precision (32-bit for single precision, and 64-bit
+                # for double precision values). This means the floating-point
+                # value used during computation does not change depending on
+                # how the compiler optimized the code, since the value is
+                # always kept in its specified precision.
+                'conditions': [
+                  ['branding=="Chromium" and disable_sse2==0', {
+                    'cflags': [
+                      '-march=pentium4',
+                      '-msse2',
+                      '-mfpmath=sse',
+                    ],
+                  }],
+                  # ChromeOS targets Pinetrail, which is sse3, but most of the
+                  # benefit comes from sse2 so this setting allows ChromeOS
+                  # to build on other CPUs.  In the future -march=atom would
+                  # help but requires a newer compiler.
+                  ['chromeos==1 and disable_sse2==0', {
+                    'cflags': [
+                      '-msse2',
+                    ],
+                  }],
+                  # Install packages have started cropping up with
+                  # different headers between the 32-bit and 64-bit
+                  # versions, so we have to shadow those differences off
+                  # and make sure a 32-bit-on-64-bit build picks up the
+                  # right files.
+                  ['host_arch!="ia32"', {
+                    'include_dirs+': [
+                      '/usr/include32',
+                    ],
+                  }],
+                ],
+                # -mmmx allows mmintrin.h to be used for mmx intrinsics.
+                # video playback is mmx and sse2 optimized.
+                'cflags': [
+                  '-m32',
+                  '-mmmx',
+                ],
+                'ldflags': [
+                  '-m32',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags_cc': [
+                  # The codesourcery arm-2009q3 toolchain warns that the ABI
+                  # has changed whenever it encounters a varargs function. This
+                  # silences those warnings, as they are not helpful and
+                  # clutter legitimate warnings.
+                  '-Wno-abi',
+                ],
+                'conditions': [
+                  ['arm_thumb==1', {
+                    'cflags': [
+                    '-mthumb',
+                    ]
+                  }],
+                  ['armv7==1', {
+                    'cflags': [
+                      '-march=armv7-a',
+                      '-mtune=cortex-a8',
+                      '-mfloat-abi=softfp',
+                    ],
+                    'conditions': [
+                      ['arm_neon==1', {
+                        'cflags': [ '-mfpu=neon', ],
+                      }, {
+                        'cflags': [ '-mfpu=<(arm_fpu)', ],
+                      }]
+                    ],
+                  }],
+                  ['OS=="android"', {
+                    # The following flags are derived from what Android uses
+                    # by default when building for arm.
+                    'cflags': [ '-Wno-psabi', ],
+                    'conditions': [
+                      ['arm_thumb == 1', {
+                        # Android toolchain doesn't support -mimplicit-it=thumb
+                        'cflags!': [ '-Wa,-mimplicit-it=thumb', ],
+                        'cflags': [ '-mthumb-interwork', ],
+                      }],
+                      ['armv7==0', {
+                        # Flags suitable for Android emulator
+                        'cflags': [
+                          '-march=armv5te',
+                          '-mtune=xscale',
+                          '-msoft-float',
+                          '-D__ARM_ARCH_5__',
+                          '-D__ARM_ARCH_5T__',
+                          '-D__ARM_ARCH_5E__',
+                          '-D__ARM_ARCH_5TE__',
+                        ],
+                      }],
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_fpic==1', {
+            'cflags': [
+              '-fPIC',
+            ],
+            'ldflags': [
+              '-fPIC',
+            ],
+          }],
+          # TODO(rkc): Currently building Chrome with the PIE flag causes
+          # remote debugging to break (remote debugger does not get correct
+          # section header offsets hence causing all symbol handling to go
+          # kaboom). See crosbug.com/15266
+          # Remove this flag once this issue is fixed.
+          ['linux_disable_pie==1', {
+            'target_conditions': [
+              ['_type=="executable"', {
+                'ldflags': [
+                  '-nopie',
+                ],
+              }],
+            ],
+          }],
+          ['sysroot!=""', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+              }]]
+          }],
+          ['clang==1', {
+            'cflags': [
+              '-Wheader-hygiene',
+              # Clang spots more unused functions.
+              '-Wno-unused-function',
+              # Don't die on dtoa code that uses a char as an array index.
+              '-Wno-char-subscripts',
+              # Especially needed for gtest macros using enum values from Mac
+              # system headers.
+              # TODO(pkasting): In C++11 this is legal, so this should be
+              # removed when we change to that.  (This is also why we don't
+              # bother fixing all these cases today.)
+              '-Wno-unnamed-type-template-args',
+              # WebKit uses nullptr in a legit way; other than that, this
+              # warning doesn't fire.
+              '-Wno-c++11-compat',
+              # This (rightfully) complains about 'override', which we use
+              # heavily.
+              '-Wno-c++11-extensions',
+            ],
+            'cflags!': [
+              # Clang doesn't seem to know this flag.
+              '-mfpmath=sse',
+            ],
+          }],
+          ['clang==1 and clang_use_chrome_plugins==1', {
+            'cflags': [
+              '<(clang_chrome_plugins_flags)',
+            ],
+          }],
+          ['clang==1 and clang_load!=""', {
+            'cflags': [
+              '-Xclang', '-load', '-Xclang', '<(clang_load)',
+            ],
+          }],
+          ['clang==1 and clang_add_plugin!=""', {
+            'cflags': [
+              '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+            ],
+          }],
+          ['asan==1', {
+            # TODO(glider): -fasan is deprecated. Remove it when we stop using
+            # it.
+            'cflags': [
+              '-fasan',
+              '-faddress-sanitizer',
+              '-w',
+            ],
+            'ldflags': [
+              '-fasan',
+              '-faddress-sanitizer',
+            ],
+            'defines': [
+              'ADDRESS_SANITIZER',
+            ],
+          }],
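+          # Hedged example: an AddressSanitizer build is typically requested
+          # by regenerating the projects with
+          #   GYP_DEFINES="asan=1"
+          # which, per the asan==1 blocks above, also forces clang on and
+          # turns the Chrome clang plugins off.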
+          ['no_strict_aliasing==1', {
+            'cflags': [
+              '-fno-strict-aliasing',
+            ],
+          }],
+          ['linux_breakpad==1', {
+            'cflags': [ '-g' ],
+            'defines': ['USE_LINUX_BREAKPAD'],
+          }],
+          ['linux_use_heapchecker==1', {
+            'variables': {'linux_use_tcmalloc%': 1},
+          }],
+          ['linux_use_tcmalloc==0', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['linux_use_heapchecker==0', {
+            'defines': ['NO_HEAPCHECKER'],
+          }],
+          ['linux_keep_shadow_stacks==1', {
+            'defines': ['KEEP_SHADOW_STACKS'],
+            'cflags': ['-finstrument-functions'],
+          }],
+        ],
+      },
+    }],
+    # FreeBSD-specific options; note that most FreeBSD options are set above,
+    # with Linux.
+    ['OS=="freebsd"', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--no-keep-memory',
+        ],
+      },
+    }],
+    # Android-specific options; note that most are set above with Linux.
+    ['OS=="android"', {
+      'variables': {
+        'android_target_arch%': 'arm',  # target_arch in android terms.
+        'conditions': [
+          # Android uses x86 instead of ia32 for their target_arch designation.
+          ['target_arch=="ia32"', {
+            'android_target_arch%': 'x86',
+          }],
+          # Use the shared stlport library when the system one is used.
+          # Figure this out early since it needs symbols from libgcc.a, so it
+          # has to be before that in the set of libraries.
+          ['use_system_stlport==1', {
+            'android_stlport_library': 'stlport',
+          }, {
+            'android_stlport_library': 'stlport_static',
+          }],
+        ],
+
+        # Placing this variable here prevents us from forking libvpx, which is
+        # used by remoting.  Remoting is off, so it needn't be built,
+        # so forking its deps seems like overkill.
+        # But this variable needs to be defined to properly run gyp.
+        # A proper solution is to have an OS==android conditional
+        # in third_party/libvpx/libvpx.gyp to define it.
+        'libvpx_path': 'lib/linux/arm',
+      },
+      'target_defaults': {
+        # Build a Release build by default to match Android build behavior.
+        # This is typical with Android because Debug builds tend to be much
+        # larger and run very slowly on constrained devices. It is still
+        # possible to do a Debug build by specifying BUILDTYPE=Debug on the
+        # 'make' command line.
+        'default_configuration': 'Release',
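+        # For example (per the note above), a Debug build would be requested
+        # on the make command line roughly as
+        #   make BUILDTYPE=Debug <some_target>
+        # where <some_target> stands for whatever target is being built.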
+
+        'variables': {
+          'release_extra_cflags%': '',
+         },
+
+        'target_conditions': [
+          # Settings for building device targets using Android's toolchain.
+          # These are based on the setup.mk file from the Android NDK.
+          #
+          # The NDK Android executable link step looks as follows:
+          #  $LDFLAGS
+          #  $(TARGET_CRTBEGIN_DYNAMIC_O)  <-- crtbegin.o
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #  $(TARGET_CRTEND_O)            <-- crtend.o
+          #
+          # For now the above are approximated for executables by adding
+          # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+          # of 'libraries'.
+          #
+          # The NDK Android shared library link step looks as follows:
+          #  $LDFLAGS
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  -l,--whole-archive
+          #  $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+          #  -l,--no-whole-archive
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #
+          # For now, assume we don't need any whole static libs.
+          #
+          # For both executables and shared libraries, add the proper
+          # libgcc.a to the start of libraries which puts it in the
+          # proper spot after .o and .a files get linked in.
+          #
+          # TODO: The proper thing to do longer-term would be proper gyp
+          # support for a custom link command line.
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'cflags': [
+              '-U__linux__',  # Don't allow toolchain to claim -D__linux__
+              '-ffunction-sections',
+              '-funwind-tables',
+              '-g',
+              '-fstack-protector',
+              '-fno-short-enums',
+              '-finline-limit=64',
+              '-Wa,--noexecstack',
+              '-Wno-error=non-virtual-dtor',  # TODO(michaelbai): Fix warnings.
+              '<@(release_extra_cflags)',
+              # Note: This include is in cflags to ensure that it comes after
+              # all of the includes.
+              '-I<(android_ndk_include)',
+            ],
+            'defines': [
+              'ANDROID',
+              '__GNU_SOURCE=1',  # Necessary for clone()
+              'USE_STLPORT=1',
+              '_STLP_USE_PTR_SPECIALIZATIONS=1',
+              'HAVE_OFF64_T',
+              'HAVE_SYS_UIO_H',
+              'ANDROID_BINSIZE_HACK', # Enable temporary hacks to reduce binsize.
+            ],
+            'ldflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'ldflags': [
+              '-nostdlib',
+              '-Wl,--no-undefined',
+              '-Wl,--icf=safe',  # Enable identical code folding to reduce size
+              # Don't export symbols from statically linked libraries.
+              '-Wl,--exclude-libs=ALL',
+            ],
+            'libraries': [
+              '-l<(android_stlport_library)',
+              # Manually link the libgcc.a that the cross compiler uses.
+              '<!($CROSS_CC -print-libgcc-file-name)',
+              '-lc',
+              '-ldl',
+              '-lstdc++',
+              '-lm',
+            ],
+            'conditions': [
+              ['android_build_type==0', {
+                'ldflags': [
+                  '--sysroot=<(android_ndk_sysroot)',
+                ],
+              }],
+              # NOTE: The stlport header include paths below are specified in
+              # cflags rather than include_dirs because they need to come
+              # after include_dirs. Think of them like system headers, but
+              # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+              # toolchain (circa Gingerbread) will exhibit strange errors.
+              # The include ordering here is important; change with caution.
+              ['use_system_stlport==0', {
+                'cflags': [
+                  '-I<(android_ndk_root)/sources/cxx-stl/stlport/stlport',
+                ],
+                'conditions': [
+                  ['target_arch=="arm" and armv7==1', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi-v7a',
+                    ],
+                  }],
+                  ['target_arch=="arm" and armv7==0', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'ldflags': [
+                      '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/x86',
+                    ],
+                  }],
+                ],
+              }],
+              ['target_arch=="ia32"', {
+                # The x86 toolchain currently has problems with stack-protector.
+                'cflags!': [
+                  '-fstack-protector',
+                ],
+                'cflags': [
+                  '-fno-stack-protector',
+                ],
+              }],
+            ],
+            'target_conditions': [
+              ['_type=="executable"', {
+                'ldflags': [
+                  '-Bdynamic',
+                  '-Wl,-dynamic-linker,/system/bin/linker',
+                  '-Wl,--gc-sections',
+                  '-Wl,-z,nocopyreloc',
+                  # crtbegin_dynamic.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_dynamic.o',
+                ],
+                'libraries': [
+                  # crtend_android.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_android.o',
+                ],
+              }],
+              ['_type=="shared_library"', {
+                'ldflags': [
+                  '-Wl,-shared,-Bsymbolic',
+                ],
+              }],
+            ],
+          }],
+          # Settings for building host targets using the system toolchain.
+          ['_toolset=="host"', {
+            'ldflags!': [
+              '-Wl,-z,noexecstack',
+              '-Wl,--gc-sections',
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'sources/': [
+              ['exclude', '_android(_unittest)?\\.cc$'],
+              ['exclude', '(^|/)android/']
+            ],
+          }],
+        ],
+      },
+    }],
+    ['OS=="solaris"', {
+      'cflags!': ['-fvisibility=hidden'],
+      'cflags_cc!': ['-fvisibility-inlines-hidden'],
+    }],
+    ['OS=="mac"', {
+      'target_defaults': {
+        'variables': {
+          # These should end with %, but there seems to be a bug with % in
+          # variables that are intended to be set to different values in
+          # different targets, like these.
+          'mac_pie': 1,        # Most executables can be position-independent.
+          'mac_real_dsym': 0,  # Fake .dSYMs are fine in most cases.
+          'mac_strip': 1,      # Strip debugging symbols from the target.
+        },
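+        # Hypothetical per-target override illustrating why these are meant to
+        # vary per target: a target that must keep its symbols could declare
+        #   'variables': { 'mac_strip': 0, 'mac_real_dsym': 1 },
+        # in its own target dict.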
+        'mac_bundle': 0,
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          'GCC_C_LANGUAGE_STANDARD': 'c99',         # -std=c99
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+          'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+          'GCC_OBJC_CALL_CXX_CDTORS': 'YES',        # -fobjc-call-cxx-cdtors
+          'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',      # -fvisibility=hidden
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',    # -Werror
+          'GCC_VERSION': '4.2',
+          'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',  # -Wnewline-eof
+          # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+          'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+          # Keep pch files below xcodebuild/.
+          'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+          'USE_HEADERMAP': 'NO',
+          'OTHER_CFLAGS': [
+            '-fno-strict-aliasing',  # See http://crbug.com/32204
+          ],
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-Wextra',
+            # Don't warn about unused function parameters.
+            '-Wno-unused-parameter',
+            # Don't warn about the "struct foo f = {0};" initialization
+            # pattern.
+            '-Wno-missing-field-initializers',
+          ],
+          'conditions': [
+            ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+                                 {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+            ],
+            ['clang==1', {
+              'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+              'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+
+              # Don't use -Wc++0x-extensions, which Xcode 4 enables by default
+              # when building with clang. This warning is triggered when the
+              # override keyword is used via the OVERRIDE macro from
+              # base/compiler_specific.h.
+              'CLANG_WARN_CXX0X_EXTENSIONS': 'NO',
+
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'WARNING_CFLAGS': [
+                '-Wheader-hygiene',
+                # Don't die on dtoa code that uses a char as an array index.
+                # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+                '-Wno-char-subscripts',
+                # Clang spots more unused functions.
+                '-Wno-unused-function',
+                # See comments on this flag higher up in this file.
+                '-Wno-unnamed-type-template-args',
+                # WebKit uses nullptr in a legit way; other than that, this
+                # warning doesn't fire.
+                '-Wno-c++0x-compat',
+                # This (rightfully) complains about 'override', which we use
+                # heavily.
+                '-Wno-c++11-extensions',
+              ],
+            }],
+            ['clang==1 and clang_use_chrome_plugins==1', {
+              'OTHER_CFLAGS': [
+                '<(clang_chrome_plugins_flags)',
+              ],
+            }],
+            ['clang==1 and clang_load!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-load', '-Xclang', '<(clang_load)',
+              ],
+            }],
+            ['clang==1 and clang_add_plugin!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+              ],
+            }],
+          ],
+        },
+        'conditions': [
+          ['clang==1', {
+            'variables': {
+              'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+            },
+          }],
+          ['asan==1', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fasan',
+                '-faddress-sanitizer',
+                '-w',
+              ],
+              'OTHER_LDFLAGS': [
+                '-fasan',
+                '-faddress-sanitizer',
+                # The symbols below are referenced in the ASan runtime
+                # library (compiled on OS X 10.6), but may be unavailable
+                # on earlier OS X versions. Because Chromium is currently
+                # targeting 10.5.0, we need to explicitly mark these
+                # symbols as dynamic_lookup.
+                '-Wl,-U,_malloc_default_purgeable_zone',
+                '-Wl,-U,_malloc_zone_memalign',
+                '-Wl,-U,_dispatch_sync_f',
+                '-Wl,-U,_dispatch_async_f',
+                '-Wl,-U,_dispatch_barrier_async_f',
+                '-Wl,-U,_dispatch_group_async_f',
+                '-Wl,-U,_dispatch_after_f',
+              ],
+            },
+            'defines': [
+              'ADDRESS_SANITIZER',
+            ],
+          }],
+        ],
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+          }],
+          ['_mac_bundle', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+          }],
+          ['_type=="executable"', {
+            'postbuilds': [
+              {
+                # Arranges for data (heap) pages to be protected against
+                # code execution when running on Mac OS X 10.7 ("Lion"), and
+                # ensures that the position-independent executable (PIE) bit
+                # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+                'variables': {
+                  # Define change_mach_o_flags in a variable ending in _path
+                  # so that GYP understands it's a path and performs proper
+                  # relativization during dict merging.
+                  'change_mach_o_flags_path':
+                      'mac/change_mach_o_flags_from_xcode.sh',
+                  'change_mach_o_flags_options%': [
+                  ],
+                  'target_conditions': [
+                    ['mac_pie==0 or release_valgrind_build==1', {
+                      # Don't enable PIE if it's unwanted. It's unwanted if
+                      # the target specifies mac_pie=0 or if building for
+                      # Valgrind, because Valgrind doesn't understand slide.
+                      # See the similar mac_pie/release_valgrind_build check
+                      # below.
+                      'change_mach_o_flags_options': [
+                        '--no-pie',
+                      ],
+                    }],
+                  ],
+                },
+                'postbuild_name': 'Change Mach-O Flags',
+                'action': [
+                  '<(change_mach_o_flags_path)',
+                  '>@(change_mach_o_flags_options)',
+                ],
+              },
+            ],
+            'conditions': [
+              ['asan==1', {
+                'variables': {
+                 'asan_saves_file': 'asan.saves',
+                },
+                'xcode_settings': {
+                  'CHROMIUM_STRIP_SAVE_FILE': '<(asan_saves_file)',
+                },
+              }],
+            ],
+            'target_conditions': [
+              ['mac_pie==1 and release_valgrind_build==0', {
+                # Turn on position-independence (ASLR) for executables. When
+                # PIE is on for the Chrome executables, the framework will
+                # also be subject to ASLR.
+                # Don't do this when building for Valgrind, because Valgrind
+                # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+                # understand slide, and get rid of the Valgrind check.
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-pie',  # Position-independent executable (MH_PIE)
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['(_type=="executable" or _type=="shared_library" or \
+             _type=="loadable_module") and mac_strip!=0', {
+            'target_conditions': [
+              ['mac_real_dsym == 1', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'configurations': {
+                  'Release_Base': {
+                    'xcode_settings': {
+                      'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                      'DEPLOYMENT_POSTPROCESSING': 'YES',
+                      'STRIP_INSTALLED_PRODUCT': 'YES',
+                      'target_conditions': [
+                        ['_type=="shared_library" or _type=="loadable_module"', {
+                          # The Xcode default is to strip debugging symbols
+                          # only (-S).  Local symbols should be stripped as
+                          # well, which will be handled by -x.  Xcode will
+                          # continue to insert -S when stripping even when
+                          # additional flags are added with STRIPFLAGS.
+                          'STRIPFLAGS': '-x',
+                        }],  # _type=="shared_library" or _type=="loadable_module"
+                      ],  # target_conditions
+                    },  # xcode_settings
+                  },  # configuration "Release"
+                },  # configurations
+              }, {  # mac_real_dsym != 1
+                # To get a fast fake .dSYM bundle, use a post-build step to
+                # produce the .dSYM and strip the executable.  strip_from_xcode
+                # only operates in the Release configuration.
+                'postbuilds': [
+                  {
+                    'variables': {
+                      # Define strip_from_xcode in a variable ending in _path
+                      # so that gyp understands it's a path and performs proper
+                      # relativization during dict merging.
+                      'strip_from_xcode_path': 'mac/strip_from_xcode',
+                    },
+                    'postbuild_name': 'Strip If Needed',
+                    'action': ['<(strip_from_xcode_path)'],
+                  },
+                ],  # postbuilds
+              }],  # mac_real_dsym
+            ],  # target_conditions
+          }],  # (_type=="executable" or _type=="shared_library" or
+               #  _type=="loadable_module") and mac_strip!=0
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac"
+    ['OS=="win"', {
+      'target_defaults': {
+        'defines': [
+          '_WIN32_WINNT=0x0601',
+          'WINVER=0x0601',
+          'WIN32',
+          '_WINDOWS',
+          'NOMINMAX',
+          'PSAPI_VERSION=1',
+          '_CRT_RAND_S',
+          'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+          'WIN32_LEAN_AND_MEAN',
+          '_ATL_NO_OPENGL',
+          '_HAS_TR1=0',
+        ],
+        'conditions': [
+          ['component=="static_library"', {
+            'defines': [
+              '_HAS_EXCEPTIONS=0',
+            ],
+          }],
+          ['secure_atl', {
+            'defines': [
+              '_SECURE_ATL',
+            ],
+          }],
+        ],
+        'msvs_system_include_dirs': [
+          '<(DEPTH)/third_party/directxsdk/files/Include',
+          '<(DEPTH)/third_party/platformsdk_win7/files/Include',
+          '$(VSInstallDir)/VC/atlmfc/include',
+        ],
+        'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
+        'msvs_disabled_warnings': [4351, 4396, 4503, 4819,
+          # TODO(maruel): These warnings are level 4. They will be slowly
+          # removed as code is fixed.
+          4100, 4121, 4125, 4127, 4130, 4131, 4189, 4201, 4238, 4244, 4245,
+          4310, 4355, 4428, 4481, 4505, 4510, 4512, 4530, 4610, 4611, 4701,
+          4702, 4706,
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'MinimalRebuild': 'false',
+            'BufferSecurityCheck': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'RuntimeTypeInfo': 'false',
+            'WarningLevel': '4',
+            'WarnAsError': 'true',
+            'DebugInformationFormat': '3',
+            'conditions': [
+              ['msvs_multi_core_compile', {
+                'AdditionalOptions': ['/MP'],
+              }],
+              ['MSVS_VERSION=="2005e"', {
+                'AdditionalOptions': ['/w44068'], # Unknown pragma to 4 (ATL)
+              }],
+              ['component=="shared_library"', {
+                'ExceptionHandling': '1',  # /EHsc
+              }, {
+                'ExceptionHandling': '0',
+              }],
+            ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': ['/ignore:4221'],
+            'AdditionalLibraryDirectories': [
+              '<(DEPTH)/third_party/directxsdk/files/Lib/x86',
+              '<(DEPTH)/third_party/platformsdk_win7/files/Lib',
+            ],
+          },
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'wininet.lib',
+              'dnsapi.lib',
+              'version.lib',
+              'msimg32.lib',
+              'ws2_32.lib',
+              'usp10.lib',
+              'psapi.lib',
+              'dbghelp.lib',
+              'winmm.lib',
+              'shlwapi.lib',
+            ],
+            'conditions': [
+              ['msvs_express', {
+                # Explicitly required when using the ATL with express
+                'AdditionalDependencies': [
+                  'atlthunk.lib',
+                ],
+              }],
+              ['MSVS_VERSION=="2005e"', {
+                # Non-express versions link automatically to these
+                'AdditionalDependencies': [
+                  'advapi32.lib',
+                  'comdlg32.lib',
+                  'ole32.lib',
+                  'shell32.lib',
+                  'user32.lib',
+                  'winspool.lib',
+                ],
+              }],
+            ],
+            'AdditionalLibraryDirectories': [
+              '<(DEPTH)/third_party/directxsdk/files/Lib/x86',
+              '<(DEPTH)/third_party/platformsdk_win7/files/Lib',
+            ],
+            'GenerateDebugInformation': 'true',
+            'MapFileName': '$(OutDir)\\$(TargetName).map',
+            'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+            'FixedBaseAddress': '1',
+            # SubSystem values:
+            #   0 == not set
+            #   1 == /SUBSYSTEM:CONSOLE
+            #   2 == /SUBSYSTEM:WINDOWS
+            # Most of the executables we'll ever create are tests
+            # and utilities with console output.
+            'SubSystem': '1',
+          },
+          'VCMIDLTool': {
+            'GenerateStublessProxies': 'true',
+            'TypeLibraryName': '$(InputName).tlb',
+            'OutputDirectory': '$(IntDir)',
+            'HeaderFileName': '$(InputName).h',
+            'DLLDataFileName': 'dlldata.c',
+            'InterfaceIdentifierFileName': '$(InputName)_i.c',
+            'ProxyFileName': '$(InputName)_p.c',
+          },
+          'VCResourceCompilerTool': {
+            'Culture' : '1033',
+            'AdditionalIncludeDirectories': [
+              '<(DEPTH)',
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+        },
+      },
+    }],
+    ['disable_nacl==1', {
+      'target_defaults': {
+        'defines': [
+          'DISABLE_NACL',
+        ],
+      },
+    }],
+    ['OS=="win" and msvs_use_common_linker_extras', {
+      'target_defaults': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'DelayLoadDLLs': [
+              'dbghelp.dll',
+              'dwmapi.dll',
+              'uxtheme.dll',
+            ],
+          },
+        },
+        'configurations': {
+          'x86_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  '/safeseh',
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+          'x64_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  # safeseh is not compatible with x64
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+        },
+      },
+    }],
+    ['enable_new_npdevice_api==1', {
+      'target_defaults': {
+        'defines': [
+          'ENABLE_NEW_NPDEVICE_API',
+        ],
+      },
+    }],
+    ['clang==1', {
+      'make_global_settings': [
+        ['CC', '<(make_clang_dir)/bin/clang'],
+        ['CXX', '<(make_clang_dir)/bin/clang++'],
+        ['LINK', '$(CXX)'],
+        ['CC.host', '$(CC)'],
+        ['CXX.host', '$(CXX)'],
+        ['LINK.host', '$(LINK)'],
+      ],
+    }],
+  ],
+  'xcode_settings': {
+    # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+    # This block adds *project-wide* configuration settings to each project
+    # file.  It's almost always wrong to put things here.  Specify your
+    # custom xcode_settings in target_defaults to add them to targets instead.
+
+    # In an Xcode Project Info window, the "Base SDK for All Configurations"
+    # setting sets the SDK on a project-wide basis.  In order to get the
+    # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+    # here at the project level.
+    'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+
+    # The Xcode generator will look for an xcode_settings section at the root
+    # of each dict and use it to apply settings on a file-wide basis.  Most
+    # settings should not be here, they should be in target-specific
+    # xcode_settings sections, or better yet, should use non-Xcode-specific
+    # settings in target dicts.  SYMROOT is a special case, because many other
+    # Xcode variables depend on it, including variables such as
+    # PROJECT_DERIVED_FILE_DIR.  When a source group corresponding to something
+    # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+    # files to appear (when present) in the UI as actual files and not red
+    # "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+    # and therefore SYMROOT, needs to be set at the project level.
+    'SYMROOT': '<(DEPTH)/xcodebuild',
+  },
+}
diff --git a/build/compiler_version.py b/build/compiler_version.py
new file mode 100755
index 0000000..9132261
--- /dev/null
+++ b/build/compiler_version.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+def GetVersion(compiler):
+  try:
+    # Note that compiler could be something tricky like "distcc g++".
+    compiler = compiler + " -dumpversion"
+    pipe = subprocess.Popen(compiler, stdout=subprocess.PIPE, shell=True)
+    gcc_output = pipe.communicate()[0]
+    result = re.match(r"(\d+)\.(\d+)", gcc_output)
+    return result.group(1) + result.group(2)
+  except Exception, e:
+    print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+    print >> sys.stderr, e
+    return ""
+
+def main():
+  # Check if CXX environment variable exists and
+  # if it does use that compiler.
+  cxx = os.getenv("CXX", None)
+  if cxx:
+    cxxversion = GetVersion(cxx)
+    if cxxversion != "":
+      print cxxversion
+      return 0
+  else:
+    # Otherwise we check the g++ version.
+    gccversion = GetVersion("g++")
+    if gccversion != "":
+      print gccversion
+      return 0
+
+  return 1
+
+if __name__ == "__main__":
+  sys.exit(main())
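For illustration, a minimal sketch of how compiler_version.py condenses the
version string; the g++ 4.6.1 value used here is hypothetical, not something
this drop pins:

    import re

    # With a hypothetical g++ 4.6.1 on the path, `g++ -dumpversion` prints
    # "4.6.1"; the regex captures ("4", "6") and the script prints "46".
    gcc_output = "4.6.1"
    result = re.match(r"(\d+)\.(\d+)", gcc_output)
    print result.group(1) + result.group(2)   # prints "46"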
diff --git a/build/cp.py b/build/cp.py
new file mode 100644
index 0000000..7dfeb38
--- /dev/null
+++ b/build/cp.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import shutil, sys
+
+""" Copy File.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+def Main(src, dst):
+  return shutil.copyfile(src, dst)
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/dir_exists.py b/build/dir_exists.py
new file mode 100755
index 0000000..0a89bc8
--- /dev/null
+++ b/build/dir_exists.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+  sys.stdout.write(str(os.path.isdir(sys.argv[1])))
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..46c7879
--- /dev/null
+++ b/build/download_nacl_toolchains.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import sys
+
+
+def Main(args):
+  # Exit early if disable_nacl=1.
+  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  # Handle chromeos=1 specially (until it's building its own toolchain).
+  if 'chromeos=1' in os.environ.get('GYP_DEFINES', ''):
+    args = [
+      '--nacl-newlib-only',
+      '--file-hash', 'linux_x86_newlib',
+           '1deb316302fde89a2200dff6550cf510ae90b89b',
+      '--base-url', ('https://commondatastorage.googleapis.com/'
+                     'nativeclient-archive2/special_chromeos'),
+      '--x86-version', '7258',
+    ]
+    print 'NOTE: Special handling for chromeos'
+    print 'Running with these arguments instead:'
+    print args
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  download_script = os.path.join(nacl_build_dir, 'download_toolchains.py')
+  if not os.path.exists(download_script):
+    print "Can't find '%s'" % download_script
+    print 'Presumably you are intentionally building without NativeClient.'
+    print 'Skipping NativeClient toolchain download.'
+    sys.exit(0)
+  sys.path.insert(0, nacl_build_dir)
+  import download_toolchains
+  download_toolchains.Main(args)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/escape_unicode.py b/build/escape_unicode.py
new file mode 100755
index 0000000..859ba5d
--- /dev/null
+++ b/build/escape_unicode.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert any unicode characters found in the input file to C literals."""
+
+import codecs
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> <input_file>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+
+  options, arglist = parser.parse_args(argv)
+
+  if not options.output_dir:
+    print "output_dir required"
+    return 1
+
+  if len(arglist) != 2:
+    print "input_file required"
+    return 1
+
+  in_filename = arglist[1]
+
+  if not in_filename.endswith('.utf8'):
+    print "input_file should end in .utf8"
+    return 1
+
+  out_filename = os.path.join(options.output_dir, os.path.basename(
+      os.path.splitext(in_filename)[0]))
+
+  WriteEscapedFile(in_filename, out_filename)
+  return 0
+
+
+def WriteEscapedFile(in_filename, out_filename):
+  input_data = codecs.open(in_filename, 'r', 'utf8').read()
+  with codecs.open(out_filename, 'w', 'ascii') as out_file:
+    for i, char in enumerate(input_data):
+      if ord(char) > 127:
+        out_file.write(repr(char.encode('utf8'))[1:-1])
+        if input_data[i + 1:i + 2] in '0123456789abcdefABCDEF':
+          out_file.write('""')
+      else:
+        out_file.write(char.encode('ascii'))
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
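As a minimal sketch of what WriteEscapedFile emits (the sample character is
hypothetical): a non-ASCII character is replaced by the \x escapes of its
UTF-8 bytes, and an empty string literal is appended when the next character
is a hex digit so the C escape does not swallow it:

    # -*- coding: utf-8 -*-
    # U+00E9 ('e' with acute accent) encodes to the UTF-8 bytes C3 A9.
    char = u'\xe9'
    escaped = repr(char.encode('utf8'))[1:-1]
    print escaped   # prints \xc3\xa9; a "" follows if the next char is a hex digit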
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
new file mode 100755
index 0000000..d5410d6
--- /dev/null
+++ b/build/extract_from_cab.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def main():
+  if len(sys.argv) != 4:
+    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+    return 1
+
+  [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+  # Expand.exe does its work in a fixed-named temporary directory created within
+  # the given output directory. This is a problem for concurrent extractions, so
+  # create a unique temp dir within the desired output directory to work around
+  # this limitation.
+  temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+  try:
+    # Invoke the Windows expand utility to extract the file.
+    level = subprocess.call(
+        ['expand', cab_path, '-F:' + archived_file, temp_dir])
+    if level == 0:
+      # Move the output file into place, preserving expand.exe's behavior of
+      # paving over any preexisting file.
+      output_file = os.path.join(output_dir, archived_file)
+      try:
+        os.remove(output_file)
+      except OSError:
+        pass
+      os.rename(os.path.join(temp_dir, archived_file), output_file)
+  finally:
+    shutil.rmtree(temp_dir, True)
+
+  if level != 0:
+    return level
+
+  # The expand utility preserves the modification date and time of the archived
+  # file. Touch the extracted file. This helps build systems that compare the
+  # modification times of input and output files to determine whether to do an
+  # action.
+  os.utime(os.path.join(output_dir, archived_file), None)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
new file mode 100644
index 0000000..60a2320
--- /dev/null
+++ b/build/grit_action.gypi
@@ -0,0 +1,31 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+  },
+  'inputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) --inputs <(grit_grd_file))',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) --outputs \'<(grit_out_dir)\' <(grit_grd_file))',
+  ],
+  'action': ['<@(grit_cmd)',
+             '-i', '<(grit_grd_file)', 'build',
+             '-o', '<(grit_out_dir)',
+             '<@(grit_defines)' ],
+  'message': 'Generating resources from <(grit_grd_file)',
+}
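A minimal sketch of an action consuming this include; the action and .grd
names are hypothetical, and grit_out_dir is assumed to be defined at the
target level (for example via grit_target.gypi below):

    'actions': [
      {
        'action_name': 'example_resources',            # hypothetical name
        'variables': {
          'grit_grd_file': 'resources/example.grd',    # hypothetical .grd path
        },
        'includes': [ '../build/grit_action.gypi' ],
      },
    ],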
diff --git a/build/grit_target.gypi b/build/grit_target.gypi
new file mode 100644
index 0000000..e05f927
--- /dev/null
+++ b/build/grit_target.gypi
@@ -0,0 +1,33 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this, the following variables need to be
+# defined:
+#   grit_out_dir: string: the output directory path
+
+# NOTE: This file is optional; not all targets that use grit include it, and
+# some do their own custom directives instead.
+{
+  'conditions': [
+    # If the target is a direct binary, it needs to be able to find the header,
+    # otherwise it is probably a supporting target just for grit, so the include
+    # dir needs to be set on anything that depends on this action.
+    ['_type=="executable" or _type=="shared_library" or \
+      _type=="loadable_module" or _type=="static_library"', {
+      'include_dirs': [
+        '<(grit_out_dir)',
+      ],
+    }, {
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(grit_out_dir)',
+        ],
+      },
+    }],
+    ['OS=="win"', {
+      'dependencies': ['<(DEPTH)/build/win/system.gyp:cygwin'],
+    }],
+  ],
+}
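A minimal sketch of a resource target pulling in this include; the target
name and output directory are hypothetical:

    {
      'target_name': 'example_resources',                       # hypothetical
      'type': 'none',
      'variables': {
        'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/example',   # hypothetical
      },
      'includes': [ '../build/grit_target.gypi' ],
      # per-.grd actions built on grit_action.gypi would be listed here
    },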
diff --git a/build/gyp_chromium b/build/gyp_chromium
new file mode 100755
index 0000000..226ba1a
--- /dev/null
+++ b/build/gyp_chromium
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import glob
+import os
+import shlex
+import subprocess
+import sys
+
+script_dir = os.path.dirname(__file__)
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds.  Conversely, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
+if sys.platform == 'win32':
+  try:
+    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
+    import psyco
+  except:
+    psyco = None
+else:
+  psyco = None
+
+def apply_gyp_environment(file_path=None):
+  """
+  Reads in a *.gyp_env file and applies the valid keys to os.environ.
+  """
+  if not file_path or not os.path.exists(file_path):
+    return
+  file_contents = open(file_path).read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = ( 'CHROMIUM_GYP_FILE',
+                     'CHROMIUM_GYP_SYNTAX_CHECK',
+                     'GYP_DEFINES',
+                     'GYP_GENERATOR_FLAGS',
+                     'GYP_GENERATOR_OUTPUT', )
+  for var in supported_vars:
+    val = file_data.get(var)
+    if val:
+      if var in os.environ:
+        print 'INFO: Environment value for "%s" overrides value in %s.' % (
+            var, os.path.abspath(file_path)
+        )
+      else:
+        os.environ[var] = val
+
+def additional_include_files(args=[]):
+  """
+  Returns a list of additional (.gypi) files to include, without
+  duplicating ones that are already specified on the command line.
+  """
+  # Determine the include files specified on the command line.
+  # This doesn't cover all the different option formats you can use,
+  # but it's mainly intended to avoid duplicating flags on the automatic
+  # makefile regeneration which only uses this format.
+  specified_includes = set()
+  for arg in args:
+    if arg.startswith('-I') and len(arg) > 2:
+      specified_includes.add(os.path.realpath(arg[2:]))
+
+  result = []
+  def AddInclude(path):
+    if os.path.realpath(path) not in specified_includes:
+      result.append(path)
+
+  # Always include common.gypi.
+  AddInclude(os.path.join(script_dir, 'common.gypi'))
+
+  # Optionally add supplemental .gypi files if present.
+  supplements = glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
+  for supplement in supplements:
+    AddInclude(supplement)
+
+  return result
+
+if __name__ == '__main__':
+  args = sys.argv[1:]
+
+  # Use the Psyco JIT if available.
+  if psyco:
+    psyco.profile()
+    print "Enabled Psyco JIT."
+
+  # Fall back on hermetic python if we happen to get run under cygwin.
+  # TODO(bradnelson): take this out once this issue is fixed:
+  #    http://code.google.com/p/gyp/issues/detail?id=177
+  if sys.platform == 'cygwin':
+    python_dir = os.path.join(chrome_src, 'third_party', 'python_26')
+    env = os.environ.copy()
+    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
+    p = subprocess.Popen(
+       [os.path.join(python_dir, 'python.exe')] + sys.argv,
+       env=env, shell=False)
+    p.communicate()
+    sys.exit(p.returncode)
+
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    gyp_env_path = os.path.join(os.path.dirname(chrome_src), 'chromium.gyp_env')
+    apply_gyp_environment(gyp_env_path)
+
+  # This could give false positives since it doesn't actually do real option
+  # parsing.  Oh well.
+  gyp_file_specified = False
+  for arg in args:
+    if arg.endswith('.gyp'):
+      gyp_file_specified = True
+      break
+
+  # If we didn't get a file, check an env var, and then fall back to
+  # assuming 'all.gyp' from the same directory as the script.
+  if not gyp_file_specified:
+    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
+    if gyp_file:
+      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
+      # path separators even on Windows due to the use of shlex.split().
+      args.extend(shlex.split(gyp_file))
+    else:
+      args.append(os.path.join(script_dir, 'all.gyp'))
+
+  args.extend(['-I' + i for i in additional_include_files(args)])
+
+  # There shouldn't be a circular dependency relationship between .gyp files,
+  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
+  # currently exist.  The check for circular dependencies is currently
+  # bypassed on other platforms, but is left enabled on the Mac, where a
+  # violation of the rule causes Xcode to misbehave badly.
+  # TODO(mark): Find and kill remaining circular dependencies, and remove this
+  # option.  http://crbug.com/35878.
+  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+  # list.
+  if sys.platform not in ('darwin',):
+    args.append('--no-circular-check')
+
+  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+  if syntax_check and int(syntax_check):
+    args.append('--check')
+
+  print 'Updating projects from gyp files...'
+  sys.stdout.flush()
+
+  # Off we go...
+  sys.exit(gyp.main(args))
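A minimal sketch of a chromium.gyp_env file as consumed by
apply_gyp_environment() above; it lives in the directory above the source
checkout, is eval'd as a Python dict, only the keys listed in supported_vars
are applied, and values already set in the real environment take precedence.
The flag choices shown are hypothetical:

    {
      'GYP_DEFINES': 'clang=1 asan=1',         # hypothetical flag choices
      'CHROMIUM_GYP_SYNTAX_CHECK': '1',        # makes the script pass --check
    }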
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
new file mode 100755
index 0000000..c23107e
--- /dev/null
+++ b/build/install-build-deps-android.sh
@@ -0,0 +1,132 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# This script installs the Android SDK and NDK needed to build Chromium on
+# Android; it doesn't need to run as root.
+
+# Using Android 3.2, API Level: 13 (Honeycomb). The SDK package is about 30M.
+SDK_FILE_NAME="android-sdk_r13-linux_x86.tgz"
+SDK_DOWNLOAD_URL="http://dl.google.com/android/${SDK_FILE_NAME}"
+SDK_MD5SUM="d80d7530a46c665644ae76084a9a0dc4"
+
+# Use "ANDROID_SDK_ROOT/tools/android list targets" to get the matching target
+# id, which will be loaded in the emulator for testing.
+# For example, the output of listing the targets could look like the
+# following, in which case 'android-13' is the SDK_TARGET_ID.
+# id: 9 or "android-13"
+#     Name: Android 3.2
+#     Type: Platform
+#     API level: 13
+#     Revision: 1
+#     Skins: WXGA (default)
+SDK_TARGET_ID=android-13
+
+# Using NDK r7; the package is about 64M.
+NDK_FILE_NAME="android-ndk-r7-linux-x86.tar.bz2"
+NDK_DOWNLOAD_URL="http://dl.google.com/android/ndk/${NDK_FILE_NAME}"
+NDK_MD5SUM="bf15e6b47bf50824c4b96849bf003ca3"
+
+# The temporary directory used to store the downloaded file.
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+##########################################################
+# Download and install a tgz package by wget and tar -xvf.
+# The current directory is changed in this function.
+# Arguments:
+#   local_file_name, the name of downloaded file.
+#   download_url, the url to download the package.
+#   md5, the package's md5 which could be found in download page.
+#   install_path, where the package should be installed.
+# Returns:
+#   None
+##########################################################
+install_dev_kit() {
+  local local_file_name="${1}"
+  local download_url="${2}"
+  local md5="${3}"
+  local install_path="${4}"
+
+  cd "${TEMPDIR}"
+  wget "${download_url}"
+
+  local computed_md5=$(md5sum "${local_file_name}" | cut -d' ' -f1)
+  if [[ "${computed_md5}" != "${md5}" ]]; then
+    echo "Downloaded ${local_file_name} has bad md5sum, which is expected" >& 2
+    echo "to be ${md5} but was ${computed_md5}" >& 2
+    exit 1
+  fi
+
+  echo "Install ${local_file_name}"
+  mv "${local_file_name}" "${install_path}"
+  cd "${install_path}"
+  tar -xvf "${local_file_name}"
+}
+
+if [[ -z "${ANDROID_SDK_ROOT}" ]]; then
+  echo "Please set ANDROID_SDK_ROOT to where it should be installed." >& 2
+  echo "For example: /usr/local/android-sdk-linux_x86" >& 2
+  exit 1
+fi
+
+if [[ -z "${ANDROID_NDK_ROOT}" ]]; then
+  echo "Please set ANDROID_NDK_ROOT to where it should be installed." >& 2
+  echo "For example: /usr/local/android-ndk-r6b" >& 2
+  exit 1
+fi
+
+# Install Android SDK if it doesn't exist.
+if [[ ! -d "${ANDROID_SDK_ROOT}" ]]; then
+  echo 'Install ANDROID SDK ...'
+  (install_dev_kit "${SDK_FILE_NAME}" "${SDK_DOWNLOAD_URL}" "${SDK_MD5SUM}" \
+                  $(dirname "${ANDROID_SDK_ROOT}"))
+fi
+
+# Install the target if it doesn't exist. The package installed above contains
+# no platform, platform-tool or tool; all of those should be installed by
+# ${ANDROID_SDK_ROOT}/tools/android.
+if ! "${ANDROID_SDK_ROOT}/tools/android" list targets \
+  | grep -q "${SDK_TARGET_ID}"; then
+  # Updates the SDK by installing the necessary components.
+  # From current configuration, all android platforms will be installed.
+  # This will take a little bit long time.
+  echo "Install platform, platform-tool and tool ..."
+
+  # This needs to be called twice.  The first time, "android" itself
+  # references
+  # https://dl-ssl.google.com/android/repository/addons_list.xml,
+  # which no longer exists.  On the second run, "android" (or one of
+  # its config files) has been updated to now reference
+  # https://dl-ssl.google.com/android/repository/addons_list-1.xml,
+  # which contains what we need.
+  for try in 1 2 ; do
+    echo "==== SDK update $try"
+    "${ANDROID_SDK_ROOT}"/tools/android update sdk --no-ui \
+      --filter platform,platform-tool,tool
+  done
+fi
+
+# Create an Android Virtual Device named 'buildbot' with default hardware
+# configuration and override the existing one, since there is no easy way to
+# check whether the current AVD has the correct configuration and it takes
+# almost no time to create a new one.
+"${ANDROID_SDK_ROOT}/tools/android" --silent create avd --name buildbot \
+  --target ${SDK_TARGET_ID} --force <<< "no"
+
+# Install Android NDK if it doesn't exist.
+if [[ ! -d "${ANDROID_NDK_ROOT}" ]]; then
+  echo 'Install ANDROID NDK ...'
+  (install_dev_kit "${NDK_FILE_NAME}" "${NDK_DOWNLOAD_URL}" "${NDK_MD5SUM}" \
+                  $(dirname "${ANDROID_NDK_ROOT}"))
+fi
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
new file mode 100755
index 0000000..bb16a79
--- /dev/null
+++ b/build/install-build-deps.sh
@@ -0,0 +1,513 @@
+#!/bin/bash -e
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--[no-]gold: enable or disable installation of gold linker"
+  echo "--[no-]lib32: enable or disable installation of 32 bit libraries"
+  echo "--[no-]restore-usr-bin-ld: enable or disable restoring /usr/bin/ld to"
+  echo "                           ld.bfd if it is currently gold"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  --gold)                   do_inst_gold=1;;
+  --no-gold)                do_inst_gold=0;;
+  --lib32)                  do_inst_lib32=1;;
+  --no-lib32)               do_inst_lib32=0;;
+  --restore-usr-bin-ld)     do_restore_usr_bin_ld=1;;
+  --no-restore-usr-bin-ld)  do_restore_usr_bin_ld=0;;
+  *) usage;;
+  esac
+  shift
+done
+
+install_gold() {
+  # Gold is optional; it's a faster replacement for ld,
+  # and makes life on 2GB machines much more pleasant.
+
+  # First make sure root can access this directory, as that's tripped
+  # up some folks.
+  if sudo touch xyz.$$
+  then
+    sudo rm xyz.$$
+  else
+    echo root cannot write to the current directory, not installing gold
+    return
+  fi
+
+  BINUTILS=binutils-2.21.1
+  BINUTILS_URL=http://ftp.gnu.org/gnu/binutils/$BINUTILS.tar.bz2
+  BINUTILS_SHA1=525255ca6874b872540c9967a1d26acfbc7c8230
+
+  test -f $BINUTILS.tar.bz2 || wget $BINUTILS_URL
+  if test "`sha1sum $BINUTILS.tar.bz2|cut -d' ' -f1`" != "$BINUTILS_SHA1"
+  then
+    echo Bad sha1sum for $BINUTILS.tar.bz2
+    exit 1
+  fi
+
+  tar -xjvf $BINUTILS.tar.bz2
+  cd $BINUTILS
+  ./configure --prefix=/usr/local/gold --enable-gold=default --enable-threads \
+    --enable-bfd=yes
+  NCPU=`cat /proc/cpuinfo |grep ^processor|wc -l`
+  make maybe-all-binutils maybe-all-gold maybe-all-ld -j${NCPU}
+  if sudo make maybe-install-binutils maybe-install-gold maybe-install-ld
+  then
+    # Still need to figure out a graceful way of pointing gyp to use
+    # /usr/local/gold/bin/ld without requiring the user to set environment
+    # variables.
+    sudo strip /usr/local/gold/bin/ld.gold
+    sudo strip /usr/local/gold/bin/ld.bfd
+  else
+    echo "make install failed, not installing gold"
+  fi
+}
+
+if ! egrep -q \
+    'Ubuntu (10\.04|10\.10|11\.04|11\.10|lucid|maverick|natty|oneiric)' \
+    /etc/issue; then
+  echo "Only Ubuntu 10.04 (lucid) through 11.10 (oneiric) are currently" \
+      "supported" >&2
+  exit 1
+fi
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+if [ "x$(id -u)" != x0 ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libpulse-dev"
+
+# Packages needed for development
+dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
+          language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
+          libcairo2-dev libcups2-dev libcurl4-gnutls-dev libdbus-glib-1-dev
+          libelf-dev libgconf2-dev libgl1-mesa-dev libglib2.0-dev
+          libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libjpeg62-dev
+          libkrb5-dev libnspr4-dev libnss3-dev libpam0g-dev libsctp-dev
+          libsqlite3-dev libssl-dev libudev-dev libwww-perl libxslt1-dev
+          libxss-dev libxt-dev libxtst-dev mesa-common-dev msttcorefonts patch
+          perl php5-cgi pkg-config python python-cherrypy3 python-dev
+          python-psutil rpm ruby subversion ttf-dejavu-core ttf-indic-fonts
+          ttf-kochi-gothic ttf-kochi-mincho ttf-thai-tlwg wdiff
+          $chromeos_dev_list"
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0 libcurl4-gnutls-dev"
+
+# Full list of required run-time libraries
+lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcups2 libdbus-glib-1-2
+          libexpat1 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
+          libgtk2.0-0 libpam0g libpango1.0-0 libpcre3 libpixman-1-0
+          libpng12-0 libstdc++6 libsqlite3-0 libudev0 libx11-6 libxau6 libxcb1
+          libxcomposite1 libxcursor1 libxdamage1 libxdmcp6 libxext6 libxfixes3
+          libxi6 libxinerama1 libxrandr2 libxrender1 libxtst6 zlib1g
+          $chromeos_lib_list"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libdbus-glib-1-2-dbg
+          libfontconfig1-dbg libglib2.0-0-dbg libgtk2.0-0-dbg
+          libpango1.0-0-dbg libpcre3-dbg libpixman-1-0-dbg
+          libsqlite3-0-dbg
+          libx11-6-dbg libxau6-dbg libxcb1-dbg libxcomposite1-dbg
+          libxcursor1-dbg libxdamage1-dbg libxdmcp6-dbg libxext6-dbg
+          libxfixes3-dbg libxi6-dbg libxinerama1-dbg libxrandr2-dbg
+          libxrender1-dbg libxtst6-dbg zlib1g-dbg"
+
+# Plugin lists needed for tests.
+plugin_list="flashplugin-installer"
+
+# Some NSS packages were renamed in Natty.
+if egrep -q 'Ubuntu (10\.04|10\.10)' /etc/issue; then
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+else
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
+    case "$c" in
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
+
+if test "$do_inst_syms" = ""
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Installing debugging symbols."
+else
+  echo "Skipping installation of debugging symbols."
+  dbg_list=
+fi
+
+sudo apt-get update
+
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
+echo "Finding missing packages..."
+packages="${dev_list} ${lib_list} ${dbg_list} ${plugin_list}"
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANG=C $new_list_cmd)"; then
+  # We probably never hit the following line.
+  echo "No missing packages, and the packages are up-to-date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have an exit status of 1.
+  # This indicates that we canceled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up-to-date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Some operating systems already ship gold (on recent Debian and
+# Ubuntu you can do "apt-get install binutils-gold" to get it), but
+# older releases didn't.  Additionally, gold 2.20 (included in Ubuntu
+# Lucid) makes binaries that just segfault, and 2.20.1 does not support
+# --map-whole-files.
+# So install from source if we don't have a good version.
+
+case `ld --version` in
+*gold*2.2[1-9].*)
+  echo "*** Warning ***"
+  echo "If the default linker is gold, linking may fail for:"
+  echo "the Linux kernel, kernel modules, Valgrind, and Wine."
+  echo "If you previously installed gold as the default linker,"
+  echo "you can restore the original linker by running:"
+  echo "'cd /usr/bin; sudo rm ld; sudo mv ld.orig ld'"
+  echo
+  if [ "$do_restore_usr_bin_ld" = "" ]
+  then
+    echo -n "Restore /usr/bin/ld to the original linker? (Y/n) "
+    if yes_no 0
+    then
+      do_restore_usr_bin_ld=1
+    fi
+    echo
+  fi
+  if [ "$do_restore_usr_bin_ld" = "1" ]
+  then
+    if sudo mv /usr/bin/ld.orig /usr/bin/ld
+    then
+      echo "Restored /usr/bin/ld.orig as /usr/bin/ld"
+    else
+      echo "Failed to restore /usr/bin/ld.orig as /usr/bin/ld"
+    fi
+    echo
+  fi
+  ;;
+esac
+
+# Check the gold version first.
+gold_up_to_date="1"
+if [ -x "/usr/local/gold/bin/ld" ]
+then
+  case `/usr/local/gold/bin/ld --version` in
+  *gold*2.2[1-9].*) ;;
+  * )
+    gold_up_to_date="0"
+  esac
+fi
+
+# Then check and make sure ld.bfd exists.
+if [ "$gold_up_to_date" = "1" ] && [ ! -x "/usr/local/gold/bin/ld.bfd" ]
+then
+  gold_up_to_date="0"
+fi
+
+if [ "$gold_up_to_date" = "0" ]
+then
+  if test "$do_inst_gold" = ""
+  then
+    echo "Gold is a new linker that links Chrome 5x faster than GNU ld."
+    echo -n "*** To use the gold linker, "
+    echo "you must pass -B/usr/local/gold/bin/ to g++ ***"
+    echo -n "Install the gold linker? (y/N) "
+    if yes_no 1; then
+      do_inst_gold=1
+    fi
+  fi
+  if test "$do_inst_gold" = "1"
+  then
+    echo "Building binutils with gold..."
+    install_gold || exit 99
+  else
+    echo "Not installing gold."
+  fi
+fi
+
+# Install 32bit backwards compatibility support for 64bit systems
+if [ "$(uname -m)" = "x86_64" ]; then
+  if test "$do_inst_lib32" = ""
+  then
+    echo "Installing 32bit libraries not already provided by the system"
+    echo
+    echo "This is only needed to build a 32-bit Chrome on your 64-bit system."
+    echo
+    echo "While we only need to install a relatively small number of library"
+    echo "files, we temporarily need to download a lot of large *.deb packages"
+    echo "that contain these files. We will create new *.deb packages that"
+    echo "include just the 32bit libraries. These files will then be found on"
+    echo "your system in places like /lib32, /usr/lib32, /usr/lib/debug/lib32,"
+    echo "/usr/lib/debug/usr/lib32. If you ever need to uninstall these files,"
+    echo "look for packages named *-ia32.deb."
+    echo "Do you want me to download all packages needed to build new 32bit"
+    echo -n "package files (y/N) "
+    if yes_no 1; then
+      do_inst_lib32=1
+    fi
+  fi
+  if test "$do_inst_lib32" != "1"
+  then
+    echo "Exiting without installing any 32bit libraries."
+    exit 0
+  fi
+
+  # Standard 32bit compatibility libraries
+  echo "First, installing the limited existing 32-bit support..."
+  cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
+            lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
+  if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
+    cmp_list="${cmp_list} lib32readline-gplv2-dev"
+  else
+    cmp_list="${cmp_list} lib32readline5-dev"
+  fi
+  sudo apt-get install $cmp_list
+
+  tmp=/tmp/install-32bit.$$
+  trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
+  mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
+  touch "${tmp}/status"
+
+  [ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
+  cat >>"${tmp}/apt/apt.conf" <<EOF
+        Apt::Architecture "i386";
+        Dir::Cache "${tmp}/cache";
+        Dir::Cache::Archives "${tmp}/";
+        Dir::State::Lists "${tmp}/apt/lists/";
+        Dir::State::status "${tmp}/status";
+EOF
+
+  # Download 32bit packages
+  echo "Computing list of available 32bit packages..."
+  sudo apt-get -c="${tmp}/apt/apt.conf" update
+
+  echo "Downloading available 32bit packages..."
+  sudo apt-get -c="${tmp}/apt/apt.conf" \
+          --yes --download-only --force-yes --reinstall install \
+          ${lib_list} ${dbg_list}
+
+  # Open packages, remove everything that is not a library, move the
+  # library to a lib32 directory and package everything as a *.deb file.
+  echo "Repackaging and installing 32bit packages for use on 64bit systems..."
+  for i in ${lib_list} ${dbg_list}; do
+    orig="$(echo "${tmp}/${i}"_*_i386.deb)"
+    compat="$(echo "${orig}" |
+              sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
+    rm -rf "${tmp}/staging"
+    msg="$(fakeroot -u sh -exc '
+      # Unpack 32bit Debian archive
+      umask 022
+      mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
+      cd "'"${tmp}"'/staging"
+      ar x "'${orig}'"
+      tar zCfx dpkg data.tar.gz
+      tar zCfx dpkg/DEBIAN control.tar.gz
+
+      # Create a posix extended regular expression fragment that will
+      # recognize the includes which have changed. Should be rare,
+      # will almost always be empty.
+      includes=`sed -n -e "s/^[0-9a-z]*  //g" \
+                       -e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
+                  xargs -n 1 -I FILE /bin/sh -c \
+                    "cmp -s dpkg/FILE /FILE || echo FILE" |
+                  tr "\n" "|" |
+                  sed -e "s,|$,,"`
+
+      # If empty, set it to not match anything.
+      test -z "$includes" && includes="^//"
+
+      # Turn the conflicts into an extended RE for removal from the
+      # Provides line.
+      conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
+                   dpkg/DEBIAN/control`
+
+      # Rename package, change architecture, remove conflicts and dependencies
+      sed -r -i                              \
+          -e "/Package/s/$/-ia32/"           \
+          -e "/Architecture/s/:.*$/: amd64/" \
+          -e "/Depends/s/:.*/: ia32-libs/"   \
+          -e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1"   \
+          -e "/Recommends/d"                 \
+          -e "/Conflicts/d"                  \
+        dpkg/DEBIAN/control
+
+      # Only keep files that live in "lib" directories or the includes
+      # that have changed.
+      sed -r -i                                                               \
+          -e "/\/lib64\//d" -e "/\/.?bin\//d"                                 \
+          -e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g"   \
+          -e "s, lib/, lib32/,g"                                              \
+          -e "s,/lib/,/lib32/,g"                                              \
+          -e "t;d"                                                            \
+          -e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
+        dpkg/DEBIAN/md5sums
+
+      # Re-run ldconfig after installation/removal
+      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
+        >dpkg/DEBIAN/postinst
+      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
+        >dpkg/DEBIAN/postrm
+      chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm
+
+      # Remove any other control files
+      find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
+                       -name postinst -o -name postrm ")" -o -print |
+        xargs -r rm -rf
+
+      # Remove any files/dirs that live outside of "lib" directories,
+      # or are not in our list of changed includes.
+      find dpkg -mindepth 1 -regextype posix-extended \
+          "(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
+          -prune -o -print | tac |
+        xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
+      find dpkg -name lib64 -o -name bin -o -name "?bin" |
+        tac | xargs -r rm -rf
+
+      # Remove any symbolic links that were broken by the above steps.
+      find -L dpkg -type l -print | tac | xargs -r rm -rf
+
+      # Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
+      # That is where gdb looks for them.
+      find dpkg -type d -o -path "*/lib/*" -print |
+        xargs -r -n 1 sh -c "
+          i=\$(echo \"\${0}\" |
+               sed -e s,/lib/,/lib32/,g \
+               -e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
+          mkdir -p \"\${i%/*}\";
+          mv \"\${0}\" \"\${i}\""
+
+      # Rename include to include32.
+      [ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"
+
+      # Prune any empty directories
+      find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :
+
+      # Create our own Debian package
+      cd ..
+      dpkg --build staging/dpkg .' 2>&1)"
+    compat="$(eval echo $(echo "${compat}" |
+                          sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
+    [ -r "${compat}" ] || {
+      echo "${msg}" >&2
+      echo "Failed to build new Debian archive!" >&2
+      exit 1
+    }
+
+    msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
+        echo "Installed ${compat##*/}"
+      } || {
+        # echo "${msg}" >&2
+        echo "Skipped ${compat##*/}"
+      }
+  done
+
+  # Add symbolic links for developing 32bit code
+  echo "Adding missing symbolic links, enabling 32bit code development..."
+  for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
+             sed -e 's/[.]so[.][0-9].*/.so/' |
+             sort -u); do
+    [ "x${i##*/}" = "xld-linux.so" ] && continue
+    [ -r "$i" ] && continue
+    j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
+         sort -n | tail -n 1)"
+    [ -r "$i.$j" ] || continue
+    sudo ln -s "${i##*/}.$j" "$i"
+  done
+fi
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
new file mode 100755
index 0000000..b80aea0
--- /dev/null
+++ b/build/install-chroot.sh
@@ -0,0 +1,325 @@
+#!/bin/bash -e
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: 'admin' and current user's group ('$(id -gn)')"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":g:m:sch" OPTNAME; do
+    case "$OPTNAME" in
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+mkdir -p "$HOME/chroot/"
+
+process_opts "$@"
+
+# Error handler
+trap 'exit 1' INT TERM QUIT
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+fi
+target="${distname}${arch}"
+
+# Don't overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  echo "This chroot already exists on your machine." >&2
+  echo "Delete /var/lib/chroot/${target} if you want to start over." >&2
+  exit 1
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep ubuntu.com /usr/share/debootstrap/scripts/"${distname}" >&/dev/null && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    echo -n "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+}
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep ubuntu.com /usr/share/debootstrap/scripts/"${distname}" >&/dev/null &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+ sudo debootstrap ${archflag} "${distname}" /var/lib/chroot/"${target}"        \
+                  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep ubuntu.com /usr/share/debootstrap/scripts/"${distname}" >&/dev/null &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="admin,$(id -gn)"
+fi
+sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+priority=3
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+
+EOF
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+sed '/^FSTAB=/s,/mount-defaults",/mount-'"${target}"'",'                       \
+         /etc/schroot/script-defaults |
+  sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+sudo cp /etc/schroot/mount-defaults /etc/schroot/mount-"${target}"
+echo "$HOME/chroot/.${target} $HOME/chroot none rw,bind 0 0" |
+  sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+mkdir -p "$HOME/chroot/.${target}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<EOF
+#!/bin/bash
+if [ \$# -eq 0 ]; then
+  exec schroot -c ${target%bit} -p
+else
+  p="\$1"; shift
+  exec schroot -c ${target%bit} -p "\$p" -- "\$@"
+fi
+exit 1
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
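+# Illustrative use of the generated helper, assuming a "lucid32" target:
+#   lucid32                # opens an interactive shell inside the chroot
+#   lucid32 gcc --version  # runs a single command inside the chroot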
+
+# Add the standard Ubuntu update repositories if requested.
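+# For example (distribution name is illustrative): a line
+#   deb <mirror> lucid main
+# is kept and gains matching "deb <mirror> lucid-security main" and
+# "deb <mirror> lucid-updates main" entries.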
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/
+             p
+             t1
+             d
+          :1;s/^deb/deb-src/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Update packages
+sudo schroot -c "${target%bit}" -p -- /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo schroot -c "${target%bit}" -p -- apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo schroot -c "${target%bit}" -p -- /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Configure "sudo" package
+sudo schroot -c "${target%bit}" -p -- /bin/sh -c '
+  egrep '"'^$(id -nu) '"' /etc/sudoers >/dev/null 2>&1 ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo schroot -c "${target%bit}" -p -- apt-get -y install                       \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep DISTRIB_CODENAME /etc/lsb-release 2>/dev/null | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo schroot -c "${target%bit}" -p -- sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo schroot -c "${target%bit}" -p -- apt-get -y install                     \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1
+  dep=
+  for i in binutils gdb strace; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo cp /usr/bin/gdb "/var/lib/chroot/${target}/usr/local/bin/"
+  sudo cp /usr/bin/ld "/var/lib/chroot/${target}/usr/local/bin/"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/lib64/"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    sudo cp /usr/lib/$lib* "/var/lib/chroot/${target}/usr/lib64/" || :
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+# Let the user know what we did
+trap '' INT TERM QUIT
+trap '' EXIT
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the "${target%bit}"
+command.
+
+Your home directory is shared between the host and the chroot, but
+$HOME/chroot has been configured to be private to the chroot environment.
+You can use it for files that need to differ between environments.
+EOF
diff --git a/build/internal/README.chromium b/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. It is included by either ..\debug.vsprops or ..\release.vsprops, so in general it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting, which is "Maximize Speed". Results in a relatively fast build with a reasonable optimization level, but without whole-program optimization, to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimization (WPO), which doubles the build time. Results in a much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
diff --git a/build/internal/release_defaults.gypi b/build/internal/release_defaults.gypi
new file mode 100644
index 0000000..7f1ddb8
--- /dev/null
+++ b/build/internal/release_defaults.gypi
@@ -0,0 +1,12 @@
+{
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'StringPooling': 'true',
+    },
+    'VCLinkerTool': {
+      'LinkIncremental': '1',
+      'OptimizeReferences': '2',
+      'EnableCOMDATFolding': '2',
+    },
+  },
+}
diff --git a/build/internal/release_impl.gypi b/build/internal/release_impl.gypi
new file mode 100644
index 0000000..aff06dc
--- /dev/null
+++ b/build/internal/release_impl.gypi
@@ -0,0 +1,3 @@
+{
+  'includes': ['release_defaults.gypi'],
+}
diff --git a/build/internal/release_impl_official.gypi b/build/internal/release_impl_official.gypi
new file mode 100644
index 0000000..d62e955
--- /dev/null
+++ b/build/internal/release_impl_official.gypi
@@ -0,0 +1,37 @@
+{
+  'includes': ['release_defaults.gypi'],
+  'defines': ['OFFICIAL_BUILD'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'Optimization': '3',
+      'InlineFunctionExpansion': '2',
+      'EnableIntrinsicFunctions': 'true',
+      'FavorSizeOrSpeed': '2',
+      'OmitFramePointers': 'true',
+      'EnableFiberSafeOptimizations': 'true',
+      'WholeProgramOptimization': 'true',
+    },
+    'VCLibrarianTool': {
+      'AdditionalOptions': ['/ltcg', '/expectedoutputsize:120000000'],
+    },
+    'VCLinkerTool': {
+      # Get more debug spew from the linker while we're sorting out
+      # build problems and performance.
+      # TODO(siggi): Remove these flags after we're out of the woods.
+      'AdditionalOptions': [
+        '/time',
+        # This may reduce memory fragmentation during linking.
+        # The expected size is 40*1024*1024, which gives us about 10M of
+        # headroom as of Dec 16, 2011.
+        '/expectedoutputsize:41943040',
+      ],
+      'LinkTimeCodeGeneration': '1',
+      # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+      # the generated PDB. According to MSDN documentation, this flag is only
+      # available (or perhaps supported) in the Enterprise (team development)
+      # version of Visual Studio. If this blocks your official build, simply
+      # comment out this line, then re-run "gclient runhooks".
+      'Profile': 'true',
+    },
+  },
+}
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
new file mode 100644
index 0000000..f400306
--- /dev/null
+++ b/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/linux/dump_app_syms b/build/linux/dump_app_syms
new file mode 100755
index 0000000..632bcc7
--- /dev/null
+++ b/build/linux/dump_app_syms
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+
+set -e
+
+usage() {
+  echo -n "$0 <dump_syms_exe> <strip_binary> " >&2
+  echo "<binary_with_symbols> <symbols_output>" >&2
+}
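+# Example (paths are illustrative):
+#   ./dump_app_syms ./dump_syms 1 ./chrome ./chrome.sym
+# dumps symbols from ./chrome into ./chrome.sym (if out of date) and then
+# strips ./chrome.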
+
+
+if [ $# -ne 4 ]; then
+  usage
+  exit 1
+fi
+
+SCRIPTDIR="$(readlink -f "$(dirname "$0")")"
+DUMPSYMS="$1"
+STRIP_BINARY="$2"
+INFILE="$3"
+OUTFILE="$4"
+
+# Dump the symbols from the given binary.
+if [ ! -e "$OUTFILE" -o "$INFILE" -nt "$OUTFILE" ]; then
+  "$DUMPSYMS" "$INFILE" > "$OUTFILE"
+fi
+
+if [ "$STRIP_BINARY" != "0" ]; then
+  strip "$INFILE"
+fi
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000..4b5455b
--- /dev/null
+++ b/build/linux/pkg-config-wrapper
@@ -0,0 +1,37 @@
+#!/bin/bash
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: an
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
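+# Example (sysroot path and package are illustrative):
+#   ./pkg-config-wrapper /path/to/sysroot --cflags --libs gtk+-2.0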
+
+root="$1"
+if [ -z "$root" ]
+then
+  echo "usage: $0 /path/to/sysroot [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+shift
+config_path=$root/usr/lib/pkgconfig:$root/usr/share/pkgconfig
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/python_arch.sh b/build/linux/python_arch.sh
new file mode 100755
index 0000000..01e41d0
--- /dev/null
+++ b/build/linux/python_arch.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This figures out the architecture of the version of Python we are building
+# pyautolib against.
+#
+#  python_arch.sh /usr/lib/libpython2.5.so.1.0
+#  python_arch.sh /path/to/sysroot/usr/lib/libpython2.4.so.1.0
+#
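+# Prints one of "arm", "x64" or "ia32" on success, prints "unknown" if the
+# library cannot be inspected, and exits non-zero for an unrecognized
+# architecture.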
+
+python=$(readlink -f "$1")
+if [ ! -r "$python" ]; then
+  echo unknown
+  exit 0
+fi
+file_out=$(file "$python")
+if [ $? -ne 0 ]; then
+  echo unknown
+  exit 0
+fi
+
+echo $file_out | grep -qs "ARM"
+if [ $? -eq 0 ]; then
+  echo arm
+  exit 0
+fi
+
+echo $file_out | grep -qs "x86-64"
+if [ $? -eq 0 ]; then
+  echo x64
+  exit 0
+fi
+
+echo $file_out | grep -qs "Intel 80386"
+if [ $? -eq 0 ]; then
+  echo ia32
+  exit 0
+fi
+
+exit 1
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..30f22f0
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
new file mode 100644
index 0000000..90c8b45
--- /dev/null
+++ b/build/linux/system.gyp
@@ -0,0 +1,629 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'conditions': [
+    ['sysroot!=""', {
+      'variables': {
+        'pkg-config': './pkg-config-wrapper "<(sysroot)"',
+      },
+    }, {
+      'variables': {
+        'pkg-config': 'pkg-config'
+      },
+    }],
+    [ 'os_posix==1 and OS!="mac"', {
+      'variables': {
+        # We use our own copy of libssl3, although we still need to link against
+        # the rest of NSS.
+        'use_system_ssl%': 0,
+      },
+    }, {
+      'variables': {
+        'use_system_ssl%': 1,
+      },
+    }],
+  ],
+
+
+  'targets': [
+    {
+      'target_name': 'gtk',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gtk+-2.0 gthread-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gtk+-2.0 gthread-2.0)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags gtk+-2.0 gthread-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
+            ],
+          },
+        }],
+        ['chromeos==1', {
+          'link_settings': {
+            'libraries': [ '-lXtst' ]
+          }
+        }],
+      ],
+    },
+    {
+      'target_name': 'gtkprint',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_openssl==1', {
+              'dependencies': [
+                '../../third_party/openssl/openssl.gyp:openssl',
+              ],
+            }],
+            ['use_openssl==0 and use_system_ssl==0', {
+              'dependencies': [
+                '../../net/third_party/nss/ssl.gyp:libssl',
+                '../../third_party/zlib/zlib.gyp:zlib',
+              ],
+              'direct_dependent_settings': {
+                'include_dirs+': [
+                  # We need our local copies of the libssl3 headers to come
+                  # before other includes, as we are shadowing system headers.
+                  '<(DEPTH)/net/third_party/nss/ssl',
+                ],
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+                ],
+              },
+            }],
+            ['use_openssl==0 and use_system_ssl==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+                'defines': [
+                  'USE_SYSTEM_SSL',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l nss)',
+                ],
+              },
+            }],
+          ]
+        }],
+      ],
+    },
+    {
+      'target_name': 'freetype2',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags freetype2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l freetype2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'fontconfig',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags fontconfig)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l fontconfig)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gdk',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gdk-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gconf',
+      'type': 'none',
+      'conditions': [
+        ['use_gconf==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gconf-2.0)',
+            ],
+            'defines': [
+              'USE_GCONF',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gio',
+      'type': 'none',
+      'conditions': [
+        ['use_gio==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gio-2.0)',
+            ],
+            'defines': [
+              'USE_GIO',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0', {
+                'defines': ['DLOPEN_GSETTINGS'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0 and OS=="linux"', {
+                'libraries': [
+                  '-ldl',
+                ],
+              }],
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'x11',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags x11)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l x11 xi)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags x11)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l x11 xi)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'xext',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xext)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xext)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'xfixes',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xfixes)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xfixes)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libgcrypt',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target" and use_cups==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(libgcrypt-config --cflags)',
+            ],
+          },
+          'link_settings': {
+            'libraries': [
+              '<!@(libgcrypt-config --libs)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'selinux',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'link_settings': {
+            'libraries': [
+              '-lselinux',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gnome_keyring',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'conditions': [
+            ['linux_link_gnome_keyring!=0', {
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+                ],
+              },
+            }, {
+              'conditions': [
+                ['OS=="linux"', {
+                 'link_settings': {
+                   'libraries': [
+                     '-ldl',
+                   ],
+                 },
+                }],
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      # The unit tests use a few convenience functions from the GNOME
+      # Keyring library directly. We ignore linux_link_gnome_keyring and
+      # link directly in this version of the target to allow this.
+      # *** Do not use this target in the main binary! ***
+      'target_name': 'gnome_keyring_direct',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'dbus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-1)',
+        ],
+      },
+    },
+    {
+      # TODO(satorux): Remove this once dbus-glib clients are gone.
+      'target_name': 'dbus-glib',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-glib-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-glib-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-glib-1)',
+        ],
+      },
+    },
+    {
+      'target_name': 'glib',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l glib-2.0 gobject-2.0 gthread-2.0)',
+            ],
+          },
+        }],
+        ['chromeos==1', {
+          'link_settings': {
+            'libraries': [ '-lXtst' ]
+          }
+        }],
+      ],
+    },
+    {
+      'target_name': 'pangocairo',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags pangocairo)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l pangocairo)',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags pangocairo)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other pangocairo)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l pangocairo)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libresolv',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lresolv',
+        ],
+      },
+    },
+    {
+      'target_name': 'ibus',
+      'type': 'none',
+      'conditions': [
+        ['use_ibus==1', {
+          'variables': {
+            'ibus_min_version': '1.3.99.20110425',
+          },
+          'direct_dependent_settings': {
+            'defines': ['HAVE_IBUS=1'],
+            'cflags': [
+              '<!@(<(pkg-config) --cflags "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l "ibus-1.0 >= <(ibus_min_version)")',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'wayland',
+      'type': 'none',
+      'conditions': [
+        ['use_wayland == 1', {
+          'cflags': [
+            '<!@(<(pkg-config) --cflags cairo wayland-client wayland-egl xkbcommon)',
+          ],
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags cairo wayland-client wayland-egl xkbcommon)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other wayland-client wayland-egl xkbcommon)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l wayland-client wayland-egl xkbcommon)',
+            ],
+          },
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/mac/OWNERS b/build/mac/OWNERS
new file mode 100644
index 0000000..c56e89d
--- /dev/null
+++ b/build/mac/OWNERS
@@ -0,0 +1,2 @@
+mark@chromium.org
+thomasvl@chromium.org
diff --git a/build/mac/change_mach_o_flags.py b/build/mac/change_mach_o_flags.py
new file mode 100755
index 0000000..c2aeaec
--- /dev/null
+++ b/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
+
+
+class MachOError(Exception):
+  """A class for exceptions thrown by this module."""
+
+  pass
+
+
+def CheckedSeek(file, offset):
+  """Seeks the file-like object at |file| to offset |offset| and raises a
+  MachOError if anything funny happens."""
+
+  file.seek(offset, os.SEEK_SET)
+  new_offset = file.tell()
+  if new_offset != offset:
+    raise MachOError, \
+          'seek: expected offset %d, observed %d' % (offset, new_offset)
+
+
+def CheckedRead(file, count):
+  """Reads |count| bytes from the file-like |file| object, raising a
+  MachOError if any other number of bytes is read."""
+
+  bytes = file.read(count)
+  if len(bytes) != count:
+    raise MachOError, \
+          'read: expected length %d, observed %d' % (count, len(bytes))
+
+  return bytes
+
+
+def ReadUInt32(file, endian):
+  """Reads an unsinged 32-bit integer from the file-like |file| object,
+  treating it as having endianness specified by |endian| (per the |struct|
+  module), and returns it as a number. Raises a MachOError if the proper
+  length of data can't be read from |file|."""
+
+  bytes = CheckedRead(file, 4)
+
+  (uint32,) = struct.unpack(endian + 'I', bytes)
+  return uint32
+
+
+def ReadMachHeader(file, endian):
+  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from the
+  file-like |file| object, treating it as having endianness specified by
+  |endian| (per the |struct| module), and returns a 7-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 28)
+
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      struct.unpack(endian + '7I', bytes)
+  return magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags
+
+
+def ReadFatArch(file):
+  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from the file-like
+  |file| object, treating it as having endianness specified by |endian|
+  (per the |struct| module), and returns a 5-tuple of its members as numbers.
+  Raises a MachOError if the proper length of data can't be read from
+  |file|."""
+
+  bytes = CheckedRead(file, 20)
+
+  cputype, cpusubtype, offset, size, align = struct.unpack('>5I', bytes)
+  return cputype, cpusubtype, offset, size, align
+
+
+def WriteUInt32(file, uint32, endian):
+  """Writes |uint32| as an unsinged 32-bit integer to the file-like |file|
+  object, treating it as having endianness specified by |endian| (per the
+  |struct| module)."""
+
+  bytes = struct.pack(endian + 'I', uint32)
+  assert len(bytes) == 4
+
+  file.write(bytes)
+
+
+def HandleMachOFile(file, options, offset=0):
+  """Seeks the file-like |file| object to |offset|, reads its |mach_header|,
+  and rewrites the header's |flags| field if appropriate. The header's
+  endianness is detected. Both 32-bit and 64-bit Mach-O headers are supported
+  (mach_header and mach_header_64). Raises MachOError if used on a header that
+  does not have a known magic number or is not of type MH_EXECUTE. The
+  MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared in the |flags| field
+  according to |options| and written to |file| if any changes need to be made.
+  If already set or clear as specified by |options|, nothing is written."""
+
+  CheckedSeek(file, offset)
+  magic = ReadUInt32(file, '<')
+  if magic == MH_MAGIC or magic == MH_MAGIC_64:
+    endian = '<'
+  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
+    endian = '>'
+  else:
+    raise MachOError, \
+          'Mach-O file at offset %d does not have a known magic number' % offset
+
+  CheckedSeek(file, offset)
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      ReadMachHeader(file, endian)
+  assert magic == MH_MAGIC or magic == MH_MAGIC_64
+  if filetype != MH_EXECUTE:
+    raise MachOError, \
+          'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' % \
+              (offset, filetype)
+
+  original_flags = flags
+
+  if options.no_heap_execution:
+    flags |= MH_NO_HEAP_EXECUTION
+  else:
+    flags &= ~MH_NO_HEAP_EXECUTION
+
+  if options.pie:
+    flags |= MH_PIE
+  else:
+    flags &= ~MH_PIE
+
+  if flags != original_flags:
+    CheckedSeek(file, offset + 24)
+    WriteUInt32(file, flags, endian)
+
+
+def HandleFatFile(file, options, fat_offset=0):
+  """Seeks the file-like |file| object to |offset| and loops over its
+  |fat_header| entries, calling HandleMachOFile for each."""
+
+  CheckedSeek(file, fat_offset)
+  magic = ReadUInt32(file, '>')
+  assert magic == FAT_MAGIC
+
+  nfat_arch = ReadUInt32(file, '>')
+
+  for index in xrange(0, nfat_arch):
+    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
+    assert size >= 28
+
+    # HandleMachOFile will seek around. Come back here after calling it, in
+    # case it sought.
+    fat_arch_offset = file.tell()
+    HandleMachOFile(file, options, offset)
+    CheckedSeek(file, fat_arch_offset)
+
+
+def main(me, args):
+  parser = optparse.OptionParser('%prog [options] <executable_path>')
+  parser.add_option('--executable-heap', action='store_false',
+                    dest='no_heap_execution', default=True,
+                    help='Clear the MH_NO_HEAP_EXECUTION bit')
+  parser.add_option('--no-pie', action='store_false',
+                    dest='pie', default=True,
+                    help='Clear the MH_PIE bit')
+  (options, loose_args) = parser.parse_args(args)
+  if len(loose_args) != 1:
+    parser.print_usage()
+    return 1
+
+  executable_path = loose_args[0]
+  executable_file = open(executable_path, 'rb+')
+
+  magic = ReadUInt32(executable_file, '<')
+  if magic == FAT_CIGAM:
+    # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
+    HandleFatFile(executable_file, options)
+  elif magic == MH_MAGIC or magic == MH_CIGAM or \
+      magic == MH_MAGIC_64 or magic == MH_CIGAM_64:
+    HandleMachOFile(executable_file, options)
+  else:
+    raise MachOError, '%s is not a Mach-O or fat file' % executable_path
+
+  executable_file.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/build/mac/change_mach_o_flags_from_xcode.sh b/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 0000000..1824f8d
--- /dev/null
+++ b/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small wrapper script around change_mach_o_flags.py allowing it to
+# be invoked easily from Xcode. change_mach_o_flags.py expects its arguments
+# on the command line, but Xcode puts its parameters in the environment.
+
+set -e
+
+exec "$(dirname "${0}")/change_mach_o_flags.py" \
+     "${@}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/chrome_mac.croc b/build/mac/chrome_mac.croc
new file mode 100644
index 0000000..8cde00c
--- /dev/null
+++ b/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, linux, or windows specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_mac\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/mac/strip_from_xcode b/build/mac/strip_from_xcode
new file mode 100755
index 0000000..c26b9fb
--- /dev/null
+++ b/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it.  This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+  # Only strip in release mode.
+  exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+   [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+  # Strip everything (no special flags).  No-op.
+  true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+     [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+  # Strip debugging symbols and local symbols
+  FLAGS[${#FLAGS[@]}]=-S
+  FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+  # Don't strip static libraries.
+  exit 0
+else
+  # Warn, but don't treat this as an error.
+  echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
+  exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+  # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+  # Flags".
+  for stripflag in "${STRIPFLAGS}" ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved in
+  # this environment variable by setting it as a build setting.  This isn't a
+  # standard Xcode setting.  It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/strip_save_dsym b/build/mac/strip_save_dsym
new file mode 100755
index 0000000..ef08d83
--- /dev/null
+++ b/build/mac/strip_save_dsym
@@ -0,0 +1,341 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility.  Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle; it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing.  That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow.  On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast.  Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
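+#
+# Example (the binary name is illustrative):
+#   strip_save_dsym -S -x MyApp
+# saves a copy of MyApp into a "fake" .dSYM bundle and then strips MyApp,
+# passing -S -x through to strip.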
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file.  The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture.  On error, returns an empty list.  Determines the architecture
+# list by calling file.
+def macho_archs(macho):
+  macho_types = ["executable",
+                 "dynamically linked shared library",
+                 "bundle"]
+  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+                              stdout=subprocess.PIPE)
+
+  archs = []
+
+  type_line = file_cmd.stdout.readline()
+  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+  if type_match:
+    archs.append(type_match.group(1))
+    return [type_match.group(1)]
+  else:
+    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+                          type_line)
+    if type_match:
+      for i in range(0, int(type_match.group(1))):
+        arch_line = file_cmd.stdout.readline()
+        arch_match = re.match(
+                     "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+                     arch_line)
+        if arch_match:
+          archs.append(arch_match.group(1))
+
+  if file_cmd.wait() != 0:
+    archs = []
+
+  if len(archs) == 0:
+    print >> sys.stderr, "No architectures in %s" % macho
+
+  return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+  uuids = {}
+
+  archs = macho_archs(macho)
+  if len(archs) == 0:
+    return uuids
+
+  for arch in archs:
+    if arch == "":
+      continue
+
+    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+                                  macho],
+                                 stdout=subprocess.PIPE)
+    # state 0 is when nothing UUID-related has been seen yet.  State 1 is
+    # entered after a load command begins, but it may not be an LC_UUID load
+    # command.  States 2, 3, and 4 are intermediate states while reading an
+    # LC_UUID command.  State 5 is the terminal state for a successful LC_UUID
+    # read.  State 6 is the error state.
+    state = 0
+    uuid = ""
+    for otool_line in otool_cmd.stdout:
+      if state == 0:
+        if re.match("^Load command .*$", otool_line):
+          state = 1
+      elif state == 1:
+        if re.match("^     cmd LC_UUID$", otool_line):
+          state = 2
+        else:
+          state = 0
+      elif state == 2:
+        if re.match("^ cmdsize 24$", otool_line):
+          state = 3
+        else:
+          state = 6
+      elif state == 3:
+        # The UUID display format changed in the version of otool shipping
+        # with the Xcode 3.2.2 prerelease.  The new format is traditional:
+        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # and with Xcode 3.2.6, the line is indented one more space:
+        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # The old format, from cctools-750 and older's otool, breaks the UUID
+        # up into a sequence of bytes:
+        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+                                  otool_line)
+        if new_uuid_match:
+          uuid = new_uuid_match.group(1)
+
+          # Skip state 4, there is no second line to read.
+          state = 5
+        else:
+          old_uuid_match = re.match("^   uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                    otool_line)
+          if old_uuid_match:
+            state = 4
+            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+          else:
+            state = 6
+      elif state == 4:
+        old_uuid_match = re.match("^        0x(..) 0x(..) 0x(..) 0x(..) "
+                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                  otool_line)
+        if old_uuid_match:
+          state = 5
+          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
+                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
+                  old_uuid_match.group(7) + old_uuid_match.group(8)
+        else:
+          state = 6
+
+    if otool_cmd.wait() != 0:
+      state = 6
+
+    if state == 5:
+      uuids[arch] = uuid.upper()
+
+  if len(uuids) == 0:
+    print >> sys.stderr, "No UUIDs in %s" % macho
+
+  return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+  # If building a bundle, the .dSYM should be placed next to the bundle.  Use
+  # WRAPPER_NAME to make this determination.  If called from xcodebuild,
+  # WRAPPER_NAME will be set to the name of the bundle.
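+  # Illustrative example (paths are hypothetical): with
+  # BUILT_PRODUCTS_DIR=/out/Release and WRAPPER_NAME=Foo.app this returns
+  # /out/Release/Foo.app.dSYM; for a bare executable /out/Release/foo with no
+  # WRAPPER_NAME set, it returns /out/Release/foo.dSYM.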
+  dsym = ""
+  if "WRAPPER_NAME" in os.environ:
+    if "BUILT_PRODUCTS_DIR" in os.environ:
+      dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+                          os.environ["WRAPPER_NAME"])
+    else:
+      dsym = os.environ["WRAPPER_NAME"]
+  else:
+    dsym = macho
+
+  dsym += ".dSYM"
+
+  return dsym
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
+def make_fake_dsym(macho, dsym):
+  uuids = macho_uuids(macho)
+  if len(uuids) == 0:
+    return False
+
+  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+  try:
+    os.makedirs(dwarf_dir)
+  except OSError, (err, error_string):
+    if err != errno.EEXIST:
+      raise
+  shutil.copyfile(macho, dwarf_file)
+
+  # info_template is the same as what dsymutil would have written, with the
+  # addition of the fake_dsym key.
+  info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+	<dict>
+		<key>CFBundleDevelopmentRegion</key>
+		<string>English</string>
+		<key>CFBundleIdentifier</key>
+		<string>com.apple.xcode.dsym.%(root_name)s</string>
+		<key>CFBundleInfoDictionaryVersion</key>
+		<string>6.0</string>
+		<key>CFBundlePackageType</key>
+		<string>dSYM</string>
+		<key>CFBundleSignature</key>
+		<string>????</string>
+		<key>CFBundleShortVersionString</key>
+		<string>1.0</string>
+		<key>CFBundleVersion</key>
+		<string>1</string>
+		<key>dSYM_UUID</key>
+		<dict>
+%(uuid_dict)s		</dict>
+		<key>fake_dsym</key>
+		<true/>
+	</dict>
+</plist>
+'''
+
+  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
+  uuid_dict = ""
+  for arch in sorted(uuids):
+    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
+  info_dict = {
+    "root_name": root_name,
+    "uuid_dict": uuid_dict,
+  }
+  info_contents = info_template % info_dict
+  info_file = os.path.join(dsym, "Contents", "Info.plist")
+  info_fd = open(info_file, "w")
+  info_fd.write(info_contents)
+  info_fd.close()
+
+  return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located.  If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho):
+  dsym = dsym_path(macho)
+  macho_stat = os.stat(macho)
+  dsym_stat = None
+  try:
+    dsym_stat = os.stat(dsym)
+  except OSError, (err, error_string):
+    if err != errno.ENOENT:
+      raise
+
+  if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
+    # Make a .dSYM bundle
+    if not make_fake_dsym(macho, dsym):
+      return False
+
+    # Strip the Mach-O file
+    remove_dsym = True
+    try:
+      strip_path = ""
+      if "SYSTEM_DEVELOPER_BIN_DIR" in os.environ:
+        strip_path = os.environ["SYSTEM_DEVELOPER_BIN_DIR"]
+      else:
+        strip_path = "/usr/bin"
+      strip_path = os.path.join(strip_path, "strip")
+      strip_cmdline = [strip_path] + sys.argv[1:]
+      strip_cmd = subprocess.Popen(strip_cmdline)
+      if strip_cmd.wait() == 0:
+        remove_dsym = False
+    finally:
+      if remove_dsym:
+        shutil.rmtree(dsym)
+
+    # Update modification time on the Mach-O file and .dSYM bundle
+    now = time.time()
+    os.utime(macho, (now, now))
+    os.utime(dsym, (now, now))
+
+  return True
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  # This only supports operating on one file at a time.  Look at the arguments
+  # to strip to figure out what the source to be stripped is.  Arguments are
+  # processed in the same way that strip does, although to reduce complexity,
+  # this doesn't do all of the same checking as strip.  For example, strip
+  # has no -Z switch and would treat -Z on the command line as an error.  For
+  # the purposes this is needed for, that's fine.
+  macho = None
+  process_switches = True
+  ignore_argument = False
+  for arg in argv[1:]:
+    if ignore_argument:
+      ignore_argument = False
+      continue
+    if process_switches:
+      if arg == "-":
+        process_switches = False
+      # These switches to strip accept an argument:
+      if arg in ["-s", "-R", "-d", "-o", "-arch"]:
+        ignore_argument = True
+      if arg[0] == "-":
+        continue
+    if macho is None:
+      macho = arg
+    else:
+      print >> sys.stderr, "Too many things to strip"
+      return 1
+
+  if macho is None:
+    print >> sys.stderr, "Nothing to strip"
+    return 1
+
+  if not strip_and_make_fake_dsym(macho):
+    return 1
+
+  return 0
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/nocompile.gypi b/build/nocompile.gypi
new file mode 100644
index 0000000..f9021ae
--- /dev/null
+++ b/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_module_nc_unittests',
+#   'type': 'executable',
+#   'sources': [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended as a C++-style
+# comment that holds a python list of regular expressions.  This line will
+# likely be longer than 80 characters.  Giving a solid expected output is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that will have a series of #error lines on
+# failure, or a set of trivially passing gunit TEST() functions on success.
+# This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+{
+  # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+  'sources/': [['exclude', '\\.nc$']],
+  'conditions': [
+    [ 'OS=="linux" and clang==0', {
+      'rules': [
+        {
+          'variables': {
+            'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+            'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+                               '<(RULE_INPUT_ROOT)_nc.cc'),
+           },
+          'rule_name': 'run_nocompile',
+          'extension': 'nc',
+          'inputs': [
+            '<(nocompile_driver)',
+          ],
+          'outputs': [
+            '<(nc_result_path)'
+          ],
+          'action': [
+            'python',
+            '<(nocompile_driver)',
+            '4', # number of compilers to invoke in parallel.
+            '<(RULE_INPUT_PATH)',
+            '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+            '<(nc_result_path)',
+            ],
+          'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    }, {
+      'sources/': [['exclude', '\\.nc$']]
+    }],  # 'OS=="linux" and clang==0'
+  ],
+}
+
diff --git a/build/output_dll_copy.rules b/build/output_dll_copy.rules
new file mode 100644
index 0000000..c6e9051
--- /dev/null
+++ b/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+	Name="Output DLL copy"
+	Version="8.00"
+	>
+	<Rules>
+		<CustomBuildRule
+			Name="Output DLL copy"
+			CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+			Outputs="$(OutDir)\$(InputFileName)"
+			FileExtensions="*.dll"
+			>
+			<Properties>
+			</Properties>
+		</CustomBuildRule>
+	</Rules>
+</VisualStudioToolFile>
diff --git a/build/precompile.cc b/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/build/precompile.h b/build/precompile.h
new file mode 100644
index 0000000..73dc57f
--- /dev/null
+++ b/build/precompile.h
@@ -0,0 +1,108 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds the
+// build up significantly, around 1/4th on VS 2010 on an HP Z600 with 12
+// GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. It may be
+// possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wincrypt.h>  // 4
+#include <wtypes.h>  // 2
+
+// TODO(joi): Defines in atlbase.h cause conflicts; need to figure out
+// if/how this family of headers can be included in the PCH; several
+// of them are used quite frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h>  // 2
+#include <atlctrls.h>  // 2
+#include <atlmisc.h>  // 2
+#include <atlsafe.h>  // 1
+#include <atltheme.h>  // 1
+#include <atlwin.h>  // 2
+*/
+
+// TODO(joi): Objbase.h and other files that rely on it bring in
+// [ #define interface struct ] which can cause problems in a
+// multi-platform build like Chrome's. Tried #undef-ing it, but
+// there are currently 118 targets that break if we do this, so
+// leaving out of the precompiled header for now.
+//#include <commctrl.h>  // 2
+//#include <commdlg.h>  // 3
+//#include <cryptuiapi.h>  // 2
+//#include <Objbase.h>  // 2
+//#include <objidl.h>  // 1
+//#include <ole2.h>  // 1
+//#include <oleacc.h>  // 2
+//#include <oleauto.h>  // 1
+//#include <oleidl.h>  // 1
+//#include <propkey.h>  // 2
+//#include <propvarutil.h>  // 2
+//#include <pstore.h>  // 2
+//#include <shlguid.h>  // 1
+//#include <shlwapi.h>  // 1
+//#include <shobjidl.h>  // 4
+//#include <urlhist.h>  // 2
+
+// TODO(joi): Caused other conflicts in addition to the 'interface' issue
+// above, see if they can be resolved.
+//#include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>  // 4
+#include <math.h>
+#include <memory.h>  // 1
+#include <signal.h>
+#include <stdarg.h>  // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>  // 4
+
+#include <algorithm>
+#include <bitset>  // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio>  // 3
+#include <cstdlib>  // 2
+#include <cstring>
+#include <deque>
+#include <fstream>  // 3
+#include <functional>
+#include <iomanip>  // 2
+#include <iosfwd>  // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>  // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/build/protoc.gypi b/build/protoc.gypi
new file mode 100644
index 0000000..555c5be
--- /dev/null
+++ b/build/protoc.gypi
@@ -0,0 +1,92 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies.  The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'proto_in_dir%': '.',
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        '<(protoc)',
+        '--proto_path=<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # --proto_path is a strict prefix of the path given as an argument.
+        '<(proto_in_dir)/<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--cpp_out=<(cc_dir)',
+        '--python_out=<(py_dir)',
+        ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/release.gypi b/build/release.gypi
new file mode 100644
index 0000000..7595ef5
--- /dev/null
+++ b/build/release.gypi
@@ -0,0 +1,17 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000..25b311c
--- /dev/null
+++ b/build/sanitize-mac-build-log.sed
@@ -0,0 +1,35 @@
+#!/bin/echo Use sanitize-mac-build-log.sh or sed -f
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
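+#
+# Typical (illustrative) use is to pipe an xcodebuild log through it, e.g.
+#   xcodebuild ... 2>&1 | sed -f build/sanitize-mac-build-log.sed
+# or to use the companion sanitize-mac-build-log.sh wrapper.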
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
+s|^CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..dc743fa
--- /dev/null
+++ b/build/sanitize-mac-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000..d6d049c
--- /dev/null
+++ b/build/sanitize-win-build-log.sed
@@ -0,0 +1,14 @@
+#!/bin/echo Use sanitize-win-build-log.sh or sed -f
+
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]\+>//
diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..dc743fa
--- /dev/null
+++ b/build/sanitize-win-build-log.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -f `dirname "${0}"`/`basename "${0}" sh`sed
+
diff --git a/build/scan_sources.py b/build/scan_sources.py
new file mode 100755
index 0000000..2b0e806
--- /dev/null
+++ b/build/scan_sources.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from optparse import OptionParser
+import os
+import re
+import sys
+
+"""Header Scanner.
+
+This module will scan a set of input sources for include dependencies.  Use
+the command-line switch -Ixxxx to add include paths.  All filenames and paths
+are expected and returned with POSIX separators.
+"""
+
+
+debug = False
+
+
+def DebugPrint(txt):
+  if debug: print txt
+
+
+class PathConverter(object):
+  """PathConverter does path manipulates using Posix style pathnames.
+
+  Regardless of the native path type, all inputs and outputs to the path
+  functions are with POSIX style separators.
+  """
+  def ToNativePath(self, pathname):
+    return os.path.sep.join(pathname.split('/'))
+
+  def ToPosixPath(self, pathname):
+    return '/'.join(pathname.split(os.path.sep))
+
+  def exists(self, pathname):
+    ospath = self.ToNativePath(pathname)
+    return os.path.exists(ospath)
+
+  def getcwd(self):
+    return self.ToPosixPath(os.getcwd())
+
+  def isabs(self, pathname):
+    ospath = self.ToNativePath(pathname)
+    return os.path.isabs(ospath)
+
+  def isdir(self, pathname):
+    ospath = self.ToNativePath(pathname)
+    return os.path.isdir(ospath)
+
+  def open(self, pathname):
+    ospath = self.ToNativePath(pathname)
+    return open(ospath)
+
+  def realpath(self, pathname):
+    ospath = self.ToNativePath(pathname)
+    ospath = os.path.realpath(ospath)
+    return self.ToPosixPath(ospath)
+
+
+class Resolver(object):
+  """Resolver finds and generates relative paths for include files.
+
+  The Resolver object provides a mechanism to find and convert a source or
+  include filename into a relative path based on provided search paths.  All
+  paths use POSIX style separators.
+  """
+  def __init__(self, pathobj=PathConverter()):
+    self.search_dirs = []
+    self.pathobj = pathobj
+    self.cwd = self.pathobj.getcwd()
+    self.offs = len(self.cwd)
+
+  def AddOneDirectory(self, pathname):
+    """Add an include search path."""
+    pathname = self.pathobj.realpath(pathname)
+    DebugPrint('Adding DIR: %s' % pathname)
+    if pathname not in self.search_dirs:
+      if self.pathobj.isdir(pathname):
+        self.search_dirs.append(pathname)
+      else:
+        sys.stderr.write('Not a directory: %s\n' % pathname)
+        return False
+    return True
+
+  def AddDirectories(self, pathlist):
+    """Add list of space separated directories."""
+    failed = False
+    dirlist = ' '.join(pathlist)
+    for dirname in dirlist.split(' '):
+      if not self.AddOneDirectory(dirname):
+        failed = True
+    return not failed
+
+  def GetDirectories(self):
+    return self.search_dirs
+
+  def RealToRelative(self, filepath, basepath):
+    """Returns a relative path from an absolute basepath and filepath."""
+    path_parts = filepath.split('/')
+    base_parts = basepath.split('/')
+    while path_parts and base_parts and path_parts[0] == base_parts[0]:
+      path_parts = path_parts[1:]
+      base_parts = base_parts[1:]
+    rel_parts = ['..'] * len(base_parts) + path_parts
+    return '/'.join(rel_parts)
+
+  def FilenameToRelative(self, filepath):
+    """Returns a relative path from CWD to filepath."""
+    filepath = self.pathobj.realpath(filepath)
+    basepath = self.cwd
+    return self.RealToRelative(filepath, basepath)
+
+  def FindFile(self, filename):
+    """Search for <filename> across the search directories, if the path is not
+       absolute.  Return the filepath relative to the CWD or None. """
+    if self.pathobj.isabs(filename):
+      if self.pathobj.exists(filename):
+        return self.FilenameToRelative(filename)
+      return None
+    for pathname in self.search_dirs:
+      fullname = '%s/%s' % (pathname, filename)
+      if self.pathobj.exists(fullname):
+        return self.FilenameToRelative(fullname)
+    return None
+
+
+def LoadFile(filename):
+  # Catch cases where the file does not exist
+  try:
+    fd = PathConverter().open(filename)
+  except IOError:
+    DebugPrint('Exception on file: %s' % filename)
+    return ''
+  # Go ahead and throw if you fail to read
+  return fd.read()
+
+
+class Scanner(object):
+  """Scanner searches for '#include' to find dependencies."""
+
+  def __init__(self, loader=None):
+    regex = r'\#[ \t]*include[ \t]*[<"]([^>^"]+)[>"]'
+    self.parser = re.compile(regex)
+    self.loader = loader
+    if not loader:
+      self.loader = LoadFile
+
+  def ScanData(self, data):
+    """Generate a list of includes from this text block."""
+    return self.parser.findall(data)
+
+  def ScanFile(self, filename):
+    """Generate a list of includes from this filename."""
+    includes = self.ScanData(self.loader(filename))
+    DebugPrint('Source %s contains:\n\t%s' % (filename, '\n\t'.join(includes)))
+    return includes
+
+
+class WorkQueue(object):
+  """WorkQueue contains the list of files to be scanned.
+
+  WorkQueue provides a queue of files to be processed.  The scanner
+  will attempt to push new items into the queue, which will be ignored if the
+  item is already in the queue.  If the item is new, it will be added to the
+  work list, which is drained by the scanner.
+  """
+  def __init__(self, resolver, scanner=Scanner()):
+    self.added_set = set()
+    self.todo_list = list()
+    self.scanner = scanner
+    self.resolver = resolver
+
+  def PushIfNew(self, filename):
+    """Add this dependency to the list of not already there."""
+    DebugPrint('Adding %s' % filename)
+    resolved_name = self.resolver.FindFile(filename)
+    if not resolved_name:
+      DebugPrint('Failed to resolve %s' % filename)
+      return
+    DebugPrint('Resolved as %s' % resolved_name)
+    if resolved_name in self.added_set:
+      return
+    self.todo_list.append(resolved_name)
+    self.added_set.add(resolved_name)
+
+  def PopIfAvail(self):
+    """Fetch the next dependency to search."""
+    if not self.todo_list:
+      return None
+    return self.todo_list.pop()
+
+  def Run(self):
+    """Search through the available dependencies until the list becomes empty.
+      The list must be primed with one or more source files to search."""
+    scan_name = self.PopIfAvail()
+    while scan_name:
+      includes = self.scanner.ScanFile(scan_name)
+      for include_file in includes:
+        self.PushIfNew(include_file)
+      scan_name = self.PopIfAvail()
+    return sorted(self.added_set)
+
+
+def Main(argv):
+  global debug
+  parser = OptionParser()
+  parser.add_option('-I', dest='includes', action='append',
+                    help='Set include path.')
+  parser.add_option('-D', dest='debug', action='store_true',
+                    help='Enable debugging output.', default=False)
+  (options, files) = parser.parse_args(argv[1:])
+
+  if options.debug:
+    debug = True
+
+  resolver = Resolver()
+  if options.includes:
+    if not resolver.AddDirectories(options.includes):
+      return -1
+
+  workQ = WorkQueue(resolver)
+  for filename in files:
+    workQ.PushIfNew(filename)
+
+  sorted_list = workQ.Run()
+  for pathname in sorted_list:
+    sys.stderr.write(pathname + '\n')
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
diff --git a/build/scan_sources_test.py b/build/scan_sources_test.py
new file mode 100755
index 0000000..0d98030
--- /dev/null
+++ b/build/scan_sources_test.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import scan_sources
+import unittest
+
+
+class MockSimplePath(object):
+  def exists(self, pathname):
+    return True
+
+  def getcwd(self):
+    return '.'
+
+  def isdir(self, pathname):
+    return True
+
+  def realpath(self, pathname):
+    return pathname
+
+
+class MockScanner(object):
+  def __init__(self, filelists):
+    self.filelists = filelists
+
+  def ScanFile(self, filename):
+    if not self.filelists:
+      return []
+    return self.filelists.pop()
+
+
+class MockResolver(object):
+  def FindFile(self, filename):
+    return filename
+
+
+class ScannerUnitTest(unittest.TestCase):
+
+  def testScanData(self):
+    scanner = scan_sources.Scanner()
+    test = """
+#This is not an include
+#include is <bogus>
+#include <x1>
+ #include "x2"
+#include     <x3>
+#include     "x4"
+# include     <x5>
+# include  "x6"
+# include "x7"
+Not
+"""
+    results = scanner.ScanData(test)
+    self.assertEqual(results, ['x1', 'x2', 'x3', 'x4', 'x5', 'x6', 'x7'])
+
+  def testResolverAddDirectories(self):
+    resolver = scan_sources.Resolver(MockSimplePath())
+    resolver.AddDirectories(['Dir1 Dir2', 'Dir3', 'Dir4', 'Dir5'])
+    results = resolver.GetDirectories()
+    self.assertEqual(results, ['Dir1','Dir2','Dir3','Dir4','Dir5'])
+
+  def testResolverRelative(self):
+    resolver = scan_sources.Resolver()
+    tests = [
+        ('/foo/bar','/foo/bar/file1','file1'),
+        ('/foo/bar/extra', '/foo/bar/file2', '../file2'),
+        ('/foo/bar', '/foo/bar/extra/file3', 'extra/file3'),
+    ]
+    for (base, full, rel) in tests:
+      self.assertEqual(rel, resolver.RealToRelative(full, base))
+
+  def testWorkQ(self):
+    filelists = [['file1', 'file4', 'file2'], ['file3'], ['file5', 'file2']]
+    resolver = MockResolver()
+    scanner = MockScanner(filelists)
+    workq = scan_sources.WorkQueue(resolver, scanner)
+    workq.PushIfNew('file3')
+    result = workq.Run()
+
+    # Flatten the expected file lists and compare them with the scanned set.
+    flat = set([item for sublist in filelists for item in sublist])
+
+    flat = sorted(flat)
+    result = sorted(result)
+    self.assertEqual(flat, result)
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/some.gyp b/build/some.gyp
new file mode 100644
index 0000000..44a1dd5
--- /dev/null
+++ b/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'some',
+      'type': 'none',
+      'dependencies': [
+        # This file is intended to be locally modified. List the targets you use
+        # regularly. The generated some.sln will contain projects for only
+        # those targets and the targets they are transitively dependent on. This
+        # can result in a solution that loads and unloads faster in Visual
+        # Studio.
+        #
+        # Tip: Create a dummy CL to hold your local edits to this file, so they
+        # don't accidentally get added to another CL that you are editing.
+        #
+        # Example:
+        # '../chrome/chrome.gyp:chrome',
+      ],
+    },
+  ],
+}
diff --git a/build/temp_gyp/README.chromium b/build/temp_gyp/README.chromium
new file mode 100644
index 0000000..8045d61
--- /dev/null
+++ b/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files.  Contact mark for
+details.
diff --git a/build/temp_gyp/googleurl.gyp b/build/temp_gyp/googleurl.gyp
new file mode 100644
index 0000000..8d61551
--- /dev/null
+++ b/build/temp_gyp/googleurl.gyp
@@ -0,0 +1,101 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(mark): Upstream this file to googleurl.
+{
+  'variables': {
+    'chromium_code': 1,
+  },
+  'targets': [
+    {
+      'target_name': 'googleurl',
+      'type': '<(component)',
+      'dependencies': [
+        '../../base/base.gyp:base',
+        '../../third_party/icu/icu.gyp:icudata',
+        '../../third_party/icu/icu.gyp:icui18n',
+        '../../third_party/icu/icu.gyp:icuuc',
+      ],
+      'sources': [
+        '../../googleurl/src/gurl.cc',
+        '../../googleurl/src/gurl.h',
+        '../../googleurl/src/url_canon.h',
+        '../../googleurl/src/url_canon_etc.cc',
+        '../../googleurl/src/url_canon_fileurl.cc',
+        '../../googleurl/src/url_canon_host.cc',
+        '../../googleurl/src/url_canon_icu.cc',
+        '../../googleurl/src/url_canon_icu.h',
+        '../../googleurl/src/url_canon_internal.cc',
+        '../../googleurl/src/url_canon_internal.h',
+        '../../googleurl/src/url_canon_internal_file.h',
+        '../../googleurl/src/url_canon_ip.cc',
+        '../../googleurl/src/url_canon_ip.h',
+        '../../googleurl/src/url_canon_mailtourl.cc',
+        '../../googleurl/src/url_canon_path.cc',
+        '../../googleurl/src/url_canon_pathurl.cc',
+        '../../googleurl/src/url_canon_query.cc',
+        '../../googleurl/src/url_canon_relative.cc',
+        '../../googleurl/src/url_canon_stdstring.h',
+        '../../googleurl/src/url_canon_stdurl.cc',
+        '../../googleurl/src/url_file.h',
+        '../../googleurl/src/url_parse.cc',
+        '../../googleurl/src/url_parse.h',
+        '../../googleurl/src/url_parse_file.cc',
+        '../../googleurl/src/url_parse_internal.h',
+        '../../googleurl/src/url_util.cc',
+        '../../googleurl/src/url_util.h',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../..',
+        ],
+      },
+      'conditions': [
+        ['component=="shared_library"', {
+          'defines': [
+            'GURL_DLL',
+            'GURL_IMPLEMENTATION=1',
+          ],
+          'direct_dependent_settings': {
+            'defines': [
+              'GURL_DLL',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'googleurl_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'googleurl',
+        '../../base/base.gyp:base_i18n',
+        '../../base/base.gyp:test_support_base',
+        '../../testing/gtest.gyp:gtest',
+        '../../third_party/icu/icu.gyp:icuuc',
+      ],
+      'sources': [
+        '../../googleurl/src/gurl_unittest.cc',
+        '../../googleurl/src/url_canon_unittest.cc',
+        '../../googleurl/src/url_parse_unittest.cc',
+        '../../googleurl/src/url_test_utils.h',
+        '../../googleurl/src/url_util_unittest.cc',
+        # Make sure base and ICU are started up the 'Chromium way' since the
+        # build is using the Chromium base & ICU.
+        '../../base/test/run_all_unittests.cc',
+      ],
+      'conditions': [
+        ['os_posix==1 and OS!="mac"', {
+          'conditions': [
+            ['linux_use_tcmalloc==1', {
+              'dependencies': [
+                '../../base/allocator/allocator.gyp:allocator',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/temp_gyp/pdfsqueeze.gyp b/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 0000000..2b3b1ff
--- /dev/null
+++ b/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'pdfsqueeze',
+      'type': 'executable',
+      'sources': [
+        '../../third_party/pdfsqueeze/pdfsqueeze.m',
+      ],
+      'defines': [
+        # Use defines to map the full path names that will be used for
+        # the vars into the short forms expected by pdfsqueeze.m.
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+      ],
+      'include_dirs': [
+        '<(INTERMEDIATE_DIR)',
+      ],
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+      ],
+      'actions': [
+        {
+          'action_name': 'Generate inline filter data',
+          'inputs': [
+            '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+          ],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+          ],
+          'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/build/use_skia_on_mac.gypi b/build/use_skia_on_mac.gypi
new file mode 100644
index 0000000..2b9ce61
--- /dev/null
+++ b/build/use_skia_on_mac.gypi
@@ -0,0 +1,9 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Whether the Mac port uses Skia (instead of Core Graphics) by default.
+# This overrides the setting in common.gypi .
+{
+  'use_skia_on_mac%': 1,
+}
diff --git a/build/util/build_util.gyp b/build/util/build_util.gyp
new file mode 100644
index 0000000..c916b05
--- /dev/null
+++ b/build/util/build_util.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'lastchange',
+      'type': 'none',
+      'variables': {
+        'lastchange_out_path': '<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
+        'default_lastchange_path': '../LASTCHANGE.in',
+      },
+      'actions': [
+        {
+          'action_name': 'lastchange',
+          'inputs': [
+            # Note:  <(default_lastchange_path) is optional,
+            # so it doesn't show up in inputs.
+            './lastchange.py',
+          ],
+          'outputs': [
+            '<(lastchange_out_path).always',
+            '<(lastchange_out_path)',
+          ],
+          'action': [
+            'python', '<@(_inputs)',
+            '-o', '<(lastchange_out_path)',
+            '-d', '<(default_lastchange_path)',
+          ],
+          'message': 'Extracting last change to <(lastchange_out_path)',
+          'process_outputs_as_sources': '1',
+        },
+      ],
+    },
+  ]
+}
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
new file mode 100755
index 0000000..870bf07
--- /dev/null
+++ b/build/util/lastchange.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
+class VersionInfo(object):
+  def __init__(self, url, revision):
+    self.url = url
+    self.revision = revision
+
+
+def FetchSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion branch and revision for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  try:
+    proc = subprocess.Popen(['svn', 'info'],
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+  except OSError:
+    # command is apparently either not installed or not executable.
+    return None
+  if not proc:
+    return None
+
+  attrs = {}
+  for line in proc.stdout:
+    line = line.strip()
+    if not line:
+      continue
+    key, val = line.split(': ', 1)
+    attrs[key] = val
+
+  try:
+    match = svn_url_regex.search(attrs['URL'])
+    if match:
+      url = match.group(2)
+    else:
+      url = ''
+    revision = attrs['Revision']
+  except KeyError:
+    return None
+
+  return VersionInfo(url, revision)
+
+
+def RunGitCommand(directory, command):
+  """
+  Launches git subcommand.
+
+  Errors are swallowed.
+
+  Returns:
+    A process object or None.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin & win32. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform in ('cygwin', 'win32'):
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory)
+    return proc
+  except OSError:
+    return None
+
+
+def FetchGitRevision(directory):
+  """
+  Fetch the Git hash for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      return VersionInfo('git', output[:7])
+  return None
+
+
+def FetchGitSVNURLAndRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion URL and revision through Git.
+
+  Errors are swallowed.
+
+  Returns:
+    A tuple containing the Subversion URL and revision.
+  """
+  proc = RunGitCommand(directory, ['log', '-1',
+                                   '--grep=git-svn-id', '--format=%b'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      # Extract the latest SVN revision and the SVN URL.
+      # The target line is the last "git-svn-id: ..." line like this:
+      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+      match = _GIT_SVN_ID_REGEX.search(output)
+      if match:
+        revision = match.group(2)
+        url_match = svn_url_regex.search(match.group(1))
+        if url_match:
+          url = url_match.group(2)
+        else:
+          url = ''
+        return url, revision
+  return None, None
+
+
+def IsGitSVNDirty(directory):
+  """
+  Checks whether our git-svn tree contains clean trunk or any local changes.
+
+  Errors are swallowed.
+  """
+  proc = RunGitCommand(directory, ['log', '-1'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      # Extract the latest SVN revision and the SVN URL.
+      # The target line is the last "git-svn-id: ..." line like this:
+      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+      match = _GIT_SVN_ID_REGEX.search(output)
+      if match:
+        # Check if there are any local uncommitted changes.
+        proc = RunGitCommand(directory, ['checkout'])
+        if proc:
+          output = proc.communicate()[0].strip()
+          if proc.returncode == 0 and not output:
+            return False
+  return True
+
+
+def FetchGitSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Git-SVN identifier for the local tree.
+
+  Errors are swallowed.
+  """
+  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
+  if url and revision:
+    if IsGitSVNDirty(directory):
+      revision = revision + '-dirty'
+    return VersionInfo(url, revision)
+  return None
+
+
+def FetchVersionInfo(default_lastchange, directory=None,
+                     directory_regex_prior_to_src_url='chrome|svn'):
+  """
+  Returns the last change (in the form of a branch, revision tuple),
+  from some appropriate revision control system.
+  """
+  svn_url_regex = re.compile(
+      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
+
+  version_info = (FetchSVNRevision(directory, svn_url_regex) or
+                  FetchGitSVNRevision(directory, svn_url_regex) or
+                  FetchGitRevision(directory))
+  if not version_info:
+    if default_lastchange and os.path.exists(default_lastchange):
+      revision = open(default_lastchange, 'r').read().strip()
+      version_info = VersionInfo(None, revision)
+    else:
+      version_info = VersionInfo(None, None)
+  return version_info
+
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  parser = optparse.OptionParser(usage="lastchange.py [options]")
+  parser.add_option("-d", "--default-lastchange", metavar="FILE",
+                    help="default last change input FILE")
+  parser.add_option("-o", "--output", metavar="FILE",
+                    help="write last change to FILE")
+  parser.add_option("--revision-only", action='store_true',
+                    help="just print the SVN revision number")
+  opts, args = parser.parse_args(argv[1:])
+
+  out_file = opts.output
+
+  while len(args) and out_file is None:
+    if out_file is None:
+      out_file = args.pop(0)
+  if args:
+    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
+    parser.print_help()
+    sys.exit(2)
+
+  version_info = FetchVersionInfo(opts.default_lastchange)
+
+  if version_info.revision == None:
+    version_info.revision = '0'
+
+  if opts.revision_only:
+    print version_info.revision
+  else:
+    contents = "LASTCHANGE=%s\n" % version_info.revision
+    if out_file:
+      WriteIfChanged(out_file, contents)
+    else:
+      sys.stdout.write(contents)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
new file mode 100644
index 0000000..2d1ad41
--- /dev/null
+++ b/build/whitespace_file.txt
@@ -0,0 +1,45 @@
+Copyright (c) 2011 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+==============================================================================
+
+Let's make a story. Add one sentence for each commit:
+
+CHAPTER 1:
+It was a dark and stormy night; the rain fell in torrents--except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the darkness. A dark figure emerged.
+
+It was a Domo-Kun. "What took you so long?", inquired his wife.
+Silence. Not noticing his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt", he snickered.
+The pause was filled with the sound of thunder.
+
+CHAPTER 2:
+The syrup was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny syrup spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of the time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it.". A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed him so far.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred, just the week before.
+
diff --git a/build/win/chrome_win.croc b/build/win/chrome_win.croc
new file mode 100644
index 0000000..e1e3bb7
--- /dev/null
+++ b/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, posix, or linux specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_win\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/win/system.gyp b/build/win/system.gyp
new file mode 100644
index 0000000..bc29e82
--- /dev/null
+++ b/build/win/system.gyp
@@ -0,0 +1,27 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'cygwin',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'setup_mount',
+          'msvs_cygwin_shell': 0,
+          'inputs': [
+            '../../third_party/cygwin/setup_mount.bat',
+          ],
+          # Visual Studio requires an output file, or else the
+          # custom build step won't run.
+          'outputs': [
+            '../../third_party/cygwin/_always_run_setup_mount.marker',
+          ],
+          'action': ['', '<@(_inputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/build/win_precompile.gypi b/build/win_precompile.gypi
new file mode 100644
index 0000000..fb86076
--- /dev/null
+++ b/build/win_precompile.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default
+# precompiled header on Windows, in debug builds only as the official
+# builders blow up (out of memory) if precompiled headers are used for
+# release builds.
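+#
+# A .gyp file would typically pull this in with (path shown is illustrative):
+#   'includes': ['../build/win_precompile.gypi'],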
+
+{
+  'conditions': [
+    ['OS=="win" and chromium_win_pch==1', {
+        'target_defaults': {
+          'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+          'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+          'sources': ['<(DEPTH)/build/precompile.cc'],
+        }
+      }],
+  ],
+}
diff --git a/codereview.settings b/codereview.settings
new file mode 100644
index 0000000..b70057e
--- /dev/null
+++ b/codereview.settings
@@ -0,0 +1,9 @@
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: webrtc-codereview.appspot.com
+#CC_LIST:
+#VIEW_VC:
+#STATUS: 
+TRY_ON_UPLOAD: False
+#TRYSERVER_SVN_URL: 
+#GITCL_PREUPLOAD: 
+#GITCL_PREDCOMMIT: 
diff --git a/libvpx.mk b/libvpx.mk
new file mode 100644
index 0000000..07c04dc
--- /dev/null
+++ b/libvpx.mk
@@ -0,0 +1,107 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MY_LIBVPX_PATH = ../libvpx
+
+LOCAL_SRC_FILES = \
+     $(MY_LIBVPX_PATH)/vp8/common/alloccommon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/blockd.c \
+     $(MY_LIBVPX_PATH)/vp8/common/debugmodes.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropy.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymode.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/extend.c \
+     $(MY_LIBVPX_PATH)/vp8/common/filter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/findnearmv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/generic/systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/common/idctllm.c \
+     $(MY_LIBVPX_PATH)/vp8/common/invtrans.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter_filters.c \
+     $(MY_LIBVPX_PATH)/vp8/common/mbpitch.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecont.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecontext.c \
+     $(MY_LIBVPX_PATH)/vp8/common/quant_common.c \
+     $(MY_LIBVPX_PATH)/vp8/common/recon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconinter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra4x4.c \
+     $(MY_LIBVPX_PATH)/vp8/common/setupintrarecon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/swapyv12buffer.c \
+     $(MY_LIBVPX_PATH)/vp8/common/textblit.c \
+     $(MY_LIBVPX_PATH)/vp8/common/treecoder.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_cx_iface.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_dx_iface.c \
+     $(MY_LIBVPX_PATH)/vpx_config.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_codec.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_decoder.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_image.c \
+     $(MY_LIBVPX_PATH)/vpx_mem/vpx_mem.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/vpxscale.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12config.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12extend.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/gen_scalers.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/scalesystemdependent.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_encoder.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/bitstream.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/boolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/dct.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeframe.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeintra.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemb.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ethreading.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/firstpass.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/generic/csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/mcomp.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/modecosts.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/pickinter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/picklpf.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/psnr.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/quantize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ratectrl.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/rdopt.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/sad_c.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/segmentation.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/tokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/treewriter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/onyx_if.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/temporal_filter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/variance_c.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dboolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodframe.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dequantize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/detokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/generic/dsystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/onyxd_if.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/reconintra_mt.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/threading.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/idct_blk.c \
+     $(MY_LIBVPX_PATH)/vp8/common/arm/arm_systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/arm/arm_csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/arm/arm_dsystemdependent.c \
+
+LOCAL_CFLAGS := \
+    -DHAVE_CONFIG_H=vpx_config.h \
+    -include $(LOCAL_PATH)/third_party/libvpx/source/config/android/vpx_config.h
+
+LOCAL_MODULE := libwebrtc_vpx
+
+LOCAL_C_INCLUDES := \
+    external/libvpx \
+    external/libvpx/vpx_ports \
+    external/libvpx/vp8/common \
+    external/libvpx/vp8/encoder \
+    external/libvpx/vp8 \
+    external/libvpx/vpx_codec 
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/license_template.txt b/license_template.txt
new file mode 100644
index 0000000..5a3e653
--- /dev/null
+++ b/license_template.txt
@@ -0,0 +1,10 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
diff --git a/peerconnection/OWNERS b/peerconnection/OWNERS
new file mode 100644
index 0000000..0fba125
--- /dev/null
+++ b/peerconnection/OWNERS
@@ -0,0 +1 @@
+tommi@webrtc.org
diff --git a/peerconnection/peerconnection.Makefile b/peerconnection/peerconnection.Makefile
new file mode 100644
index 0000000..729c818
--- /dev/null
+++ b/peerconnection/peerconnection.Makefile
@@ -0,0 +1,6 @@
+# This file is generated by gyp; do not edit.
+
+export builddir_name ?= trunk/peerconnection/out
+.PHONY: all
+all:
+	$(MAKE) -C .. peerconnection_server peerconnection_client
diff --git a/peerconnection/peerconnection.gyp b/peerconnection/peerconnection.gyp
new file mode 100644
index 0000000..ab24dd5
--- /dev/null
+++ b/peerconnection/peerconnection.gyp
@@ -0,0 +1,103 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ '../src/build/common.gypi', ],
+  'targets': [
+    {
+      'target_name': 'peerconnection_server',
+      'type': 'executable',
+      'sources': [
+        'samples/server/data_socket.cc',
+        'samples/server/data_socket.h',
+        'samples/server/main.cc',
+        'samples/server/peer_channel.cc',
+        'samples/server/peer_channel.h',
+        'samples/server/utils.cc',
+        'samples/server/utils.h',
+      ],
+    },
+  ],
+  'conditions': [
+    ['OS=="win"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            'samples/client/conductor.cc',
+            'samples/client/conductor.h',
+            'samples/client/defaults.cc',
+            'samples/client/defaults.h',
+            'samples/client/main.cc',
+            'samples/client/main_wnd.cc',
+            'samples/client/main_wnd.h',
+            'samples/client/peer_connection_client.cc',
+            'samples/client/peer_connection_client.h',
+            '../third_party/libjingle/source/talk/base/win32socketinit.cc',
+            '../third_party/libjingle/source/talk/base/win32socketserver.cc',
+          ],
+          'msvs_settings': {
+            'VCLinkerTool': {
+             'SubSystem': '2',  # Windows
+            },
+          },
+          'dependencies': [
+            '../third_party_mods/libjingle/libjingle.gyp:libjingle_app',
+          ],
+          'include_dirs': [
+            '../third_party/libjingle/source',
+            '../third_party_mods/libjingle/source',
+          ],
+        },
+      ],  # targets
+    }, ],  # OS="win"
+    ['OS=="linux"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            'samples/client/conductor.cc',
+            'samples/client/conductor.h',
+            'samples/client/defaults.cc',
+            'samples/client/defaults.h',
+            'samples/client/linux/main.cc',
+            'samples/client/linux/main_wnd.cc',
+            'samples/client/linux/main_wnd.h',
+            'samples/client/peer_connection_client.cc',
+            'samples/client/peer_connection_client.h',
+          ],
+          'dependencies': [
+            '../third_party_mods/libjingle/libjingle.gyp:libjingle_app',
+            # TODO(tommi): Switch to this and remove specific gtk dependency
+            # sections below for cflags and link_settings.
+            # '<(DEPTH)/build/linux/system.gyp:gtk',
+          ],
+          'include_dirs': [
+            '../third_party/libjingle/source',
+            '../third_party_mods/libjingle/source',
+          ],
+          'cflags': [
+            '<!@(pkg-config --cflags gtk+-2.0)',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
+              '-lX11',
+              '-lXext',
+            ],
+          },
+        },
+      ],  # targets
+    }, ],  # OS="linux"
+  ],
+}
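In the Linux peerconnection_client target above, the '<!@(pkg-config ...)' entries are gyp command expansions: the command runs when the projects are generated and its output is split into separate list items, which is why the generated peerconnection_client.target.mk below carries the expanded -I/usr/include/gtk-2.0 style include flags and -lgtk-x11-2.0 style libraries literally. A minimal sketch of the same mechanism in isolation (gtk_flags_demo and demo.cc are invented names for illustration):

    {
      'targets': [
        {
          'target_name': 'gtk_flags_demo',  # illustrative only
          'type': 'executable',
          'sources': [ 'demo.cc' ],
          # '<!@(command)' runs the command at gyp time and splits its
          # output into separate list entries.
          'cflags': [ '<!@(pkg-config --cflags gtk+-2.0)' ],
          'link_settings': {
            'libraries': [ '<!@(pkg-config --libs-only-l gtk+-2.0)' ],
          },
        },
      ],
    }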
diff --git a/peerconnection/peerconnection_client.target.mk b/peerconnection/peerconnection_client.target.mk
new file mode 100644
index 0000000..d5c3de2
--- /dev/null
+++ b/peerconnection/peerconnection_client.target.mk
@@ -0,0 +1,240 @@
+# This file is generated by gyp; do not edit.
+
+TOOLSET := target
+TARGET := peerconnection_client
+DEFS_Debug := '-DNO_HEAPCHECKER' \
+	'-DCHROMIUM_BUILD' \
+	'-DUSE_NSS=1' \
+	'-DTOOLKIT_USES_GTK=1' \
+	'-DGTK_DISABLE_SINGLE_INCLUDES=1' \
+	'-DWEBUI_TASK_MANAGER=1' \
+	'-DENABLE_REMOTING=1' \
+	'-DENABLE_P2P_APIS=1' \
+	'-DENABLE_CONFIGURATION_POLICY' \
+	'-DENABLE_INPUT_SPEECH' \
+	'-DENABLE_NOTIFICATIONS' \
+	'-DENABLE_GPU=1' \
+	'-DENABLE_EGLIMAGE=1' \
+	'-DUSE_SKIA=1' \
+	'-DENABLE_REGISTER_PROTOCOL_HANDLER=1' \
+	'-DWEBRTC_TARGET_PC' \
+	'-DWEBRTC_LINUX' \
+	'-DWEBRTC_THREAD_RR' \
+	'-DFEATURE_ENABLE_SSL' \
+	'-DFEATURE_ENABLE_VOICEMAIL' \
+	'-DEXPAT_RELATIVE_PATH' \
+	'-DWEBRTC_RELATIVE_PATH' \
+	'-DLINUX' \
+	'-DPOSIX' \
+	'-D__STDC_FORMAT_MACROS' \
+	'-DDYNAMIC_ANNOTATIONS_ENABLED=1' \
+	'-DWTF_USE_DYNAMIC_ANNOTATIONS=1' \
+	'-D_DEBUG'
+
+# Flags passed to all source files.
+CFLAGS_Debug := -Werror \
+	-pthread \
+	-fno-exceptions \
+	-Wall \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-D_FILE_OFFSET_BITS=64 \
+	-fvisibility=hidden \
+	-pipe \
+	-fPIC \
+	-fno-strict-aliasing \
+	-Wextra \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-pthread \
+	-D_REENTRANT \
+	-I/usr/include/gtk-2.0 \
+	-I/usr/lib/gtk-2.0/include \
+	-I/usr/include/atk-1.0 \
+	-I/usr/include/cairo \
+	-I/usr/include/pango-1.0 \
+	-I/usr/include/gio-unix-2.0/ \
+	-I/usr/include/glib-2.0 \
+	-I/usr/lib/glib-2.0/include \
+	-I/usr/include/pixman-1 \
+	-I/usr/include/freetype2 \
+	-I/usr/include/directfb \
+	-I/usr/include/libpng12 \
+	-O0 \
+	-g
+
+# Flags passed to only C files.
+CFLAGS_C_Debug := 
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Debug := -fno-rtti \
+	-fno-threadsafe-statics \
+	-fvisibility-inlines-hidden \
+	-Wsign-compare
+
+INCS_Debug := -Isrc \
+	-I. \
+	-Ithird_party/libjingle/source \
+	-Ithird_party_mods/libjingle/source
+
+DEFS_Release := '-DNO_HEAPCHECKER' \
+	'-DCHROMIUM_BUILD' \
+	'-DUSE_NSS=1' \
+	'-DTOOLKIT_USES_GTK=1' \
+	'-DGTK_DISABLE_SINGLE_INCLUDES=1' \
+	'-DWEBUI_TASK_MANAGER=1' \
+	'-DENABLE_REMOTING=1' \
+	'-DENABLE_P2P_APIS=1' \
+	'-DENABLE_CONFIGURATION_POLICY' \
+	'-DENABLE_INPUT_SPEECH' \
+	'-DENABLE_NOTIFICATIONS' \
+	'-DENABLE_GPU=1' \
+	'-DENABLE_EGLIMAGE=1' \
+	'-DUSE_SKIA=1' \
+	'-DENABLE_REGISTER_PROTOCOL_HANDLER=1' \
+	'-DWEBRTC_TARGET_PC' \
+	'-DWEBRTC_LINUX' \
+	'-DWEBRTC_THREAD_RR' \
+	'-DFEATURE_ENABLE_SSL' \
+	'-DFEATURE_ENABLE_VOICEMAIL' \
+	'-DEXPAT_RELATIVE_PATH' \
+	'-DWEBRTC_RELATIVE_PATH' \
+	'-DLINUX' \
+	'-DPOSIX' \
+	'-D__STDC_FORMAT_MACROS' \
+	'-DNDEBUG' \
+	'-DNVALGRIND' \
+	'-DDYNAMIC_ANNOTATIONS_ENABLED=0'
+
+# Flags passed to all source files.
+CFLAGS_Release := -Werror \
+	-pthread \
+	-fno-exceptions \
+	-Wall \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-D_FILE_OFFSET_BITS=64 \
+	-fvisibility=hidden \
+	-pipe \
+	-fPIC \
+	-fno-strict-aliasing \
+	-Wextra \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-pthread \
+	-D_REENTRANT \
+	-I/usr/include/gtk-2.0 \
+	-I/usr/lib/gtk-2.0/include \
+	-I/usr/include/atk-1.0 \
+	-I/usr/include/cairo \
+	-I/usr/include/pango-1.0 \
+	-I/usr/include/gio-unix-2.0/ \
+	-I/usr/include/glib-2.0 \
+	-I/usr/lib/glib-2.0/include \
+	-I/usr/include/pixman-1 \
+	-I/usr/include/freetype2 \
+	-I/usr/include/directfb \
+	-I/usr/include/libpng12 \
+	-O2 \
+	-fno-ident \
+	-fdata-sections \
+	-ffunction-sections
+
+# Flags passed to only C files.
+CFLAGS_C_Release := 
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Release := -fno-rtti \
+	-fno-threadsafe-statics \
+	-fvisibility-inlines-hidden \
+	-Wsign-compare
+
+INCS_Release := -Isrc \
+	-I. \
+	-Ithird_party/libjingle/source \
+	-Ithird_party_mods/libjingle/source
+
+OBJS := $(obj).target/$(TARGET)/peerconnection/samples/client/conductor.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/client/defaults.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/client/linux/main.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/client/linux/main_wnd.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/client/peer_connection_client.o
+
+# Add to the list of files we specially track dependencies for.
+all_deps += $(OBJS)
+
+# Make sure our dependencies are built before any of us.
+$(OBJS): | $(obj).target/third_party_mods/libjingle/libjingle_app.a $(obj).target/third_party/expat/expat.stamp $(obj).target/third_party/libsrtp/libsrtp.a $(obj).target/third_party_mods/libjingle/libjsoncpp.a $(obj).target/src/modules/libvideo_capture_module.a $(obj).target/src/modules/libwebrtc_utility.a $(obj).target/src/modules/libaudio_coding_module.a $(obj).target/src/modules/libCNG.a $(obj).target/src/common_audio/libsignal_processing.a $(obj).target/src/modules/libG711.a $(obj).target/src/modules/libG722.a $(obj).target/src/modules/libiLBC.a $(obj).target/src/modules/libiSAC.a $(obj).target/src/modules/libiSACFix.a $(obj).target/src/modules/libPCM16B.a $(obj).target/src/modules/libNetEq.a $(obj).target/src/common_audio/libresampler.a $(obj).target/src/common_audio/libvad.a $(obj).target/src/system_wrappers/source/libsystem_wrappers.a $(obj).target/src/modules/libwebrtc_video_coding.a $(obj).target/src/modules/libwebrtc_i420.a $(obj).target/src/modules/libwebrtc_vp8.a $(obj).target/src/common_video/libwebrtc_libyuv.a $(obj).target/third_party/libyuv/libyuv.a $(obj).target/third_party/libvpx/libvpx.a $(obj).target/src/modules/libvideo_render_module.a $(obj).target/src/video_engine/libvideo_engine_core.a $(obj).target/src/common_video/libwebrtc_jpeg.a $(obj).target/third_party/libjpeg_turbo/libjpeg_turbo.a $(obj).target/src/modules/libmedia_file.a $(obj).target/src/modules/librtp_rtcp.a $(obj).target/src/modules/libudp_transport.a $(obj).target/src/modules/libvideo_processing.a $(obj).target/src/modules/libvideo_processing_sse2.a $(obj).target/src/voice_engine/libvoice_engine_core.a $(obj).target/src/modules/libaudio_conference_mixer.a $(obj).target/src/modules/libaudio_processing.a $(obj).target/src/modules/libaec.a $(obj).target/src/modules/libapm_util.a $(obj).target/src/modules/libaec_sse2.a $(obj).target/src/modules/libaecm.a $(obj).target/src/modules/libagc.a $(obj).target/src/modules/libns.a $(obj).target/src/modules/libaudioproc_debug_proto.a $(obj).target/third_party/protobuf/libprotobuf_lite.a $(obj).target/src/modules/libaudio_device.a $(obj).target/third_party_mods/libjingle/libjingle_p2p.a $(obj).target/third_party_mods/libjingle/libjingle.a
+
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual.
+$(OBJS): TOOLSET := $(TOOLSET)
+$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
+$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
+
+# Suffix rules, putting all outputs into $(obj).
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+# Try building from generated source, too.
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
+	@$(call do_cmd,cxx,1)
+
+# End of this set of suffix rules
+### Rules for final target.
+LDFLAGS_Debug := -pthread \
+	-Wl,-z,noexecstack \
+	-fPIC
+
+LDFLAGS_Release := -pthread \
+	-Wl,-z,noexecstack \
+	-fPIC \
+	-Wl,-O1 \
+	-Wl,--as-needed \
+	-Wl,--gc-sections
+
+LIBS := -lgtk-x11-2.0 \
+	-lgdk-x11-2.0 \
+	-latk-1.0 \
+	-lgio-2.0 \
+	-lpangoft2-1.0 \
+	-lgdk_pixbuf-2.0 \
+	-lm \
+	-lpangocairo-1.0 \
+	-lcairo \
+	-lpango-1.0 \
+	-lfreetype \
+	-lfontconfig \
+	-lgobject-2.0 \
+	-lgmodule-2.0 \
+	-lgthread-2.0 \
+	-lrt \
+	-lglib-2.0 \
+	-lX11 \
+	-lXext \
+	-lexpat \
+	-ldl \
+	-lasound \
+	-lpulse
+
+$(builddir)/peerconnection_client: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
+$(builddir)/peerconnection_client: LIBS := $(LIBS)
+$(builddir)/peerconnection_client: LD_INPUTS := $(OBJS) $(obj).target/third_party_mods/libjingle/libjingle_app.a $(obj).target/third_party/libsrtp/libsrtp.a $(obj).target/third_party_mods/libjingle/libjsoncpp.a $(obj).target/src/modules/libvideo_capture_module.a $(obj).target/src/modules/libwebrtc_utility.a $(obj).target/src/modules/libaudio_coding_module.a $(obj).target/src/modules/libCNG.a $(obj).target/src/common_audio/libsignal_processing.a $(obj).target/src/modules/libG711.a $(obj).target/src/modules/libG722.a $(obj).target/src/modules/libiLBC.a $(obj).target/src/modules/libiSAC.a $(obj).target/src/modules/libiSACFix.a $(obj).target/src/modules/libPCM16B.a $(obj).target/src/modules/libNetEq.a $(obj).target/src/common_audio/libresampler.a $(obj).target/src/common_audio/libvad.a $(obj).target/src/system_wrappers/source/libsystem_wrappers.a $(obj).target/src/modules/libwebrtc_video_coding.a $(obj).target/src/modules/libwebrtc_i420.a $(obj).target/src/modules/libwebrtc_vp8.a $(obj).target/src/common_video/libwebrtc_libyuv.a $(obj).target/third_party/libyuv/libyuv.a $(obj).target/third_party/libvpx/libvpx.a $(obj).target/src/modules/libvideo_render_module.a $(obj).target/src/video_engine/libvideo_engine_core.a $(obj).target/src/common_video/libwebrtc_jpeg.a $(obj).target/third_party/libjpeg_turbo/libjpeg_turbo.a $(obj).target/src/modules/libmedia_file.a $(obj).target/src/modules/librtp_rtcp.a $(obj).target/src/modules/libudp_transport.a $(obj).target/src/modules/libvideo_processing.a $(obj).target/src/modules/libvideo_processing_sse2.a $(obj).target/src/voice_engine/libvoice_engine_core.a $(obj).target/src/modules/libaudio_conference_mixer.a $(obj).target/src/modules/libaudio_processing.a $(obj).target/src/modules/libaec.a $(obj).target/src/modules/libapm_util.a $(obj).target/src/modules/libaec_sse2.a $(obj).target/src/modules/libaecm.a $(obj).target/src/modules/libagc.a $(obj).target/src/modules/libns.a $(obj).target/src/modules/libaudioproc_debug_proto.a $(obj).target/third_party/protobuf/libprotobuf_lite.a $(obj).target/src/modules/libaudio_device.a $(obj).target/third_party_mods/libjingle/libjingle_p2p.a $(obj).target/third_party_mods/libjingle/libjingle.a
+$(builddir)/peerconnection_client: TOOLSET := $(TOOLSET)
+$(builddir)/peerconnection_client: $(OBJS) $(obj).target/third_party_mods/libjingle/libjingle_app.a $(obj).target/third_party/libsrtp/libsrtp.a $(obj).target/third_party_mods/libjingle/libjsoncpp.a $(obj).target/src/modules/libvideo_capture_module.a $(obj).target/src/modules/libwebrtc_utility.a $(obj).target/src/modules/libaudio_coding_module.a $(obj).target/src/modules/libCNG.a $(obj).target/src/common_audio/libsignal_processing.a $(obj).target/src/modules/libG711.a $(obj).target/src/modules/libG722.a $(obj).target/src/modules/libiLBC.a $(obj).target/src/modules/libiSAC.a $(obj).target/src/modules/libiSACFix.a $(obj).target/src/modules/libPCM16B.a $(obj).target/src/modules/libNetEq.a $(obj).target/src/common_audio/libresampler.a $(obj).target/src/common_audio/libvad.a $(obj).target/src/system_wrappers/source/libsystem_wrappers.a $(obj).target/src/modules/libwebrtc_video_coding.a $(obj).target/src/modules/libwebrtc_i420.a $(obj).target/src/modules/libwebrtc_vp8.a $(obj).target/src/common_video/libwebrtc_libyuv.a $(obj).target/third_party/libyuv/libyuv.a $(obj).target/third_party/libvpx/libvpx.a $(obj).target/src/modules/libvideo_render_module.a $(obj).target/src/video_engine/libvideo_engine_core.a $(obj).target/src/common_video/libwebrtc_jpeg.a $(obj).target/third_party/libjpeg_turbo/libjpeg_turbo.a $(obj).target/src/modules/libmedia_file.a $(obj).target/src/modules/librtp_rtcp.a $(obj).target/src/modules/libudp_transport.a $(obj).target/src/modules/libvideo_processing.a $(obj).target/src/modules/libvideo_processing_sse2.a $(obj).target/src/voice_engine/libvoice_engine_core.a $(obj).target/src/modules/libaudio_conference_mixer.a $(obj).target/src/modules/libaudio_processing.a $(obj).target/src/modules/libaec.a $(obj).target/src/modules/libapm_util.a $(obj).target/src/modules/libaec_sse2.a $(obj).target/src/modules/libaecm.a $(obj).target/src/modules/libagc.a $(obj).target/src/modules/libns.a $(obj).target/src/modules/libaudioproc_debug_proto.a $(obj).target/third_party/protobuf/libprotobuf_lite.a $(obj).target/src/modules/libaudio_device.a $(obj).target/third_party_mods/libjingle/libjingle_p2p.a $(obj).target/third_party_mods/libjingle/libjingle.a FORCE_DO_CMD
+	$(call do_cmd,link)
+
+all_deps += $(builddir)/peerconnection_client
+# Add target alias
+.PHONY: peerconnection_client
+peerconnection_client: $(builddir)/peerconnection_client
+
+# Add executable to "all" target.
+.PHONY: all
+all: $(builddir)/peerconnection_client
+
diff --git a/peerconnection/peerconnection_server.target.mk b/peerconnection/peerconnection_server.target.mk
new file mode 100644
index 0000000..ad94117
--- /dev/null
+++ b/peerconnection/peerconnection_server.target.mk
@@ -0,0 +1,170 @@
+# This file is generated by gyp; do not edit.
+
+TOOLSET := target
+TARGET := peerconnection_server
+DEFS_Debug := '-DNO_HEAPCHECKER' \
+	'-DCHROMIUM_BUILD' \
+	'-DUSE_NSS=1' \
+	'-DTOOLKIT_USES_GTK=1' \
+	'-DGTK_DISABLE_SINGLE_INCLUDES=1' \
+	'-DWEBUI_TASK_MANAGER=1' \
+	'-DENABLE_REMOTING=1' \
+	'-DENABLE_P2P_APIS=1' \
+	'-DENABLE_CONFIGURATION_POLICY' \
+	'-DENABLE_INPUT_SPEECH' \
+	'-DENABLE_NOTIFICATIONS' \
+	'-DENABLE_GPU=1' \
+	'-DENABLE_EGLIMAGE=1' \
+	'-DUSE_SKIA=1' \
+	'-DENABLE_REGISTER_PROTOCOL_HANDLER=1' \
+	'-DWEBRTC_TARGET_PC' \
+	'-DWEBRTC_LINUX' \
+	'-DWEBRTC_THREAD_RR' \
+	'-D__STDC_FORMAT_MACROS' \
+	'-DDYNAMIC_ANNOTATIONS_ENABLED=1' \
+	'-DWTF_USE_DYNAMIC_ANNOTATIONS=1' \
+	'-D_DEBUG'
+
+# Flags passed to all source files.
+CFLAGS_Debug := -Werror \
+	-pthread \
+	-fno-exceptions \
+	-Wall \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-D_FILE_OFFSET_BITS=64 \
+	-fvisibility=hidden \
+	-pipe \
+	-fPIC \
+	-fno-strict-aliasing \
+	-Wextra \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-O0 \
+	-g
+
+# Flags passed to only C files.
+CFLAGS_C_Debug := 
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Debug := -fno-rtti \
+	-fno-threadsafe-statics \
+	-fvisibility-inlines-hidden \
+	-Wsign-compare
+
+INCS_Debug := -Isrc \
+	-I.
+
+DEFS_Release := '-DNO_HEAPCHECKER' \
+	'-DCHROMIUM_BUILD' \
+	'-DUSE_NSS=1' \
+	'-DTOOLKIT_USES_GTK=1' \
+	'-DGTK_DISABLE_SINGLE_INCLUDES=1' \
+	'-DWEBUI_TASK_MANAGER=1' \
+	'-DENABLE_REMOTING=1' \
+	'-DENABLE_P2P_APIS=1' \
+	'-DENABLE_CONFIGURATION_POLICY' \
+	'-DENABLE_INPUT_SPEECH' \
+	'-DENABLE_NOTIFICATIONS' \
+	'-DENABLE_GPU=1' \
+	'-DENABLE_EGLIMAGE=1' \
+	'-DUSE_SKIA=1' \
+	'-DENABLE_REGISTER_PROTOCOL_HANDLER=1' \
+	'-DWEBRTC_TARGET_PC' \
+	'-DWEBRTC_LINUX' \
+	'-DWEBRTC_THREAD_RR' \
+	'-D__STDC_FORMAT_MACROS' \
+	'-DNDEBUG' \
+	'-DNVALGRIND' \
+	'-DDYNAMIC_ANNOTATIONS_ENABLED=0'
+
+# Flags passed to all source files.
+CFLAGS_Release := -Werror \
+	-pthread \
+	-fno-exceptions \
+	-Wall \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-D_FILE_OFFSET_BITS=64 \
+	-fvisibility=hidden \
+	-pipe \
+	-fPIC \
+	-fno-strict-aliasing \
+	-Wextra \
+	-Wno-unused-parameter \
+	-Wno-missing-field-initializers \
+	-O2 \
+	-fno-ident \
+	-fdata-sections \
+	-ffunction-sections
+
+# Flags passed to only C files.
+CFLAGS_C_Release := 
+
+# Flags passed to only C++ files.
+CFLAGS_CC_Release := -fno-rtti \
+	-fno-threadsafe-statics \
+	-fvisibility-inlines-hidden \
+	-Wsign-compare
+
+INCS_Release := -Isrc \
+	-I.
+
+OBJS := $(obj).target/$(TARGET)/peerconnection/samples/server/data_socket.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/server/main.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/server/peer_channel.o \
+	$(obj).target/$(TARGET)/peerconnection/samples/server/utils.o
+
+# Add to the list of files we specially track dependencies for.
+all_deps += $(OBJS)
+
+# CFLAGS et al over